Skip to content

Commit

Permalink
merge: pull request TBXark#240 from TBXark/dev
Browse files Browse the repository at this point in the history
doc: fix config document
  • Loading branch information
TBXark authored Feb 4, 2024
2 parents b550977 + b0a60fb commit cdb722b
Show file tree
Hide file tree
Showing 9 changed files with 80 additions and 28 deletions.
2 changes: 1 addition & 1 deletion dist/buildinfo.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"sha": "9273c89", "timestamp": 1705300786}
{"sha": "4d3903e", "timestamp": 1707027852}
47 changes: 34 additions & 13 deletions dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@ var Environment = class {
// -- 版本数据 --
//
// 当前版本
BUILD_TIMESTAMP = 1705300786;
BUILD_TIMESTAMP = 1707027852;
// 当前版本 commit id
BUILD_VERSION = "9273c89";
BUILD_VERSION = "4d3903e";
// -- 基础配置 --
/**
* @type {I18n | null}
Expand Down Expand Up @@ -899,6 +899,12 @@ async function makeResponse200(resp) {
});
}
}
// Returns true when the response's content-type header mentions "json".
// Fix: the content-type header can be absent, in which case headers.get()
// returns null and calling .indexOf on it throws — fall back to "".
function isJsonResponse(resp) {
  const contentType = resp.headers.get("content-type") || "";
  return contentType.indexOf("json") !== -1;
}
// Returns true when the response's content-type header declares an SSE
// stream ("text/event-stream").
// Fix: a missing content-type header makes headers.get() return null,
// which crashed the original on .indexOf — fall back to "".
function isEventStreamResponse(resp) {
  const contentType = resp.headers.get("content-type") || "";
  return contentType.indexOf("text/event-stream") !== -1;
}

// src/vendors/stream.js
var Stream = class {
Expand Down Expand Up @@ -1064,8 +1070,8 @@ var LineDecoder = class {
return lines;
}
};
LineDecoder.NEWLINE_CHARS = /* @__PURE__ */ new Set(["\n", "\r", "\v", "\f", "", "", "", "\x85", "\u2028", "\u2029"]);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029]/g;
LineDecoder.NEWLINE_CHARS = /* @__PURE__ */ new Set(["\n", "\r"]);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
function partition(str, delimiter) {
const index = str.indexOf(delimiter);
if (index !== -1) {
Expand Down Expand Up @@ -1125,14 +1131,14 @@ async function requestCompletionsFromOpenAI(message, history, context, onStream)
body: JSON.stringify(body),
signal
});
if (onStream && resp.ok && resp.headers.get("content-type").indexOf("text/event-stream") !== -1) {
if (onStream && resp.ok && isEventStreamResponse(resp)) {
const stream = new Stream(resp, controller);
let contentFull = "";
let lengthDelta = 0;
let updateStep = 20;
try {
for await (const data of stream) {
const c = data.choices[0].delta?.content || "";
const c = data?.choices?.[0]?.delta?.content || "";
lengthDelta += c.length;
contentFull = contentFull + c;
if (lengthDelta > updateStep) {
Expand All @@ -1148,7 +1154,20 @@ ERROR: ${e.message}`;
}
return contentFull;
}
if (!isJsonResponse(resp)) {
if (ENV.DEBUG_MODE || ENV.DEV_MODE) {
throw new Error(`OpenAI API Error
> ${resp.statusText}
Body: ${await resp.text()}`);
} else {
throw new Error(`OpenAI API Error
> ${resp.statusText}`);
}
}
const result = await resp.json();
if (!result) {
throw new Error("Empty response");
}
if (result.error?.message) {
if (ENV.DEBUG_MODE || ENV.DEV_MODE) {
throw new Error(`OpenAI API Error
Expand All @@ -1159,13 +1178,10 @@ Body: ${JSON.stringify(body)}`);
> ${result.error.message}`);
}
}
setTimeout(() => updateBotUsage(result.usage, context).catch(console.error), 0);
try {
setTimeout(() => updateBotUsage(result?.usage, context).catch(console.error), 0);
return result.choices[0].message.content;
} catch (e) {
if (!result) {
throw new Error("Empty response");
}
throw Error(result?.error?.message || JSON.stringify(result));
}
}
Expand Down Expand Up @@ -1231,7 +1247,7 @@ async function updateBotUsage(usage, context) {
await DATABASE.put(context.SHARE_CONTEXT.usageKey, JSON.stringify(dbValue));
}

// src/workers-ai.js
// src/workersai.js
async function run(model, body) {
const id = ENV.CLOUDFLARE_ACCOUNT_ID;
const token = ENV.CLOUDFLARE_TOKEN;
Expand All @@ -1255,7 +1271,7 @@ async function requestCompletionsFromWorkersAI(message, history, context, onStre
};
const resp = await run(model, request);
const controller = new AbortController();
if (onStream && resp.ok && resp.headers.get("content-type").indexOf("text/event-stream") !== -1) {
if (onStream && resp.ok && isEventStreamResponse(resp)) {
const stream = new Stream(resp, controller);
let contentFull = "";
let lengthDelta = 0;
Expand Down Expand Up @@ -1522,7 +1538,12 @@ async function chatWithLLM(text, context, modifier) {
}
return sendMessageToTelegramWithContext(context)(answer);
} catch (e) {
return sendMessageToTelegramWithContext(context)(`Error: ${e.message}`);
let errMsg = `Error: ${e.message}`;
if (errMsg.length > 2048) {
errMsg = errMsg.substring(0, 2048);
}
context.CURRENT_CHAT_CONTEXT.disable_web_page_preview = true;
return sendMessageToTelegramWithContext(context)(errMsg);
}
}

Expand Down
2 changes: 1 addition & 1 deletion dist/timestamp
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1705300786
1707027852
2 changes: 1 addition & 1 deletion doc/cn/CONFIG.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
| API_KEY | OpenAI API Key | `null` | 可以同时使用多个key,使用的时候会随机选择一个 |
| CHAT_MODEL | open ai 模型选择 | `gpt-3.5-turbo` | |
| OPENAI_API_DOMAIN | OPENAI API Domain [废弃: 使用 OPENAI_API_BASE] | `https://api.openai.com` | 可以替换为其他与OpenAI API兼容的其他服务商的域名 |
| OPENAI_API_DOMAIN | OPENAI API Base URL | `https://api.openai.com/v1` | 兼容Cloudflare AI 网关 |
| OPENAI_API_BASE | OPENAI API Base URL | `https://api.openai.com/v1` | 兼容Cloudflare AI 网关 |
| - | - | - | - |
| AZURE_API_KEY | azure api key | `null` | 支持azure的API,两个密钥随便选一个就可以。如果你要默认使用azure,你可以设置`AI_PROVIDER`为`azure` |
| AZURE_COMPLETIONS_API | azure api url | `null` | 格式`https://YOUR_RESOURCE_NAME.openai.azure.com/openai/deployments/YOUR_DEPLOYMENT_NAME/chat/completions?api-version=2023-05-15` |
Expand Down
9 changes: 7 additions & 2 deletions src/llm.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {DATABASE, ENV} from './env.js';
import {Context} from './context.js';
import {isAzureEnable, isOpenAIEnable, requestCompletionsFromOpenAI, requestImageFromOpenAI} from './openai.js';
import {tokensCounter} from './utils.js';
import {isWorkersAIEnable, requestCompletionsFromWorkersAI, requestImageFromWorkersAI} from './workers-ai.js';
import {isWorkersAIEnable, requestCompletionsFromWorkersAI, requestImageFromWorkersAI} from './workersai.js';
import {isGeminiAIEnable, requestCompletionsFromGeminiAI} from './gemini.js';


Expand Down Expand Up @@ -242,6 +242,11 @@ export async function chatWithLLM(text, context, modifier) {
}
return sendMessageToTelegramWithContext(context)(answer);
} catch (e) {
return sendMessageToTelegramWithContext(context)(`Error: ${e.message}`);
let errMsg = `Error: ${e.message}`;
if (errMsg.length > 2048) { // 裁剪错误信息 最长2048
errMsg = errMsg.substring(0, 2048);
}
context.CURRENT_CHAT_CONTEXT.disable_web_page_preview = true;
return sendMessageToTelegramWithContext(context)(errMsg);
}
}
21 changes: 14 additions & 7 deletions src/openai.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
/* eslint-disable no-unused-vars */
import {Context} from './context.js';
import {DATABASE, ENV} from './env.js';
import {isEventStreamResponse, isJsonResponse} from './utils.js';
import {Stream} from './vendors/stream.js';


Expand Down Expand Up @@ -89,14 +90,14 @@ export async function requestCompletionsFromOpenAI(message, history, context, on
body: JSON.stringify(body),
signal,
});
if (onStream && resp.ok && resp.headers.get('content-type').indexOf('text/event-stream') !== -1) {
if (onStream && resp.ok && isEventStreamResponse(resp)) {
const stream = new Stream(resp, controller);
let contentFull = '';
let lengthDelta = 0;
let updateStep = 20;
try {
for await (const data of stream) {
const c = data.choices[0].delta?.content || '';
const c = data?.choices?.[0]?.delta?.content || '';
lengthDelta += c.length;
contentFull = contentFull + c;
if (lengthDelta > updateStep) {
Expand All @@ -110,22 +111,28 @@ export async function requestCompletionsFromOpenAI(message, history, context, on
}
return contentFull;
}

if (!isJsonResponse(resp)) {
if (ENV.DEBUG_MODE || ENV.DEV_MODE) {
throw new Error(`OpenAI API Error\n> ${resp.statusText}\nBody: ${await resp.text()}`);
} else {
throw new Error(`OpenAI API Error\n> ${resp.statusText}`);
}
}
const result = await resp.json();
if (!result) {
throw new Error('Empty response');
}
if (result.error?.message) {
if (ENV.DEBUG_MODE || ENV.DEV_MODE) {
throw new Error(`OpenAI API Error\n> ${result.error.message}\nBody: ${JSON.stringify(body)}`);
} else {
throw new Error(`OpenAI API Error\n> ${result.error.message}`);
}
}
setTimeout(() => updateBotUsage(result.usage, context).catch(console.error), 0);
try {
setTimeout(() => updateBotUsage(result?.usage, context).catch(console.error), 0);
return result.choices[0].message.content;
} catch (e) {
if (!result) {
throw new Error('Empty response');
}
throw Error(result?.error?.message || JSON.stringify(result));
}
}
Expand Down
18 changes: 18 additions & 0 deletions src/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -184,3 +184,21 @@ export async function makeResponse200(resp) {
}});
}
}

/**
 * Check whether a response declares a JSON body via its `content-type` header.
 *
 * Tolerates a missing header: `headers.get()` returns `null` when the header
 * is absent, which made the original throw a TypeError on `.indexOf`.
 *
 * @param {Response} resp - The fetch Response to inspect.
 * @return {boolean} true if the content type contains "json".
 */
export function isJsonResponse(resp) {
  const contentType = resp.headers.get('content-type') || '';
  return contentType.includes('json');
}

/**
 * Check whether a response is a server-sent-events stream
 * (`content-type: text/event-stream`).
 *
 * Tolerates a missing header: `headers.get()` returns `null` when the header
 * is absent, which made the original throw a TypeError on `.indexOf`.
 *
 * @param {Response} resp - The fetch Response to inspect.
 * @return {boolean} true if the content type contains "text/event-stream".
 */
export function isEventStreamResponse(resp) {
  const contentType = resp.headers.get('content-type') || '';
  return contentType.includes('text/event-stream');
}
4 changes: 2 additions & 2 deletions src/vendors/stream.js
Original file line number Diff line number Diff line change
Expand Up @@ -180,8 +180,8 @@ class LineDecoder {
}
}
// prettier-ignore
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r', '\x0b', '\x0c', '\x1c', '\x1d', '\x1e', '\x85', '\u2028', '\u2029']);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029]/g;
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']);
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g;
function partition(str, delimiter) {
const index = str.indexOf(delimiter);
if (index !== -1) {
Expand Down
3 changes: 2 additions & 1 deletion src/workers-ai.js → src/workersai.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
/* eslint-disable no-unused-vars */
import {Context} from './context.js';
import {ENV} from './env.js';
import {isEventStreamResponse} from './utils.js';
import {Stream} from './vendors/stream.js';

/**
Expand Down Expand Up @@ -49,7 +50,7 @@ export async function requestCompletionsFromWorkersAI(message, history, context,
const resp = await run(model, request);
const controller = new AbortController();

if (onStream && resp.ok && resp.headers.get('content-type').indexOf('text/event-stream') !== -1) {
if (onStream && resp.ok && isEventStreamResponse(resp)) {
const stream = new Stream(resp, controller);
let contentFull = '';
let lengthDelta = 0;
Expand Down

0 comments on commit cdb722b

Please sign in to comment.