Skip to content

Commit

Permalink
perf: 优化ENV解析逻辑
Browse files Browse the repository at this point in the history
  • Loading branch information
TBXark committed Oct 8, 2023
1 parent 01eb8d9 commit cca7cf8
Show file tree
Hide file tree
Showing 10 changed files with 107 additions and 66 deletions.
2 changes: 1 addition & 1 deletion dist/buildinfo.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"sha": "12fa595", "timestamp": 1696745907}
{"sha": "01eb8d9", "timestamp": 1696754208}
61 changes: 37 additions & 24 deletions dist/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ var ENV = {
// 检查更新的分支
UPDATE_BRANCH: "master",
// 当前版本
BUILD_TIMESTAMP: 1696745907,
BUILD_TIMESTAMP: 1696754208,
// 当前版本 commit id
BUILD_VERSION: "12fa595",
BUILD_VERSION: "01eb8d9",
I18N: null,
LANGUAGE: "zh-cn",
// 使用流模式
Expand Down Expand Up @@ -74,17 +74,18 @@ var DATABASE = null;
var API_GUARD = null;
var AI = null;
var ENV_VALUE_TYPE = {
API_KEY: [],
API_KEY: "array",
AZURE_API_KEY: "string",
AZURE_COMPLETIONS_API: "string"
};
function initEnv(env, i18n2) {
DATABASE = env.DATABASE;
API_GUARD = env.API_GUARD;
AI = env.AI;
for (const key in ENV) {
for (const key of Object.keys(ENV)) {
const t = ENV_VALUE_TYPE[key] ? ENV_VALUE_TYPE[key] : typeof ENV[key];
if (env[key]) {
switch (ENV_VALUE_TYPE[key] ? typeof ENV_VALUE_TYPE[key] : typeof ENV[key]) {
switch (t) {
case "number":
ENV[key] = parseInt(env[key]) || ENV[key];
break;
Expand All @@ -94,11 +95,12 @@ function initEnv(env, i18n2) {
case "string":
ENV[key] = env[key];
break;
case "array":
ENV[key] = env[key].split(",");
break;
case "object":
if (Array.isArray(ENV[key])) {
ENV[key] = env[key].split(",");
} else if (ENV_VALUE_TYPE[key] && Array.isArray(ENV_VALUE_TYPE[key])) {
ENV[key] = env[key].split(",");
} else {
try {
ENV[key] = JSON.parse(env[key]);
Expand Down Expand Up @@ -292,7 +294,6 @@ var Context = class {
console.log(this.USER_CONFIG);
}
/**
*
* @return {string|null}
*/
openAIKeyFromContext() {
Expand All @@ -302,14 +303,22 @@ var Context = class {
if (this.USER_CONFIG.OPENAI_API_KEY) {
return this.USER_CONFIG.OPENAI_API_KEY;
}
if (Array.isArray(ENV.API_KEY)) {
if (ENV.API_KEY.length === 0) {
return null;
}
return ENV.API_KEY[Math.floor(Math.random() * ENV.API_KEY.length)];
} else {
return ENV.API_KEY;
if (ENV.API_KEY.length === 0) {
return null;
}
return ENV.API_KEY[Math.floor(Math.random() * ENV.API_KEY.length)];
}
/**
* @return {boolean}
*/
hasValidOpenAIKey() {
if (ENV.AZURE_COMPLETIONS_API) {
return ENV.AZURE_API_KEY !== null;
}
if (this.USER_CONFIG.OPENAI_API_KEY) {
return true;
}
return ENV.API_KEY.length > 0;
}
};

Expand Down Expand Up @@ -719,8 +728,7 @@ function readableStreamAsyncIterable(stream) {

// src/openai.js
/**
 * Whether the OpenAI backend can be used for the given context.
 * Delegates to the context so the key-resolution rules live in one place,
 * and always yields a real boolean (the old `key && key.length > 0` form
 * could leak a truthy string to callers).
 * @param {object} context - chat context exposing hasValidOpenAIKey()
 * @return {boolean}
 */
function isOpenAIEnable(context) {
  // Dead code after the first return removed; the delegation is the whole check.
  return context.hasValidOpenAIKey();
}
async function requestCompletionsFromOpenAI(message, history, context, onStream) {
const key = context.openAIKeyFromContext();
Expand Down Expand Up @@ -1588,6 +1596,15 @@ async function loadHistory(key, context) {
}
return { real: history, original };
}
/**
 * Pick the completions backend for this context.
 * OpenAI takes precedence over Workers AI; null when neither is enabled.
 * @param {object} context
 * @return {function|null}
 */
function loadLLM(context) {
  // Ordered (guard, backend) pairs — first enabled backend wins.
  const backends = [
    [isOpenAIEnable, requestCompletionsFromOpenAI],
    [isWorkersAIEnable, requestCompletionsFromWorkersAI],
  ];
  for (const [enabled, llm] of backends) {
    if (enabled(context)) {
      return llm;
    }
  }
  return null;
}
async function requestCompletionsFromLLM(text, context, llm, modifier, onStream) {
const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
Expand Down Expand Up @@ -1631,9 +1648,9 @@ async function chatWithLLM(text, context, modifier) {
}
};
}
let llm = requestCompletionsFromOpenAI;
if (isWorkersAIEnable(context)) {
llm = requestCompletionsFromWorkersAI;
const llm = loadLLM(context);
if (llm === null) {
return sendMessageToTelegramWithContext(context)("LLM is not enable");
}
const answer = await requestCompletionsFromLLM(text, context, llm, modifier, onStream);
context.CURRENT_CHAT_CONTEXT.parse_mode = parseMode;
Expand Down Expand Up @@ -2107,10 +2124,6 @@ async function msgIgnoreOldMessage(message, context) {
return null;
}
async function msgCheckEnvIsReady(message, context) {
const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context);
if (!llmEnable) {
return sendMessageToTelegramWithContext(context)("LLM Not Set");
}
if (!DATABASE) {
return sendMessageToTelegramWithContext(context)("DATABASE Not Set");
}
Expand Down
2 changes: 1 addition & 1 deletion dist/timestamp
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1696745907
1696754208
26 changes: 20 additions & 6 deletions src/chat.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import {
import {DATABASE, ENV} from './env.js';
// eslint-disable-next-line no-unused-vars
import {Context} from './context.js';
import {requestCompletionsFromOpenAI} from './openai.js';
import {isOpenAIEnable, requestCompletionsFromOpenAI} from './openai.js';
import {tokensCounter} from './utils.js';
import {isWorkersAIEnable, requestCompletionsFromWorkersAI} from './workers-ai.js';

Expand Down Expand Up @@ -105,6 +105,21 @@ async function loadHistory(key, context) {
}


/**
 * Resolve which completions backend should serve this context.
 * OpenAI is preferred; Workers AI is the fallback; null means none enabled.
 * @param {Context} context
 * @return {function}
 */
function loadLLM(context) {
  // Ordered (guard, backend) pairs — first enabled backend wins.
  const backends = [
    [isOpenAIEnable, requestCompletionsFromOpenAI],
    [isWorkersAIEnable, requestCompletionsFromWorkersAI],
  ];
  for (const [enabled, llm] of backends) {
    if (enabled(context)) {
      return llm;
    }
  }
  return null;
}

/**
*
* @param {string} text
Expand All @@ -114,7 +129,7 @@ async function loadHistory(key, context) {
* @param {function} onStream
* @return {Promise<string>}
*/
export async function requestCompletionsFromLLM(text, context, llm, modifier, onStream) {
async function requestCompletionsFromLLM(text, context, llm, modifier, onStream) {
const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
let history = await loadHistory(historyKey, context);
Expand Down Expand Up @@ -167,11 +182,10 @@ export async function chatWithLLM(text, context, modifier) {
};
}

let llm = requestCompletionsFromOpenAI;
if (isWorkersAIEnable(context)) {
llm = requestCompletionsFromWorkersAI;
const llm = loadLLM(context);
if (llm === null) {
return sendMessageToTelegramWithContext(context)('LLM is not enable');
}

const answer = await requestCompletionsFromLLM(text, context, llm, modifier, onStream);
context.CURRENT_CHAT_CONTEXT.parse_mode = parseMode;
if (ENV.SHOW_REPLY_BUTTON && context.CURRENT_CHAT_CONTEXT.message_id) {
Expand Down
25 changes: 17 additions & 8 deletions src/context.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import {CONST, DATABASE, ENV} from './env.js';
// import {TelegramMessage} from './type.d.ts';

/**
* 上下文信息
Expand Down Expand Up @@ -191,7 +192,6 @@ export class Context {
}

/**
*
* @return {string|null}
*/
openAIKeyFromContext() {
Expand All @@ -201,13 +201,22 @@ export class Context {
if (this.USER_CONFIG.OPENAI_API_KEY) {
return this.USER_CONFIG.OPENAI_API_KEY;
}
if (Array.isArray(ENV.API_KEY)) {
if (ENV.API_KEY.length === 0) {
return null;
}
return ENV.API_KEY[Math.floor(Math.random() * ENV.API_KEY.length)];
} else {
return ENV.API_KEY;
if (ENV.API_KEY.length === 0) {
return null;
}
return ENV.API_KEY[Math.floor(Math.random() * ENV.API_KEY.length)];
}

/**
* @return {boolean}
*/
hasValidOpenAIKey() {
if (ENV.AZURE_COMPLETIONS_API) {
return ENV.AZURE_API_KEY !== null;
}
if (this.USER_CONFIG.OPENAI_API_KEY) {
return true;
}
return ENV.API_KEY.length > 0;
}
}
33 changes: 22 additions & 11 deletions src/env.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,17 @@
/**
* @typedef {Object} Environment
* @property {null | string} API_KEY
*
* @property {null | string[]} API_KEY
* @property {string[]} TELEGRAM_AVAILABLE_TOKENS
*
* @property {boolean} I_AM_A_GENEROUS_PERSON
* @property {string[]} CHAT_WHITE_LIST
*
* @property {string[]} TELEGRAM_BOT_NAME
* @property {string[]} CHAT_GROUP_WHITE_LIST
* @property {boolean} GROUP_CHAT_BOT_ENABLE
* @property {boolean} GROUP_CHAT_BOT_SHARE_MODE
*
* @property {string} CHAT_MODEL
* @property {boolean} AUTO_TRIM_HISTORY
* @property {number} MAX_HISTORY_LENGTH
Expand All @@ -16,27 +20,31 @@
* @property {string} GPT3_TOKENS_COUNT_REPO
* @property {string} SYSTEM_INIT_MESSAGE
* @property {string} SYSTEM_INIT_MESSAGE_ROLE
*
* @property {boolean} ENABLE_USAGE_STATISTICS
* @property {string[]} HIDE_COMMAND_BUTTONS
* @property {boolean} SHOW_REPLY_BUTTON
*
* @property {string} UPDATE_BRANCH
* @property {number} BUILD_TIMESTAMP
* @property {string} BUILD_VERSION
*
* @property {null | I18n} I18N
* @property {string} LANGUAGE
*
* @property {boolean} STREAM_MODE
* @property {boolean} SAFE_MODE
* @property {boolean} DEBUG_MODE
* @property {boolean} DEV_MODE
*
* @property {string} TELEGRAM_API_DOMAIN
* @property {string} OPENAI_API_DOMAIN
* @property {string} AZURE_API_KEY
* @property {string} AZURE_COMPLETIONS_API
*
* @property {null | string} AZURE_API_KEY
* @property {null | string} AZURE_COMPLETIONS_API
*
* @property {string} WORKERS_AI_MODEL
*/
/**
* @type {Environment}
*/
export const ENV = {

// OpenAI API Key
Expand Down Expand Up @@ -128,7 +136,7 @@ export let API_GUARD = null;
export let AI = null;

const ENV_VALUE_TYPE = {
API_KEY: [],
API_KEY: 'array',
AZURE_API_KEY: 'string',
AZURE_COMPLETIONS_API: 'string',
};
Expand All @@ -147,9 +155,11 @@ export function initEnv(env, i18n) {
DATABASE = env.DATABASE;
API_GUARD = env.API_GUARD;
AI = env.AI;
for (const key in ENV) {

for (const key of Object.keys(ENV)) {
const t = ENV_VALUE_TYPE[key]?ENV_VALUE_TYPE[key]:(typeof ENV[key]);
if (env[key]) {
switch (ENV_VALUE_TYPE[key]?typeof ENV_VALUE_TYPE[key]:(typeof ENV[key])) {
switch (t) {
case 'number':
ENV[key] = parseInt(env[key]) || ENV[key];
break;
Expand All @@ -159,11 +169,12 @@ export function initEnv(env, i18n) {
case 'string':
ENV[key] = env[key];
break;
case 'array':
ENV[key] = env[key].split(',');
break;
case 'object':
if (Array.isArray(ENV[key])) {
ENV[key] = env[key].split(',');
} else if (ENV_VALUE_TYPE[key] && Array.isArray(ENV_VALUE_TYPE[key])) {
ENV[key] = env[key].split(',');
} else {
try {
ENV[key] = JSON.parse(env[key]);
Expand Down
6 changes: 0 additions & 6 deletions src/message.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,6 @@ import {sendMessageToTelegramWithContext} from './telegram.js';
import {handleCommandMessage} from './command.js';
import {errorToString} from './utils.js';
import {chatWithLLM} from './chat.js';
import {isOpenAIEnable} from './openai.js';
import {isWorkersAIEnable} from './workers-ai.js';
// import {TelegramMessage, TelegramWebhookRequest} from './type.d.ts';


Expand Down Expand Up @@ -79,10 +77,6 @@ async function msgIgnoreOldMessage(message, context) {
* @return {Promise<Response>}
*/
async function msgCheckEnvIsReady(message, context) {
const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context);
if (!llmEnable) {
return sendMessageToTelegramWithContext(context)('LLM Not Set');
}
if (!DATABASE) {
return sendMessageToTelegramWithContext(context)('DATABASE Not Set');
}
Expand Down
3 changes: 1 addition & 2 deletions src/openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,7 @@ import {Stream} from './vendors/stream.js';
* @param {Context} context
*/
/**
 * Whether the OpenAI backend can be used for the given context.
 * Delegates to the context so the key-resolution rules live in one place,
 * and always yields a real boolean (the old `key && key.length > 0` form
 * could leak a truthy string to callers).
 * @param {Context} context - chat context exposing hasValidOpenAIKey()
 * @return {boolean}
 */
export function isOpenAIEnable(context) {
  // Dead code after the first return removed; the delegation is the whole check.
  return context.hasValidOpenAIKey();
}


Expand Down
Loading

0 comments on commit cca7cf8

Please sign in to comment.