commit f6552d0d4f (parent 38d4db5d5f)
Author: Archer
Date: 2023-09-26 14:31:37 +08:00
Committed by: GitHub

48 changed files with 536 additions and 399 deletions

View File

@@ -4,8 +4,8 @@ import { TrainingModeEnum } from '@/constants/plugin';
 import { ERROR_ENUM } from '../errorCode';
 import { sendInform } from '@/pages/api/user/inform/send';
 import { authBalanceByUid } from '../utils/auth';
-import { axiosConfig, getAIChatApi } from '../lib/openai';
-import { ChatCompletionRequestMessage } from 'openai';
+import { axiosConfig, getAIChatApi } from '@fastgpt/core/aiApi/config';
+import type { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';
 import { addLog } from '../utils/tools';
 import { splitText2Chunks } from '@/utils/file';
 import { replaceVariable } from '@/utils/common/tools/text';

View File

@@ -1,28 +0,0 @@
-import { UserModelSchema } from '@/types/mongoSchema';
-import { Configuration, OpenAIApi } from 'openai';
-
-export const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
-export const baseUrl = process.env.ONEAPI_URL || openaiBaseUrl;
-
-export const systemAIChatKey = process.env.CHAT_API_KEY || '';
-
-export const getAIChatApi = (props?: UserModelSchema['openaiAccount']) => {
-  return new OpenAIApi(
-    new Configuration({
-      basePath: props?.baseUrl || baseUrl,
-      apiKey: props?.key || systemAIChatKey
-    })
-  );
-};
-
-/* openai axios config */
-export const axiosConfig = (props?: UserModelSchema['openaiAccount']) => {
-  return {
-    baseURL: props?.baseUrl || baseUrl, // this only takes effect for non-npm modules
-    httpsAgent: global.httpsAgent,
-    headers: {
-      Authorization: `Bearer ${props?.key || systemAIChatKey}`,
-      auth: process.env.OPENAI_BASE_URL_AUTH || ''
-    }
-  };
-};
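The module deleted above reappears as `@fastgpt/core/aiApi/config`, which is where every updated import in this commit now points. A minimal sketch of a caller against the new path, assuming the relocated module keeps the deleted file's exports (the model name and message are illustrative only):

```ts
import { axiosConfig, getAIChatApi } from '@fastgpt/core/aiApi/config';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';

async function demo() {
  const messages: ChatCompletionRequestMessage[] = [{ role: 'user', content: 'hi' }];
  // getAIChatApi wraps the openai v3 client; axiosConfig supplies the base URL,
  // auth headers, and the shared proxy agent.
  const chatAPI = getAIChatApi();
  const { data } = await chatAPI.createChatCompletion(
    { model: 'gpt-3.5-turbo', messages },
    { ...axiosConfig() }
  );
  return data.choices?.[0]?.message?.content;
}
```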

View File

@@ -56,6 +56,9 @@ const AppSchema = new Schema({
     type: Array,
     default: []
   },
+  inited: {
+    type: Boolean
+  },
   // deprecated
   chat: Object
 });
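The new optional `inited` flag carries no behavior by itself. A hypothetical consumer (not from this commit) would treat apps without it as needing one-time setup:

```ts
// Hypothetical usage inside an async handler; `App` is the model compiled
// from AppSchema above, and `appId` comes from the request.
const app = await App.findById(appId);
if (!app?.inited) {
  // ...run one-time initialization for this app...
  await App.findByIdAndUpdate(appId, { inited: true });
}
```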

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
 import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
-import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
 import type { ClassifyQuestionAgentItemType } from '@/types/app';
 import { SystemInputEnum } from '@/constants/app';
 import { SpecialInputKeyEnum } from '@/constants/flow';
@@ -46,7 +46,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
     return completions(props);
   })();
 
-  const result = agents.find((item) => item.key === arg?.type) || agents[0];
+  const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
 
   return {
     [result.key]: 1,
@@ -120,12 +120,21 @@ async function functionCall({
     }
   );
 
-  const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '');
-
-  return {
-    arg,
-    tokens: response.data.usage?.total_tokens || 0
-  };
+  try {
+    const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '');
+
+    return {
+      arg,
+      tokens: response.data.usage?.total_tokens || 0
+    };
+  } catch (error) {
+    console.log('Your model may not support function_call');
+
+    return {
+      arg: {},
+      tokens: 0
+    };
+  }
 }
 
 async function completions({
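Two behavior changes ride along with the import swap in this file: the classifier now falls back to the last agent rather than the first when the model's answer matches no agent key, and the `function_call` arguments are parsed inside a try/catch so a model without function-calling support degrades to an empty result instead of throwing. The same defensive pattern as a standalone sketch (the helper name is ours, not from the codebase):

```ts
// Parse function_call arguments, falling back to an empty object when the
// model returned none or returned invalid JSON (note JSON.parse('') throws).
function safeParseFunctionCallArgs(raw?: string): Record<string, any> {
  try {
    return JSON.parse(raw || '');
  } catch (error) {
    console.log('Your model may not support function_call');
    return {};
  }
}
```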

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
 import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
-import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
 import type { ContextExtractAgentItemType } from '@/types/app';
 import { ContextExtractEnum } from '@/constants/flow/flowField';
 import { FlowModuleTypeEnum } from '@/constants/flow';

View File

@@ -5,13 +5,13 @@ import type { ChatHistoryItemResType } from '@/types/chat';
 import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
 import { SSEParseData, parseStreamChunk } from '@/utils/sse';
 import { textAdaptGptResponse } from '@/utils/adapt';
-import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
 import { TaskResponseKeyEnum } from '@/constants/chat';
 import { getChatModel } from '@/service/utils/data';
 import { countModelPrice } from '@/service/common/bill/push';
 import { ChatModelItemType } from '@/types/model';
 import { textCensor } from '@/api/service/plugins';
-import { ChatCompletionRequestMessageRoleEnum } from 'openai';
+import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
 import { AppModuleItemType } from '@/types/app';
 import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';

View File

@@ -1,5 +1,4 @@
 import mongoose from 'mongoose';
-import tunnel from 'tunnel';
 import { startQueue } from './utils/tools';
 import { getInitConfig } from '@/pages/api/system/getInitData';
 import { User } from './models/user';
@@ -9,6 +8,7 @@ import { createHashPassword } from '@/utils/tools';
 import { createLogger, format, transports } from 'winston';
 import 'winston-mongodb';
 import { getTikTokenEnc } from '@/utils/common/tiktoken';
+import { initHttpAgent } from '@fastgpt/core/init';
 
 /**
  * connect MongoDB and init data
@@ -24,15 +24,6 @@ export async function connectToDatabase(): Promise<void> {
   global.vectorQueueLen = 0;
   global.sendInformQueue = [];
   global.sendInformQueueLen = 0;
 
-  // proxy obj
-  if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
-    global.httpsAgent = tunnel.httpsOverHttp({
-      proxy: {
-        host: process.env.AXIOS_PROXY_HOST,
-        port: +process.env.AXIOS_PROXY_PORT
-      }
-    });
-  }
   // logger
   initLogger();
@@ -41,6 +32,7 @@ export async function connectToDatabase(): Promise<void> {
   getInitConfig();
   // init tikToken
   getTikTokenEnc();
+  initHttpAgent();
 
   try {
     mongoose.set('strictQuery', true);
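The inline tunnel proxy setup removed from `connectToDatabase` is replaced by one `initHttpAgent()` call. A sketch of what `@fastgpt/core/init` plausibly exports, assuming it simply wraps the block deleted above (the real implementation is not part of this diff):

```ts
import tunnel from 'tunnel';

// Assumed port of the proxy setup that mongo.ts used to do inline: route
// outbound HTTPS traffic through the configured proxy, if one is set.
export function initHttpAgent() {
  if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
    global.httpsAgent = tunnel.httpsOverHttp({
      proxy: {
        host: process.env.AXIOS_PROXY_HOST,
        port: +process.env.AXIOS_PROXY_PORT
      }
    });
  }
}
```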

View File

@@ -19,6 +19,7 @@ export const connectPg = async (): Promise<Pool> => {
   global.pgClient.on('error', (err) => {
     console.log(err);
     global.pgClient?.end();
+    global.pgClient = null;
     connectPg();
   });
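Nulling `global.pgClient` matters because `connectPg` presumably short-circuits on a cached client; without the reset, callers after an error would keep receiving the dead pool. A sketch of that guard, consistent with the `global.pgClient` usage visible in the diff but not shown in it (the connection-string env name is an assumption):

```ts
import { Pool } from 'pg';

export const connectPg = async (): Promise<Pool> => {
  // Without the `global.pgClient = null` reset in the error handler, this
  // cache check would hand back the pool that just errored.
  if (global.pgClient) {
    return global.pgClient;
  }
  global.pgClient = new Pool({ connectionString: process.env.PG_URL }); // env name assumed
  // ...the 'error' handler from the diff attaches here and calls connectPg()...
  return global.pgClient;
};
```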