commit 3b776b6639 (parent dd8f2744bf)
Author: Archer
Date: 2023-10-17 10:00:32 +08:00
Committed by: GitHub
98 changed files with 1525 additions and 983 deletions

View File

@@ -3,7 +3,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import { sseErrRes } from '@/service/response';
import { sseResponseEventEnum } from '@/constants/chat';
-import { sseResponse } from '@/service/utils/tools';
+import { responseWrite } from '@fastgpt/common/tools/stream';
import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { pushChatBill } from '@/service/common/bill/push';
@@ -59,12 +59,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
detail: true
});
-sseResponse({
+responseWrite({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
-sseResponse({
+responseWrite({
res,
event: sseResponseEventEnum.appStreamResponse,
data: JSON.stringify(responseData)
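
For reference, a minimal sketch of what the shared responseWrite helper in @fastgpt/common/tools/stream might look like; its implementation is not part of this commit, and the signature below is inferred from the call sites above:

// Hypothetical sketch only; inferred from the responseWrite({ res, event, data }) calls in this diff.
import type { NextApiResponse } from 'next';

export function responseWrite({
  res,
  event,
  data
}: {
  res: NextApiResponse;
  event?: string;
  data: string;
}) {
  // Emit one Server-Sent Events frame: an optional event name, then the data payload.
  if (event) {
    res.write(`event: ${event}\n`);
  }
  res.write(`data: ${data}\n\n`);
}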

View File

@@ -6,7 +6,8 @@ import { authUser } from '@fastgpt/support/user/auth';
import { ChatItemType } from '@/types/chat';
import { authApp } from '@/service/utils/auth';
import type { ChatSchema } from '@/types/mongoSchema';
-import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
+import { getGuideModule } from '@/components/ChatBox/utils';
+import { getChatModelNameListByModules } from '@/service/core/app/module';
import { TaskResponseKeyEnum } from '@/constants/chat';
/* Initialize my chat window; requires authentication */
@@ -83,7 +84,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
appId,
app: {
userGuideModule: getGuideModule(app.modules),
-chatModels: getChatModelNameList(app.modules),
+chatModels: getChatModelNameListByModules(app.modules),
name: app.name,
avatar: app.avatar,
intro: app.intro,
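
A plausible sketch of the relocated helper getChatModelNameListByModules in '@/service/core/app/module' follows; its body is not shown in this commit, and the 'model' input key and field names are assumptions for illustration:

// Illustrative sketch only; the real helper lives in '@/service/core/app/module'.
import type { AppModuleItemType } from '@/types/app';

export function getChatModelNameListByModules(modules: AppModuleItemType[]): string[] {
  // Collect the model ids configured on the app's modules.
  const modelValues = modules
    .flatMap((module) => module.inputs || [])
    .filter((input) => input.key === 'model' && typeof input.value === 'string')
    .map((input) => input.value as string);

  // Resolve the ids to display names from the globally loaded chat model list.
  return modelValues
    .map((model) => global.chatModels?.find((item) => item.model === model)?.name)
    .filter((name): name is string => Boolean(name));
}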

View File

@@ -12,6 +12,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true, authApiKey: true });
+const qaModel = global.qaModels[0];
const { _id } = await Bill.create({
userId,
appName: name,
@@ -25,7 +27,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
},
{
moduleName: 'QA 拆分',
-model: global.qaModel.name,
+model: qaModel?.name,
amount: 0,
tokenLen: 0
}

View File

@@ -4,7 +4,6 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
import type { CreateQuestionGuideParams } from '@/global/core/api/aiReq.d';
import { pushQuestionGuideBill } from '@/service/common/bill/push';
-import { defaultQGModel } from '@/pages/api/system/getInitData';
import { createQuestionGuide } from '@fastgpt/core/ai/functions/createQuestionGuide';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -23,9 +22,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error('user not found');
}
+const qgModel = global.qgModels[0];
const { result, tokens } = await createQuestionGuide({
messages,
-model: (global.qgModel || defaultQGModel).model
+model: qgModel.model
});
jsonRes(res, {

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
-import { getVectorModel } from '@/service/utils/data';
+import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { connectToDatabase } from '@/service/mongo';

View File

@@ -8,7 +8,7 @@ import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import { PgClient } from '@/service/pg';
import { addLog } from '@/service/utils/tools';
-import { responseWriteController } from '@/service/common/stream';
+import { responseWriteController } from '@fastgpt/common/tools/stream';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {

View File

@@ -7,10 +7,10 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { PgDatasetTableName } from '@/constants/plugin';
import { insertData2Dataset, PgClient } from '@/service/pg';
-import { getVectorModel } from '@/service/utils/data';
+import { getVectorModel } from '@/service/core/ai/model';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';

View File

@@ -5,15 +5,15 @@ import { connectToDatabase, TrainingData } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { authUser } from '@fastgpt/support/user/auth';
import { authDataset } from '@/service/utils/auth';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
-import { getVectorModel } from '@/service/utils/data';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';
import type { PushDataResponse } from '@/global/core/api/datasetRes.d';
import type { PushDataProps } from '@/global/core/api/datasetReq.d';
import { authFileIdValid } from '@/service/dataset/auth';
+import { getVectorModel } from '@/service/core/ai/model';
const modeMap = {
[TrainingModeEnum.index]: true,
@@ -71,7 +71,7 @@ export async function pushDataToKb({
if (mode === TrainingModeEnum.index) {
const vectorModel = (await MongoDataset.findById(kbId, 'vectorModel'))?.vectorModel;
-return getVectorModel(vectorModel || global.vectorModels[0].model);
+return getVectorModel(vectorModel);
}
return global.vectorModels[0];
})()
@@ -79,7 +79,7 @@ export async function pushDataToKb({
const modeMaxToken = {
[TrainingModeEnum.index]: vectorModel.maxToken * 1.5,
-[TrainingModeEnum.qa]: global.qaModel.maxToken * 0.8
+[TrainingModeEnum.qa]: global.qaModels[0].maxToken * 0.8
};
// filter repeat or equal content
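
The explicit fallback (vectorModel || global.vectorModels[0].model) disappears in the hunk above, which suggests the relocated helper resolves a default on its own. A minimal sketch of that assumed behavior for getVectorModel in '@/service/core/ai/model', which is not shown in this commit:

// Assumed behavior only; the actual helper is defined in '@/service/core/ai/model'.
export function getVectorModel(model?: string) {
  // Fall back to the first configured vector model when the stored id is missing or unknown.
  return global.vectorModels.find((item) => item.model === model) ?? global.vectorModels[0];
}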

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
import { getVector } from '@/pages/api/openapi/plugin/vector';

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
-import { getVectorModel } from '@/service/utils/data';
+import { getVectorModel } from '@/service/core/ai/model';
import { MongoDataset } from '@fastgpt/core/dataset/schema';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@fastgpt/support/user/auth';
-import { getVectorModel } from '@/service/utils/data';
+import { getVectorModel } from '@/service/core/ai/model';
import type { DatasetsItemType } from '@/types/core/dataset';
import { DatasetTypeEnum } from '@fastgpt/core/dataset/constant';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/support/user/auth';
import { PgClient } from '@/service/pg';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getVector } from '../../openapi/plugin/vector';
import { PgDatasetTableName } from '@/constants/plugin';
import { MongoDataset } from '@fastgpt/core/dataset/schema';

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authBalanceByUid, authUser } from '@fastgpt/support/user/auth';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { getAIApi } from '@fastgpt/core/ai/config';
import { pushGenerateVectorBill } from '@/service/common/bill/push';
import { connectToDatabase } from '@/service/mongo';

View File

@@ -1,5 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
-import { withNextCors } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import ChatCompletion from '@/pages/api/v1/chat/completions';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -6,8 +6,9 @@ import { MongoUser } from '@fastgpt/support/user/schema';
import type { InitShareChatResponse } from '@/global/support/api/outLinkRes.d';
import { authApp } from '@/service/utils/auth';
import { HUMAN_ICON } from '@/constants/chat';
-import { getChatModelNameList, getGuideModule } from '@/components/ChatBox/utils';
+import { getGuideModule } from '@/components/ChatBox/utils';
import { authShareChatInit } from '@fastgpt/support/outLink/auth';
+import { getChatModelNameListByModules } from '@/service/core/app/module';
/* init share chat window */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -51,7 +52,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
userAvatar: user?.avatar || HUMAN_ICON,
app: {
userGuideModule: getGuideModule(app.modules),
-chatModels: getChatModelNameList(app.modules),
+chatModels: getChatModelNameListByModules(app.modules),
name: app.name,
avatar: app.avatar,
intro: app.intro

View File

@@ -4,10 +4,23 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { readFileSync } from 'fs';
import type { InitDateResponse } from '@/global/common/api/systemRes';
-import type { VectorModelItemType, FunctionModelItemType } from '@/types/model';
import { formatPrice } from '@fastgpt/common/bill';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
import { initHttpAgent } from '@fastgpt/core/init';
+import {
+defaultChatModels,
+defaultQAModels,
+defaultCQModels,
+defaultExtractModels,
+defaultQGModels,
+defaultVectorModels
+} from '@/constants/model';
+import {
+ChatModelItemType,
+FunctionModelItemType,
+LLMModelItemType,
+VectorModelItemType
+} from '@/types/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
getInitConfig();
@@ -17,7 +30,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
data: {
feConfigs: global.feConfigs,
chatModels: global.chatModels,
-qaModel: global.qaModel,
+qaModels: global.qaModels,
+cqModels: global.cqModels,
+extractModels: global.extractModels,
+qgModels: global.qgModels,
vectorModels: global.vectorModels,
priceMd: global.priceMd,
systemVersion: global.systemVersion || '0.0.0'
@@ -42,72 +58,6 @@ const defaultFeConfigs: FeConfigsType = {
},
scripts: []
};
-const defaultChatModels = [
-{
-model: 'gpt-3.5-turbo',
-name: 'GPT35-4k',
-contextMaxToken: 4000,
-quoteMaxToken: 2400,
-maxTemperature: 1.2,
-price: 0
-},
-{
-model: 'gpt-3.5-turbo-16k',
-name: 'GPT35-16k',
-contextMaxToken: 16000,
-quoteMaxToken: 8000,
-maxTemperature: 1.2,
-price: 0
-},
-{
-model: 'gpt-4',
-name: 'GPT4-8k',
-contextMaxToken: 8000,
-quoteMaxToken: 4000,
-maxTemperature: 1.2,
-price: 0
-}
-];
-const defaultQAModel = {
-model: 'gpt-3.5-turbo-16k',
-name: 'GPT35-16k',
-maxToken: 16000,
-price: 0
-};
-export const defaultExtractModel: FunctionModelItemType = {
-model: 'gpt-3.5-turbo-16k',
-name: 'GPT35-16k',
-maxToken: 16000,
-price: 0,
-prompt: '',
-functionCall: true
-};
-export const defaultCQModel: FunctionModelItemType = {
-model: 'gpt-3.5-turbo-16k',
-name: 'GPT35-16k',
-maxToken: 16000,
-price: 0,
-prompt: '',
-functionCall: true
-};
-export const defaultQGModel: FunctionModelItemType = {
-model: 'gpt-3.5-turbo',
-name: 'FastAI-4k',
-maxToken: 4000,
-price: 1.5,
-prompt: '',
-functionCall: false
-};
-const defaultVectorModels: VectorModelItemType[] = [
-{
-model: 'text-embedding-ada-002',
-name: 'Embedding-2',
-price: 0,
-defaultToken: 500,
-maxToken: 3000
-}
-];
export function initGlobal() {
// init tikToken
@@ -127,7 +77,16 @@ export function getInitConfig() {
const filename =
process.env.NODE_ENV === 'development' ? 'data/config.local.json' : '/app/data/config.json';
-const res = JSON.parse(readFileSync(filename, 'utf-8'));
+const res = JSON.parse(readFileSync(filename, 'utf-8')) as {
+FeConfig: FeConfigsType;
+SystemParams: SystemEnvType;
+ChatModels: ChatModelItemType[];
+QAModels: LLMModelItemType[];
+CQModels: FunctionModelItemType[];
+ExtractModels: FunctionModelItemType[];
+QGModels: LLMModelItemType[];
+VectorModels: VectorModelItemType[];
+};
console.log(`System Version: ${global.systemVersion}`);
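
Given the typed parse above, the config file (data/config.local.json in development, /app/data/config.json in production) is expected to carry plural, array-valued model keys alongside FeConfig and SystemParams; any omitted key falls back to its default. An illustrative minimal shape, with values borrowed from the defaults this commit removes from this file:

{
  "FeConfig": {},
  "SystemParams": {},
  "ChatModels": [
    {
      "model": "gpt-3.5-turbo",
      "name": "GPT35-4k",
      "contextMaxToken": 4000,
      "quoteMaxToken": 2400,
      "maxTemperature": 1.2,
      "price": 0
    }
  ],
  "QAModels": [
    { "model": "gpt-3.5-turbo-16k", "name": "GPT35-16k", "maxToken": 16000, "price": 0 }
  ],
  "VectorModels": [
    {
      "model": "text-embedding-ada-002",
      "name": "Embedding-2",
      "price": 0,
      "defaultToken": 500,
      "maxToken": 3000
    }
  ]
}
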
@@ -137,11 +96,13 @@ export function getInitConfig() {
? { ...defaultSystemEnv, ...res.SystemParams }
: defaultSystemEnv;
global.feConfigs = res.FeConfig ? { ...defaultFeConfigs, ...res.FeConfig } : defaultFeConfigs;
global.chatModels = res.ChatModels || defaultChatModels;
-global.qaModel = res.QAModel || defaultQAModel;
-global.extractModel = res.ExtractModel || defaultExtractModel;
-global.cqModel = res.CQModel || defaultCQModel;
-global.qgModel = res.QGModel || defaultQGModel;
+global.qaModels = res.QAModels || defaultQAModels;
+global.cqModels = res.CQModels || defaultCQModels;
+global.extractModels = res.ExtractModels || defaultExtractModels;
+global.qgModels = res.QGModels || defaultQGModels;
global.vectorModels = res.VectorModels || defaultVectorModels;
} catch (error) {
setDefaultData();
@@ -152,13 +113,27 @@ export function getInitConfig() {
export function setDefaultData() {
global.systemEnv = defaultSystemEnv;
global.feConfigs = defaultFeConfigs;
global.chatModels = defaultChatModels;
-global.qaModel = defaultQAModel;
+global.qaModels = defaultQAModels;
+global.cqModels = defaultCQModels;
+global.extractModels = defaultExtractModels;
+global.qgModels = defaultQGModels;
global.vectorModels = defaultVectorModels;
-global.extractModel = defaultExtractModel;
-global.cqModel = defaultCQModel;
-global.qgModel = defaultQGModel;
global.priceMd = '';
console.log('use default config');
+console.log({
+feConfigs: defaultFeConfigs,
+systemEnv: defaultSystemEnv,
+chatModels: defaultChatModels,
+qaModels: defaultQAModels,
+cqModels: defaultCQModels,
+extractModels: defaultExtractModels,
+qgModels: defaultQGModels,
+vectorModels: defaultVectorModels
+});
}
export function getSystemVersion() {
@@ -187,10 +162,18 @@ ${global.vectorModels
${global.chatModels
?.map((item) => `| 对话-${item.name} | ${formatPrice(item.price, 1000)} |`)
.join('\n')}
-| 文件QA拆分 | ${formatPrice(global.qaModel?.price, 1000)} |
-| 高级编排 - 问题分类 | ${formatPrice(global.cqModel?.price, 1000)} |
-| 高级编排 - 内容提取 | ${formatPrice(global.extractModel?.price, 1000)} |
-| 下一步指引 | ${formatPrice(global.qgModel?.price, 1000)} |
+${global.qaModels
+?.map((item) => `| 文件QA拆分-${item.name} | ${formatPrice(item.price, 1000)} |`)
+.join('\n')}
+${global.cqModels
+?.map((item) => `| 问题分类-${item.name} | ${formatPrice(item.price, 1000)} |`)
+.join('\n')}
+${global.extractModels
+?.map((item) => `| 内容提取-${item.name} | ${formatPrice(item.price, 1000)} |`)
+.join('\n')}
+${global.qgModels
+?.map((item) => `| 下一步指引-${item.name} | ${formatPrice(item.price, 1000)} |`)
+.join('\n')}
`;
console.log(global.priceMd);
}

View File

@@ -2,8 +2,8 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { MongoUser } from '@fastgpt/support/user/schema';
-import { setCookie } from '@/service/utils/tools';
-import { generateToken } from '@fastgpt/support/user/tools';
+import { setCookie } from '@fastgpt/support/user/auth';
+import { generateToken } from '@fastgpt/support/user/auth';
import { connectToDatabase } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {

View File

@@ -1,7 +1,7 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
-import { clearCookie } from '@/service/utils/tools';
+import { clearCookie } from '@fastgpt/support/user/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {

View File

@@ -3,7 +3,8 @@ import { authApp } from '@/service/utils/auth';
import { authUser } from '@fastgpt/support/user/auth';
import { AuthUserTypeEnum } from '@fastgpt/support/user/auth';
import { sseErrRes, jsonRes } from '@/service/response';
-import { addLog, withNextCors } from '@/service/utils/tools';
+import { addLog } from '@/service/utils/tools';
+import { withNextCors } from '@fastgpt/common/tools/nextjs';
import { ChatRoleEnum, ChatSourceEnum, sseResponseEventEnum } from '@/constants/chat';
import {
dispatchHistory,
@@ -21,7 +22,7 @@ import type { MessageItemType } from '@/types/core/chat/type';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/service/utils/chat/saveChat';
-import { sseResponse } from '@/service/utils/tools';
+import { responseWrite } from '@fastgpt/common/tools/stream';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { FlowModuleTypeEnum, initModuleType } from '@/constants/flow';
import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
@@ -217,7 +218,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const feResponseData = isOwner ? responseData : selectShareResponse({ responseData });
if (stream) {
-sseResponse({
+responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
@@ -225,14 +226,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
finish_reason: 'stop'
})
});
-sseResponse({
+responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: '[DONE]'
});
if (responseDetail && detail) {
-sseResponse({
+responseWrite({
res,
event: sseResponseEventEnum.appStreamResponse,
data: JSON.stringify(feResponseData)
@@ -323,13 +324,16 @@ export async function dispatchModules({
let chatAnswerText = ''; // AI answer
let runningTime = Date.now();
-function pushStore({
-answerText = '',
-responseData
-}: {
-answerText?: string;
-responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
-}) {
+function pushStore(
+{ inputs = [] }: RunningModuleItemType,
+{
+answerText = '',
+responseData
+}: {
+answerText?: string;
+responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
+}
+) {
const time = Date.now();
if (responseData) {
if (Array.isArray(responseData)) {
@@ -342,7 +346,12 @@ export async function dispatchModules({
}
}
runningTime = time;
-chatAnswerText += answerText;
+const isResponseAnswerText =
+inputs.find((item) => item.key === SystemInputEnum.isResponseAnswerText)?.value ?? true;
+if (isResponseAnswerText) {
+chatAnswerText += answerText;
+}
}
function moduleInput(
module: RunningModuleItemType,
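
The reworked pushStore receives the running module so a module can opt its answer text out of the accumulated chatAnswerText via the SystemInputEnum.isResponseAnswerText input, defaulting to true when the input is absent. A self-contained sketch of that gating rule (type and key string simplified for illustration; the real code reads the enum key as shown above):

// Simplified, illustrative sketch of the opt-out check used by pushStore.
type ModuleInput = { key: string; value?: unknown };

// Returns true unless the module explicitly sets the flag to false.
function shouldAccumulateAnswerText(inputs: ModuleInput[]): boolean {
  return (inputs.find((item) => item.key === 'isResponseAnswerText')?.value as boolean) ?? true;
}

// Usage inside pushStore (conceptually):
// if (shouldAccumulateAnswerText(module.inputs)) chatAnswerText += answerText;
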
@@ -376,7 +385,7 @@ export async function dispatchModules({
module: RunningModuleItemType,
result: Record<string, any> = {}
): Promise<any> {
-pushStore(result);
+pushStore(module, result);
return Promise.all(
module.outputs.map((outputItem) => {
if (result[outputItem.key] === undefined) return;
@@ -505,7 +514,7 @@ export function responseStatus({
name?: string;
}) {
if (!name) return;
-sseResponse({
+responseWrite({
res,
event: sseResponseEventEnum.moduleStatus,
data: JSON.stringify({