v4.6.9-alpha (#918)

Co-authored-by: Mufei <327958099@qq.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
This commit is contained in:
Archer
2024-03-04 00:05:25 +08:00
committed by GitHub
parent f9f0b4bffd
commit 42a8184ea0
153 changed files with 4906 additions and 4307 deletions

View File

@@ -1,21 +1,12 @@
import { initSystemConfig } from '@/pages/api/common/system/getInitData';
import { startQueue } from '@/service/utils/tools';
import { setCron } from '@fastgpt/service/common/system/cron';
import { startTrainingQueue } from '@/service/core/dataset/training/utils';
// Register all recurring cron jobs for this server process.
export const startCron = () => {
  setUpdateSystemConfigCron(); // periodic refresh of the cached system config
  setTrainingQueueCron(); // periodic kick of the dataset training queues
};
/**
 * Schedule a job that re-initialises the in-memory system config
 * every 5 minutes, so config edits in the DB propagate to this process.
 */
export const setUpdateSystemConfigCron = () => {
  const refreshConfig = () => {
    initSystemConfig();
    console.log('refresh system config');
  };
  setCron('*/5 * * * *', refreshConfig);
};
// Every minute, kick the dataset training queues so pending tasks are
// picked up even if no insert event triggered them.
// NOTE(review): both startQueue() and startTrainingQueue() are invoked
// here — confirm both are still required, since startTrainingQueue
// appears to supersede the older startQueue.
export const setTrainingQueueCron = () => {
  setCron('*/1 * * * *', () => {
    startQueue();
    startTrainingQueue();
  });
};

View File

@@ -1,3 +1,5 @@
import { getTikTokenEnc } from '@fastgpt/global/common/string/tiktoken';
import { initHttpAgent } from '@fastgpt/service/common/middle/httpAgent';
import { existsSync, readFileSync } from 'fs';
export const readConfigData = (name: string) => {
@@ -23,3 +25,15 @@ export const readConfigData = (name: string) => {
return content;
};
/**
 * One-time initialisation of process-wide globals: plugin/template
 * caches, training-queue counters, the tiktoken encoder and the shared
 * HTTP agent. Safe to call repeatedly — subsequent calls are no-ops.
 */
export function initGlobal() {
  // communityPlugins doubles as the "already initialised" marker.
  if (global.communityPlugins) return;

  global.communityPlugins = [];
  global.simpleModeTemplates = [];

  // Keep any existing queue counter value; otherwise start at idle (0).
  global.qaQueueLen ??= 0;
  global.vectorQueueLen ??= 0;

  // Warm up the tiktoken encoder and the HTTP agent.
  getTikTokenEnc();
  initHttpAgent();
}

View File

@@ -0,0 +1,21 @@
import { initSystemConfig } from '@/pages/api/common/system/getInitData';
import { createDatasetTrainingMongoWatch } from '@/service/core/dataset/training/utils';
import { MongoSystemConfigs } from '@fastgpt/service/common/system/config/schema';
// Start all MongoDB change-stream watchers for this process:
// config reload on system-config inserts, and training-queue kicks on
// new dataset training records.
export const startMongoWatch = async () => {
  reloadConfigWatch();
  createDatasetTrainingMongoWatch();
};
/**
 * Watch the system-config collection and re-initialise the in-memory
 * config whenever a new config document is inserted (only inserts are
 * reacted to).
 */
const reloadConfigWatch = () => {
  const changeStream = MongoSystemConfigs.watch();

  changeStream.on('change', async (change) => {
    try {
      if (change.operationType === 'insert') {
        await initSystemConfig();
        console.log('refresh system config');
      }
    } catch (error) {
      // Best-effort: keep the watcher alive, but surface the failure
      // instead of swallowing it silently.
      console.error('refresh system config error', error);
    }
  });
};

View File

@@ -35,7 +35,6 @@ import type {
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { startQueue } from '@/service/utils/tools';
export async function pushDataToTrainingQueue(
props: {
@@ -49,8 +48,6 @@ export async function pushDataToTrainingQueue(
datasetModelList: global.llmModels
});
startQueue();
return result;
}
@@ -129,7 +126,7 @@ export async function insertData2Dataset({
return {
insertId: _id,
charsLength: result.reduce((acc, cur) => acc + cur.charsLength, 0)
tokens: result.reduce((acc, cur) => acc + cur.tokens, 0)
};
}
@@ -240,11 +237,11 @@ export async function updateData2Dataset({
return result;
}
return {
charsLength: 0
tokens: 0
};
})
);
const charsLength = insertResult.reduce((acc, cur) => acc + cur.charsLength, 0);
const tokens = insertResult.reduce((acc, cur) => acc + cur.tokens, 0);
// console.log(clonePatchResult2Insert);
await mongoSessionRun(async (session) => {
// update mongo
@@ -273,7 +270,7 @@ export async function updateData2Dataset({
});
return {
charsLength
tokens
};
}
@@ -343,7 +340,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
};
};
const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
const { vectors, charsLength } = await getVectorsByText({
const { vectors, tokens } = await getVectorsByText({
model: getVectorModel(model),
input: query
});
@@ -407,7 +404,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
return {
embeddingRecallResults: formatResult,
charsLength
tokens
};
};
const fullTextRecall = async ({
@@ -552,22 +549,21 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
// multi query recall
const embeddingRecallResList: SearchDataResponseItemType[][] = [];
const fullTextRecallResList: SearchDataResponseItemType[][] = [];
let totalCharsLength = 0;
let totalTokens = 0;
await Promise.all(
queries.map(async (query) => {
const [{ charsLength, embeddingRecallResults }, { fullTextRecallResults }] =
await Promise.all([
embeddingRecall({
query,
limit: embeddingLimit
}),
fullTextRecall({
query,
limit: fullTextLimit
})
]);
totalCharsLength += charsLength;
const [{ tokens, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
embeddingRecall({
query,
limit: embeddingLimit
}),
fullTextRecall({
query,
limit: fullTextLimit
})
]);
totalTokens += tokens;
embeddingRecallResList.push(embeddingRecallResults);
fullTextRecallResList.push(fullTextRecallResults);
@@ -583,7 +579,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
).slice(0, fullTextLimit);
return {
charsLength: totalCharsLength,
tokens: totalTokens,
embeddingRecallResults: rrfEmbRecall,
fullTextRecallResults: rrfFTRecall
};
@@ -594,7 +590,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
const { embeddingLimit, fullTextLimit } = countRecallLimit();
// recall
const { embeddingRecallResults, fullTextRecallResults, charsLength } = await multiQueryRecall({
const { embeddingRecallResults, fullTextRecallResults, tokens } = await multiQueryRecall({
embeddingLimit,
fullTextLimit
});
@@ -666,7 +662,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
return {
searchRes: filterResultsByMaxTokens(scoreFilter, maxTokens),
charsLength,
tokens,
searchMode,
limit: maxTokens,
similarity,

View File

@@ -0,0 +1,31 @@
import { generateQA } from '@/service/events/generateQA';
import { generateVector } from '@/service/events/generateVector';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
/**
 * Watch the dataset-training collection; when a new training record is
 * inserted, immediately kick the worker matching its mode (QA or chunk
 * vectorisation). Other modes are ignored here.
 */
export const createDatasetTrainingMongoWatch = () => {
  const changeStream = MongoDatasetTraining.watch();

  changeStream.on('change', async (change) => {
    try {
      if (change.operationType === 'insert') {
        const fullDocument = change.fullDocument as DatasetTrainingSchemaType;
        const { mode } = fullDocument;
        if (mode === TrainingModeEnum.qa) {
          generateQA();
        } else if (mode === TrainingModeEnum.chunk) {
          generateVector();
        }
      }
    } catch (error) {
      // Keep the change stream alive, but don't hide the failure.
      console.error('dataset training watch error', error);
    }
  });
};
/**
 * Kick off the training workers: start up to `qaMaxProcess` concurrent
 * QA-generation and vector-generation loops.
 * @param fast unused in this body; kept for callers that pass it.
 */
export const startTrainingQueue = (fast?: boolean) => {
  const workerCount = global.systemEnv?.qaMaxProcess || 10;
  let started = 0;
  while (started < workerCount) {
    generateQA();
    generateVector();
    started += 1;
  }
};

View File

@@ -10,8 +10,10 @@ import { Prompt_AgentQA } from '@/global/core/prompt/agent';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
import { getLLMModel } from '@fastgpt/service/core/ai/model';
import { checkInvalidChunkAndLock, checkTeamAiPointsAndLock } from './utils';
import { countGptMessagesChars } from '@fastgpt/service/core/chat/utils';
import { checkTeamAiPointsAndLock } from './utils';
import { checkInvalidChunkAndLock } from '@fastgpt/service/core/dataset/training/utils';
import { addMinutes } from 'date-fns';
import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -20,9 +22,11 @@ const reduceQueue = () => {
};
export async function generateQA(): Promise<any> {
if (global.qaQueueLen >= global.systemEnv.qaMaxProcess) return;
const max = global.systemEnv?.qaMaxProcess || 10;
if (global.qaQueueLen >= max) return;
global.qaQueueLen++;
const startTime = Date.now();
// get training data
const {
data,
@@ -33,7 +37,7 @@ export async function generateQA(): Promise<any> {
try {
const data = await MongoDatasetTraining.findOneAndUpdate(
{
lockTime: { $lte: new Date(Date.now() - 6 * 60 * 1000) },
lockTime: { $lte: addMinutes(new Date(), -6) },
mode: TrainingModeEnum.qa
},
{
@@ -66,7 +70,7 @@ export async function generateQA(): Promise<any> {
text: data.q
};
} catch (error) {
console.log(`Get Training Data error`, error);
addLog.error(`[QA Queue] Error`, error);
return {
error: true
};
@@ -75,7 +79,7 @@ export async function generateQA(): Promise<any> {
if (done || !data) {
if (reduceQueue()) {
console.log(`QA】Task Done`);
addLog.info(`[QA Queue] Done`);
}
return;
}
@@ -83,17 +87,15 @@ export async function generateQA(): Promise<any> {
reduceQueue();
return generateQA();
}
console.log('Start QA Training');
// auth balance
if (!(await checkTeamAiPointsAndLock(data.teamId, data.tmbId))) {
console.log('balance not enough');
reduceQueue();
return generateQA();
}
addLog.info(`[QA Queue] Start`);
try {
const startTime = Date.now();
const model = getLLMModel(data.model)?.model;
const prompt = `${data.prompt || Prompt_AgentQA.description}
${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
@@ -119,8 +121,8 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
const qaArr = formatSplitText(answer, text); // 格式化后的QA对
addLog.info(`QA Training Finish`, {
time: `${(Date.now() - startTime) / 1000}s`,
addLog.info(`[QA Queue] Finish`, {
time: Date.now() - startTime,
splitLength: qaArr.length,
usage: chatResponse.usage
});
@@ -146,7 +148,7 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
pushQAUsage({
teamId: data.teamId,
tmbId: data.tmbId,
charsLength: countGptMessagesChars(messages).length,
tokens: countGptMessagesTokens(messages),
billId: data.billId,
model
});

View File

@@ -2,8 +2,10 @@ import { insertData2Dataset } from '@/service/core/dataset/data/controller';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { checkInvalidChunkAndLock, checkTeamAiPointsAndLock } from './utils';
import { delay } from '@fastgpt/global/common/system/utils';
import { checkTeamAiPointsAndLock } from './utils';
import { checkInvalidChunkAndLock } from '@fastgpt/service/core/dataset/training/utils';
import { addMinutes } from 'date-fns';
import { addLog } from '@fastgpt/service/common/system/log';
const reduceQueue = () => {
global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
@@ -13,7 +15,8 @@ const reduceQueue = () => {
/* Vector-index generation queue. Each import kicks off its own worker. */
export async function generateVector(): Promise<any> {
if (global.vectorQueueLen >= global.systemEnv.vectorMaxProcess) return;
const max = global.systemEnv?.vectorMaxProcess || 10;
if (global.vectorQueueLen >= max) return;
global.vectorQueueLen++;
const start = Date.now();
@@ -27,7 +30,7 @@ export async function generateVector(): Promise<any> {
try {
const data = await MongoDatasetTraining.findOneAndUpdate(
{
lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) },
lockTime: { $lte: addMinutes(new Date(), -1) },
mode: TrainingModeEnum.chunk
},
{
@@ -68,7 +71,7 @@ export async function generateVector(): Promise<any> {
}
};
} catch (error) {
console.log(`Get Training Data error`, error);
addLog.error(`Get Training Data error`, error);
return {
error: true
};
@@ -77,11 +80,12 @@ export async function generateVector(): Promise<any> {
if (done || !data) {
if (reduceQueue()) {
console.log(`【index】Task done`);
addLog.info(`[Vector Queue] Done`);
}
return;
}
if (error) {
addLog.error(`[Vector Queue] Error`, { error });
reduceQueue();
return generateVector();
}
@@ -92,6 +96,8 @@ export async function generateVector(): Promise<any> {
return generateVector();
}
addLog.info(`[Vector Queue] Start`);
// create vector and insert
try {
// invalid data
@@ -103,7 +109,7 @@ export async function generateVector(): Promise<any> {
}
// insert to dataset
const { charsLength } = await insertData2Dataset({
const { tokens } = await insertData2Dataset({
teamId: data.teamId,
tmbId: data.tmbId,
datasetId: data.datasetId,
@@ -119,7 +125,7 @@ export async function generateVector(): Promise<any> {
pushGenerateVectorUsage({
teamId: data.teamId,
tmbId: data.tmbId,
charsLength,
tokens,
model: data.model,
billId: data.billId
});
@@ -129,7 +135,9 @@ export async function generateVector(): Promise<any> {
reduceQueue();
generateVector();
console.log(`embedding finished, time: ${Date.now() - start}ms`);
addLog.info(`[Vector Queue] Finish`, {
time: Date.now() - start
});
} catch (err: any) {
reduceQueue();

View File

@@ -2,10 +2,6 @@ import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import { checkTeamAIPoints } from '@fastgpt/service/support/permission/teamLimit';
import { sendOneInform } from '../support/user/inform/api';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
import { addLog } from '@fastgpt/service/common/system/log';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { getErrText } from '@fastgpt/global/common/error/utils';
export const checkTeamAiPointsAndLock = async (teamId: string, tmbId: string) => {
try {
@@ -29,41 +25,3 @@ export const checkTeamAiPointsAndLock = async (teamId: string, tmbId: string) =>
return false;
}
};
/**
 * Inspect a training-queue error; if the chunk is permanently invalid
 * (bad message format / invalid request / code 500), lock the training
 * record far in the future so it is never retried.
 *
 * @param err - the raw error thrown by the worker (axios-like errors carry `response`)
 * @param errText - fallback human-readable description for logging
 * @param data - the training record being processed
 * @returns true when the record was locked (caller should skip it), false otherwise
 */
export const checkInvalidChunkAndLock = async ({
  err,
  errText,
  data
}: {
  err: any;
  errText: string;
  data: DatasetTrainingSchemaType;
}) => {
  if (err?.response) {
    // Upstream (OpenAI-style) HTTP error: log the response details.
    addLog.info(`openai error: ${errText}`, {
      status: err.response?.status,
      statusText: err.response?.statusText, // fixed typo: was `stateusText`
      data: err.response?.data
    });
  } else {
    console.log(err);
    addLog.error(getErrText(err, errText));
  }

  if (
    err?.message === 'invalid message format' ||
    err?.type === 'invalid_request_error' ||
    err?.code === 500
  ) {
    addLog.info('Lock training data');
    console.log(err);
    try {
      // "Lock" by pushing lockTime far into the future (year 2998).
      await MongoDatasetTraining.findByIdAndUpdate(data._id, {
        lockTime: new Date('2998/5/5')
      });
    } catch (error) {
      // Best-effort lock: log instead of silently ignoring DB failures.
      addLog.error('Lock training data failed', error);
    }
    return true;
  }
  return false;
};

View File

@@ -1,6 +1,7 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type {
@@ -14,7 +15,7 @@ import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { getHistories } from '../utils';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.aiModel]: string;
@@ -46,7 +47,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const chatHistories = getHistories(history, histories);
const { arg, charsLength } = await (async () => {
const { arg, tokens } = await (async () => {
if (cqModel.toolChoice) {
return toolChoice({
...props,
@@ -65,7 +66,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const { totalPoints, modelName } = formatModelChars2Points({
model: cqModel.model,
charsLength,
tokens,
modelType: ModelTypeEnum.llm
});
@@ -75,7 +76,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: userChatInput,
charsLength,
tokens,
cqList: agents,
cqResult: result.value,
contextTotalLen: chatHistories.length + 2
@@ -85,7 +86,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
tokens
}
]
};
@@ -136,6 +137,13 @@ ${systemPrompt}
required: ['type']
}
};
const tools: any = [
{
type: 'function',
function: agentFunction
}
];
const ai = getAIApi({
userKey: user.openaiAccount,
timeout: 480000
@@ -144,13 +152,8 @@ ${systemPrompt}
const response = await ai.chat.completions.create({
model: cqModel.model,
temperature: 0,
messages: [...adaptMessages],
tools: [
{
type: 'function',
function: agentFunction
}
],
messages: adaptMessages,
tools,
tool_choice: { type: 'function', function: { name: agentFunName } }
});
@@ -158,13 +161,10 @@ ${systemPrompt}
const arg = JSON.parse(
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
);
const functionChars =
agentFunction.description.length +
agentFunction.parameters.properties.type.description.length;
return {
arg,
charsLength: countMessagesChars(messages) + functionChars
tokens: countMessagesTokens(messages, tools)
};
} catch (error) {
console.log(agentFunction.parameters);
@@ -174,7 +174,7 @@ ${systemPrompt}
return {
arg: {},
charsLength: 0
tokens: 0
};
}
}
@@ -216,7 +216,7 @@ async function completions({
agents.find((item) => answer.includes(item.key) || answer.includes(item.value))?.key || '';
return {
charsLength: countMessagesChars(messages),
tokens: countMessagesTokens(messages),
arg: { type: id }
};
}

View File

@@ -1,6 +1,7 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type {
@@ -14,7 +15,7 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getHistories } from '../utils';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
type Props = ModuleDispatchProps<{
[ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -46,7 +47,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
const extractModel = getLLMModel(model);
const chatHistories = getHistories(history, histories);
const { arg, charsLength } = await (async () => {
const { arg, tokens } = await (async () => {
if (extractModel.toolChoice) {
return toolChoice({
...props,
@@ -85,7 +86,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
const { totalPoints, modelName } = formatModelChars2Points({
model: extractModel.model,
charsLength,
tokens,
modelType: ModelTypeEnum.llm
});
@@ -98,7 +99,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: content,
charsLength,
tokens,
extractDescription: description,
extractResult: arg,
contextTotalLen: chatHistories.length + 2
@@ -108,7 +109,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
tokens
}
]
};
@@ -170,6 +171,12 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
required: extractKeys.filter((item) => item.required).map((item) => item.key)
}
};
const tools: any = [
{
type: 'function',
function: agentFunction
}
];
const ai = getAIApi({
userKey: user.openaiAccount,
@@ -180,12 +187,7 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
model: extractModel.model,
temperature: 0,
messages: [...adaptMessages],
tools: [
{
type: 'function',
function: agentFunction
}
],
tools,
tool_choice: { type: 'function', function: { name: agentFunName } }
});
@@ -202,12 +204,9 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
}
})();
const functionChars =
description.length + extractKeys.reduce((sum, item) => sum + item.desc.length, 0);
return {
rawResponse: response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '',
charsLength: countMessagesChars(messages) + functionChars,
tokens: countMessagesTokens(messages, tools),
arg
};
}
@@ -257,7 +256,7 @@ Human: ${content}`
if (start === -1 || end === -1)
return {
rawResponse: answer,
charsLength: countMessagesChars(messages),
tokens: countMessagesTokens(messages),
arg: {}
};
@@ -269,14 +268,14 @@ Human: ${content}`
try {
return {
rawResponse: answer,
charsLength: countMessagesChars(messages),
tokens: countMessagesTokens(messages),
arg: JSON.parse(jsonStr) as Record<string, any>
};
} catch (error) {
return {
rawResponse: answer,
charsLength: countMessagesChars(messages),
tokens: countMessagesTokens(messages),
arg: {}
};
}

View File

@@ -1,17 +1,17 @@
import type { NextApiResponse } from 'next';
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '@/service/common/censor';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
import type { ModuleDispatchResponse, ModuleItemType } from '@fastgpt/global/core/module/type.d';
import { countMessagesTokens, sliceMessagesTB } from '@fastgpt/global/common/string/tiktoken';
import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d';
@@ -98,7 +98,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
userChatInput,
systemPrompt
});
const { max_tokens } = getMaxTokens({
const { max_tokens } = await getMaxTokens({
model: modelConstantsData,
maxToken,
filterMessages
@@ -137,8 +137,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const response = await ai.chat.completions.create(
{
presence_penalty: 0,
frequency_penalty: 0,
...modelConstantsData?.defaultConfig,
model: modelConstantsData.model,
temperature,
@@ -189,10 +187,10 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
})();
const charsLength = countMessagesChars(completeMessages);
const tokens = countMessagesTokens(completeMessages);
const { totalPoints, modelName } = formatModelChars2Points({
model,
charsLength,
tokens,
modelType: ModelTypeEnum.llm
});
@@ -201,7 +199,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[ModuleOutputKeyEnum.responseData]: {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength,
tokens,
query: `${userChatInput}`,
maxToken: max_tokens,
quoteList: filterQuoteQA,
@@ -213,7 +211,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
charsLength
tokens
}
],
history: completeMessages
@@ -292,7 +290,7 @@ function getChatMessages({
const filterMessages = ChatContextFilter({
messages,
maxTokens: Math.ceil(model.maxContext - 300) // filter token. not response maxToken
maxTokens: model.maxContext - 300 // filter token. not response maxToken
});
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
@@ -315,11 +313,12 @@ function getMaxTokens({
const tokensLimit = model.maxContext;
/* count response max token */
const promptsToken = countMessagesTokens({
messages: filterMessages
});
const promptsToken = countMessagesTokens(filterMessages);
maxToken = promptsToken + maxToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
if (maxToken <= 0) {
return Promise.reject('Over max token');
}
return {
max_tokens: maxToken
};

View File

@@ -1,5 +1,5 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type {
@@ -12,7 +12,7 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/mo
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
import { getHistories } from '../utils';
import { datasetSearchQueryExtension } from '@fastgpt/service/core/dataset/search/utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '@fastgpt/service/support/permission/teamLimit';
type DatasetSearchProps = ModuleDispatchProps<{
@@ -85,7 +85,7 @@ export async function dispatchDatasetSearch(
// start search
const {
searchRes,
charsLength,
tokens,
usingSimilarityFilter,
usingReRank: searchUsingReRank
} = await searchDatasetData({
@@ -104,37 +104,37 @@ export async function dispatchDatasetSearch(
// vector
const { totalPoints, modelName } = formatModelChars2Points({
model: vectorModel.model,
charsLength,
tokens,
modelType: ModelTypeEnum.vector
});
const responseData: moduleDispatchResType & { totalPoints: number } = {
totalPoints,
query: concatQueries.join('\n'),
model: modelName,
charsLength,
tokens,
similarity: usingSimilarityFilter ? similarity : undefined,
limit,
searchMode,
searchUsingReRank: searchUsingReRank
};
const moduleDispatchBills: ChatModuleBillType[] = [
const moduleDispatchBills: ChatModuleUsageType[] = [
{
totalPoints,
moduleName: module.name,
model: modelName,
charsLength
tokens
}
];
if (aiExtensionResult) {
const { totalPoints, modelName } = formatModelChars2Points({
model: aiExtensionResult.model,
charsLength: aiExtensionResult.charsLength,
tokens: aiExtensionResult.tokens,
modelType: ModelTypeEnum.llm
});
responseData.totalPoints += totalPoints;
responseData.charsLength = aiExtensionResult.charsLength;
responseData.tokens = aiExtensionResult.tokens;
responseData.extensionModel = modelName;
responseData.extensionResult =
aiExtensionResult.extensionQueries?.join('\n') ||
@@ -144,7 +144,7 @@ export async function dispatchDatasetSearch(
totalPoints,
moduleName: 'core.module.template.Query extension',
model: modelName,
charsLength: aiExtensionResult.charsLength
tokens: aiExtensionResult.tokens
});
}

View File

@@ -28,7 +28,7 @@ import { dispatchRunPlugin } from './plugin/run';
import { dispatchPluginInput } from './plugin/runInput';
import { dispatchPluginOutput } from './plugin/runOutput';
import { valueTypeFormat } from './utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
[FlowNodeTypeEnum.historyNode]: dispatchHistory,
@@ -83,7 +83,7 @@ export async function dispatchModules({
// let storeData: Record<string, any> = {}; // after module used
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
let chatAnswerText = ''; // AI answer
let chatModuleBills: ChatModuleBillType[] = [];
let chatModuleBills: ChatModuleUsageType[] = [];
let runningTime = Date.now();
function pushStore(
@@ -95,7 +95,7 @@ export async function dispatchModules({
}: {
answerText?: string;
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
moduleDispatchBills?: ChatModuleBillType[];
moduleDispatchBills?: ChatModuleUsageType[];
}
) {
const time = Date.now();
@@ -165,7 +165,6 @@ export async function dispatchModules({
const filterModules = nextRunModules.filter((module) => {
if (set.has(module.moduleId)) return false;
set.add(module.moduleId);
``;
return true;
});

View File

@@ -95,7 +95,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
moduleName: plugin.name,
totalPoints: moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
model: plugin.name,
charsLength: 0
tokens: 0
}
],
...(output ? output.pluginOutput : {})

View File

@@ -10,6 +10,7 @@ import {
import axios from 'axios';
import { valueTypeFormat } from '../utils';
import { SERVICE_LOCAL_HOST } from '@fastgpt/service/common/system/tools';
import { addLog } from '@fastgpt/service/common/system/log';
type PropsArrType = {
key: string;
@@ -130,7 +131,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
...results
};
} catch (error) {
const err = httpRequestErrorResponseData(error)
addLog.error('Http request error', error);
return {
[ModuleOutputKeyEnum.failed]: true,
[ModuleOutputKeyEnum.responseData]: {
@@ -138,7 +139,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
params: Object.keys(params).length > 0 ? params : undefined,
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
headers: Object.keys(headers).length > 0 ? headers : undefined,
httpResult: { error: err }
httpResult: { error: formatHttpError(error) }
}
};
}
@@ -280,21 +281,14 @@ function removeUndefinedSign(obj: Record<string, any>) {
}
return obj;
}
function httpRequestErrorResponseData(error: any) {
try {
return {
message: error?.message || undefined,
name: error?.name || undefined,
method: error?.config?.method || undefined,
baseURL: error?.config?.baseURL || undefined,
url: error?.config?.url || undefined,
code: error?.code || undefined,
status: error?.status || undefined
}
} catch (error) {
return {
message: 'Request Failed',
name: "AxiosError",
};
}
/**
 * Reduce an (axios-like) error to a plain serialisable object so it can
 * be returned in the module's `httpResult` without leaking internals.
 * All fields are optional-chained, so any input (even null) is safe.
 */
function formatHttpError(error: any) {
  const requestConfig = error?.config;
  return {
    message: error?.message,
    name: error?.name,
    method: requestConfig?.method,
    baseURL: requestConfig?.baseURL,
    url: requestConfig?.url,
    code: error?.code,
    status: error?.status
  };
}

View File

@@ -5,7 +5,7 @@ import type {
} from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModelTypeEnum, getLLMModel } from '@fastgpt/service/core/ai/model';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
import { getHistories } from '../utils';
import { hashStr } from '@fastgpt/global/common/string/tools';
@@ -32,7 +32,7 @@ export const dispatchQueryExtension = async ({
const queryExtensionModel = getLLMModel(model);
const chatHistories = getHistories(history, histories);
const { extensionQueries, charsLength } = await queryExtension({
const { extensionQueries, tokens } = await queryExtension({
chatBg: systemPrompt,
query: userChatInput,
histories: chatHistories,
@@ -43,7 +43,7 @@ export const dispatchQueryExtension = async ({
const { totalPoints, modelName } = formatModelChars2Points({
model: queryExtensionModel.model,
charsLength,
tokens,
modelType: ModelTypeEnum.llm
});
@@ -60,7 +60,7 @@ export const dispatchQueryExtension = async ({
[ModuleOutputKeyEnum.responseData]: {
totalPoints,
model: modelName,
charsLength,
tokens,
query: userChatInput,
textOutput: JSON.stringify(filterSameQueries)
},
@@ -69,7 +69,7 @@ export const dispatchQueryExtension = async ({
moduleName: module.name,
totalPoints,
model: modelName,
charsLength
tokens
}
],
[ModuleOutputKeyEnum.text]: JSON.stringify(filterSameQueries)

View File

@@ -1,4 +1,3 @@
import { startQueue } from './utils/tools';
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/constants';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
@@ -9,22 +8,29 @@ import { initVectorStore } from '@fastgpt/service/common/vectorStore/controller'
import { getInitConfig } from '@/pages/api/common/system/getInitData';
import { startCron } from './common/system/cron';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { initGlobal } from './common/system';
import { startMongoWatch } from './common/system/volumnMongoWatch';
import { startTrainingQueue } from './core/dataset/training/utils';
/**
* connect MongoDB and init data
*/
export function connectToDatabase(): Promise<void> {
return connectMongo({
beforeHook: () => {},
beforeHook: () => {
initGlobal();
},
afterHook: async () => {
initVectorStore();
// start queue
startQueue();
startMongoWatch();
// cron
startCron();
// init system config
getInitConfig();
// cron
startCron();
// init vector database
await initVectorStore();
// start queue
startTrainingQueue(true);
initRootUser();
}
@@ -62,7 +68,7 @@ async function initRootUser() {
rootId = _id;
}
// init root team
await createDefaultTeam({ userId: rootId, maxSize: 1, balance: 9999 * PRICE_SCALE, session });
await createDefaultTeam({ userId: rootId, balance: 9999 * PRICE_SCALE, session });
});
console.log(`root user init:`, {

View File

@@ -1,11 +1,16 @@
import { ChatSchema } from '@fastgpt/global/core/chat/type';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { AuthModeType } from '@fastgpt/service/support/permission/type';
import { authOutLink } from './outLink';
import { authOutLink, authOutLinkInit } from './outLink';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { AuthResponseType } from '@fastgpt/global/support/permission/type';
import { authTeamSpaceToken } from './team';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { authOutLinkValid } from '@fastgpt/service/support/permission/auth/outLink';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
/*
outLink: Must be the owner
token: team owner and chat owner have all permissions
@@ -14,46 +19,51 @@ export async function autChatCrud({
appId,
chatId,
shareId,
shareTeamId,
outLinkUid,
teamId: spaceTeamId,
teamToken,
per = 'owner',
...props
}: AuthModeType & {
appId: string;
chatId?: string;
shareTeamId?: string;
shareId?: string;
outLinkUid?: string;
teamId?: string;
teamToken?: string;
}): Promise<{
chat?: ChatSchema;
isOutLink: boolean;
uid?: string;
}> {
const isOutLink = Boolean((shareId || shareTeamId) && outLinkUid);
const isOutLink = Boolean((shareId || spaceTeamId) && outLinkUid);
if (!chatId) return { isOutLink, uid: outLinkUid };
const chat = await MongoChat.findOne({ appId, chatId }).lean();
if (!chat) return { isOutLink, uid: outLinkUid };
const { uid } = await (async () => {
// outLink Auth
if (shareId && outLinkUid) {
const { uid } = await authOutLink({ shareId, outLinkUid });
// auth outLinkUid
if (chat.shareId === shareId && chat.outLinkUid === uid) {
if (!chat || (chat.shareId === shareId && chat.outLinkUid === uid)) {
return { uid };
}
return Promise.reject(ChatErrEnum.unAuthChat);
}
if (shareTeamId && outLinkUid) {
if (chat.teamId == shareTeamId && chat.outLinkUid === outLinkUid) {
return { uid: outLinkUid };
// auth team space chat
if (spaceTeamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId: spaceTeamId, teamToken });
if (!chat || (String(chat.teamId) === String(spaceTeamId) && chat.outLinkUid === uid)) {
return { uid };
}
return Promise.reject(ChatErrEnum.unAuthChat);
}
// req auth
if (!chat) return { id: outLinkUid };
// auth req
const { teamId, tmbId, role } = await authUserRole(props);
if (String(teamId) !== String(chat.teamId)) return Promise.reject(ChatErrEnum.unAuthChat);
@@ -67,9 +77,61 @@ export async function autChatCrud({
return Promise.reject(ChatErrEnum.unAuthChat);
})();
if (!chat) return { isOutLink, uid };
return {
chat,
isOutLink,
uid
};
}
/*
Different chat source
1. token (header)
2. apikey (header)
3. share page (body: shareId outLinkUid)
4. team chat page (body: teamId teamToken)
*/
export async function authChatCert(props: AuthModeType) {
const { teamId, teamToken, shareId, outLinkUid } = props.req.body as OutLinkChatAuthProps;
if (shareId && outLinkUid) {
const { shareChat } = await authOutLinkValid({ shareId });
const { uid } = await authOutLinkInit({
outLinkUid,
tokenUrl: shareChat.limit?.hookUrl
});
return {
teamId: String(shareChat.teamId),
tmbId: String(shareChat.tmbId),
authType: AuthUserTypeEnum.outLink,
apikey: '',
isOwner: false,
canWrite: false,
outLinkUid: uid
};
}
if (teamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId, teamToken });
const tmb = await MongoTeamMember.findOne(
{ teamId, role: TeamMemberRoleEnum.owner },
'tmbId'
).lean();
if (!tmb) return Promise.reject(ChatErrEnum.unAuthChat);
return {
teamId,
tmbId: String(tmb._id),
authType: AuthUserTypeEnum.teamDomain,
apikey: '',
isOwner: false,
canWrite: false,
outLinkUid: uid
};
}
return authCert(props);
}

View File

@@ -1,9 +1,12 @@
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { TeamMemberWithUserSchema } from '@fastgpt/global/support/user/team/type';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { checkTeamAIPoints } from '@fastgpt/service/support/permission/teamLimit';
import axios from 'axios';
import { GET } from '@fastgpt/service/common/api/plusRequest';
import {
AuthTeamTagTokenProps,
AuthTokenFromTeamDomainResponse
} from '@fastgpt/global/support/user/team/tag';
export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
const tmb = (await MongoTeamMember.findById(tmbId, 'teamId userId').populate(
@@ -19,25 +22,21 @@ export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
};
}
type UserInfoType = {
data: {
uid: string;
tags: string[];
};
};
export async function getShareTeamUid(shareTeamId: string, authToken: string) {
try {
const teamInfo = await MongoTeam.findById(shareTeamId);
const tagsUrl = teamInfo?.tagsUrl;
const { data: userInfo } = await axios.post(tagsUrl + `/getUserInfo`, { autoken: authToken });
const uid = userInfo?.data?.uid;
if (uid) {
throw new Error('uid null');
}
return uid;
} catch (err) {
return '';
}
export function authTeamTagToken(data: AuthTeamTagTokenProps) {
return GET<AuthTokenFromTeamDomainResponse['data']>('/support/user/team/tag/authTeamToken', data);
}
export async function authTeamSpaceToken({
teamId,
teamToken
}: {
teamId: string;
teamToken: string;
}) {
// get outLink and app
const data = await authTeamTagToken({ teamId, teamToken });
const uid = data.uid;
return {
uid
};
}

View File

@@ -1,36 +0,0 @@
import { POST } from '@fastgpt/service/common/api/plusRequest';
import type { AuthOutLinkChatProps } from '@fastgpt/global/support/outLink/api.d';
import type { chatAppListSchema } from '@fastgpt/global/core/chat/type.d';
import { getUserChatInfoAndAuthTeamPoints } from './team';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
export function authChatTeamInfo(data: { shareTeamId: string; authToken: string }) {
return POST<chatAppListSchema>('/core/chat/init', data);
}
export async function authTeamShareChatStart({
teamId,
ip,
outLinkUid,
question
}: AuthOutLinkChatProps & {
teamId: string;
}) {
// get outLink and app
const { teamInfo, uid } = await authChatTeamInfo({ shareTeamId: teamId, authToken: outLinkUid });
// check balance and chat limit
const tmb = await MongoTeamMember.findOne({ teamId, userId: String(teamInfo.ownerId) });
if (!tmb) {
throw new Error('can not find it');
}
const { user } = await getUserChatInfoAndAuthTeamPoints(String(tmb._id));
return {
user,
tmbId: String(tmb._id),
uid: uid
};
}

View File

@@ -1,10 +1,9 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@fastgpt/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { addLog } from '@fastgpt/service/common/system/log';
import { createUsage, concatUsage } from './controller';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { ChatModuleUsageType } from '@fastgpt/global/support/wallet/bill/type';
export const pushChatUsage = ({
appName,
@@ -19,7 +18,7 @@ export const pushChatUsage = ({
teamId: string;
tmbId: string;
source: `${UsageSourceEnum}`;
moduleDispatchBills: ChatModuleBillType[];
moduleDispatchBills: ChatModuleUsageType[];
}) => {
const totalPoints = moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
@@ -34,7 +33,7 @@ export const pushChatUsage = ({
moduleName: item.moduleName,
amount: item.totalPoints || 0,
model: item.model,
charsLength: item.charsLength
tokens: item.tokens
}))
});
addLog.info(`finish completions`, {
@@ -50,20 +49,20 @@ export const pushQAUsage = async ({
teamId,
tmbId,
model,
charsLength,
tokens,
billId
}: {
teamId: string;
tmbId: string;
model: string;
charsLength: number;
tokens: number;
billId: string;
}) => {
// 计算价格
const { totalPoints } = formatModelChars2Points({
model,
modelType: ModelTypeEnum.llm,
charsLength
tokens
});
concatUsage({
@@ -71,7 +70,7 @@ export const pushQAUsage = async ({
teamId,
tmbId,
totalPoints,
charsLength,
tokens,
listIndex: 1
});
@@ -82,30 +81,30 @@ export const pushGenerateVectorUsage = ({
billId,
teamId,
tmbId,
charsLength,
tokens,
model,
source = UsageSourceEnum.fastgpt,
extensionModel,
extensionCharsLength
extensionTokens
}: {
billId?: string;
teamId: string;
tmbId: string;
charsLength: number;
tokens: number;
model: string;
source?: `${UsageSourceEnum}`;
extensionModel?: string;
extensionCharsLength?: number;
extensionTokens?: number;
}) => {
const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
modelType: ModelTypeEnum.vector,
model,
charsLength
tokens
});
const { extensionTotalPoints, extensionModelName } = (() => {
if (!extensionModel || !extensionCharsLength)
if (!extensionModel || !extensionTokens)
return {
extensionTotalPoints: 0,
extensionModelName: ''
@@ -113,7 +112,7 @@ export const pushGenerateVectorUsage = ({
const { totalPoints, modelName } = formatModelChars2Points({
modelType: ModelTypeEnum.llm,
model: extensionModel,
charsLength: extensionCharsLength
tokens: extensionTokens
});
return {
extensionTotalPoints: totalPoints,
@@ -130,7 +129,7 @@ export const pushGenerateVectorUsage = ({
tmbId,
totalPoints,
billId,
charsLength,
tokens,
listIndex: 0
});
} else {
@@ -145,7 +144,7 @@ export const pushGenerateVectorUsage = ({
moduleName: 'support.wallet.moduleName.index',
amount: totalVector,
model: vectorModelName,
charsLength
tokens
},
...(extensionModel !== undefined
? [
@@ -153,7 +152,7 @@ export const pushGenerateVectorUsage = ({
moduleName: 'core.module.template.Query extension',
amount: extensionTotalPoints,
model: extensionModelName,
charsLength: extensionCharsLength
tokens: extensionTokens
}
]
: [])
@@ -164,17 +163,17 @@ export const pushGenerateVectorUsage = ({
};
export const pushQuestionGuideUsage = ({
charsLength,
tokens,
teamId,
tmbId
}: {
charsLength: number;
tokens: number;
teamId: string;
tmbId: string;
}) => {
const qgModel = global.llmModels[0];
const { totalPoints, modelName } = formatModelChars2Points({
charsLength,
tokens,
model: qgModel.model,
modelType: ModelTypeEnum.llm
});
@@ -190,14 +189,14 @@ export const pushQuestionGuideUsage = ({
moduleName: 'core.app.Next Step Guide',
amount: totalPoints,
model: modelName,
charsLength
tokens
}
]
});
};
export function pushAudioSpeechUsage({
appName = 'support.wallet.bill.Audio Speech',
appName = 'support.wallet.usage.Audio Speech',
model,
charsLength,
teamId,
@@ -213,7 +212,7 @@ export function pushAudioSpeechUsage({
}) {
const { totalPoints, modelName } = formatModelChars2Points({
model,
charsLength,
tokens: charsLength,
modelType: ModelTypeEnum.audioSpeech
});
@@ -249,12 +248,12 @@ export function pushWhisperUsage({
const { totalPoints, modelName } = formatModelChars2Points({
model: whisperModel.model,
charsLength: duration,
tokens: duration,
modelType: ModelTypeEnum.whisper,
multiple: 60
});
const name = 'support.wallet.bill.Whisper';
const name = 'support.wallet.usage.Whisper';
createUsage({
teamId,

View File

@@ -1,4 +1,3 @@
import { ModelTypeEnum, getModelMap } from '@fastgpt/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
@@ -16,29 +15,3 @@ export function authType2UsageSource({
if (authType === AuthUserTypeEnum.apikey) return UsageSourceEnum.api;
return UsageSourceEnum.fastgpt;
}
export const formatModelChars2Points = ({
model,
charsLength = 0,
modelType,
multiple = 1000
}: {
model: string;
charsLength: number;
modelType: `${ModelTypeEnum}`;
multiple?: number;
}) => {
const modelData = getModelMap?.[modelType]?.(model);
if (!modelData)
return {
totalPoints: 0,
modelName: ''
};
const totalPoints = (modelData.charsPointsPrice || 0) * (charsLength / multiple);
return {
modelName: modelData.name,
totalPoints
};
};

View File

@@ -1,10 +0,0 @@
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
/* start task */
export const startQueue = () => {
if (!global.systemEnv) return;
generateQA();
generateVector();
};