Mirror of https://github.com/labring/FastGPT.git (synced 2025-08-01 03:48:24 +00:00)
@@ -6,9 +6,11 @@ import {
} from '@fastgpt/global/core/dataset/controller';
import {
insertDatasetDataVector,
recallFromVectorStore
recallFromVectorStore,
updateDatasetDataVector
} from '@fastgpt/service/common/vectorStore/controller';
import {
DatasetDataIndexTypeEnum,
DatasetSearchModeEnum,
DatasetSearchModeMap,
SearchScoreTypeEnum
@@ -20,7 +22,6 @@ import { deleteDatasetDataVector } from '@fastgpt/service/common/vectorStore/con
import { getVectorsByText } from '@fastgpt/service/core/ai/embedding';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import {
DatasetDataItemType,
DatasetDataSchemaType,
DatasetDataWithCollectionType,
SearchDataResponseItemType
@@ -34,7 +35,7 @@ import type {
} from '@fastgpt/global/core/dataset/api.d';
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { getVectorModel } from '../../ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';

export async function pushDataToTrainingQueue(
|
||||
props: {
|
||||
@@ -77,7 +78,7 @@ export async function insertData2Dataset({
|
||||
return Promise.reject("teamId and tmbId can't be the same");
|
||||
}
|
||||
|
||||
const qaStr = getDefaultIndex({ q, a }).text;
|
||||
const qaStr = `${q}\n${a}`.trim();
|
||||
|
||||
// empty indexes check, if empty, create default index
|
||||
indexes =
|
||||
@@ -85,16 +86,10 @@ export async function insertData2Dataset({
|
||||
? indexes.map((index) => ({
|
||||
...index,
|
||||
dataId: undefined,
|
||||
defaultIndex: index.text.trim() === qaStr
|
||||
defaultIndex: indexes?.length === 1 && index.text === qaStr ? true : index.defaultIndex
|
||||
}))
|
||||
: [getDefaultIndex({ q, a })];
|
||||
|
||||
if (!indexes.find((index) => index.defaultIndex)) {
|
||||
indexes.unshift(getDefaultIndex({ q, a }));
|
||||
}
|
||||
|
||||
indexes = indexes.slice(0, 6);
|
||||
|
||||
// insert to vector store
|
||||
const result = await Promise.all(
|
||||
indexes.map((item) =>
|
||||
@@ -133,10 +128,8 @@ export async function insertData2Dataset({
/**
* update data
* 1. compare indexes
* 2. insert new pg data
* session run:
* 3. update mongo data(session run)
* 4. delete old pg data
* 2. update pg data
* 3. update mongo data
*/
export async function updateData2Dataset({
|
||||
dataId,
|
||||
@@ -148,30 +141,31 @@ export async function updateData2Dataset({
|
||||
if (!Array.isArray(indexes)) {
|
||||
return Promise.reject('indexes is required');
|
||||
}
|
||||
const qaStr = getDefaultIndex({ q, a }).text;
|
||||
const qaStr = `${q}\n${a}`.trim();
|
||||
|
||||
// patch index and update pg
|
||||
const mongoData = await MongoDatasetData.findById(dataId);
|
||||
if (!mongoData) return Promise.reject('core.dataset.error.Data not found');
|
||||
|
||||
// remove defaultIndex
|
||||
let formatIndexes = indexes.map((index) => ({
|
||||
...index,
|
||||
text: index.text.trim(),
|
||||
defaultIndex: index.text.trim() === qaStr
|
||||
}));
|
||||
if (!formatIndexes.find((index) => index.defaultIndex)) {
|
||||
const defaultIndex = mongoData.indexes.find((index) => index.defaultIndex);
|
||||
formatIndexes.unshift(defaultIndex ? defaultIndex : getDefaultIndex({ q, a }));
|
||||
// make sure have one index
|
||||
if (indexes.length === 0) {
|
||||
const databaseDefaultIndex = mongoData.indexes.find((index) => index.defaultIndex);
|
||||
|
||||
indexes = [
|
||||
getDefaultIndex({
|
||||
q,
|
||||
a,
|
||||
dataId: databaseDefaultIndex ? String(databaseDefaultIndex.dataId) : undefined
|
||||
})
|
||||
];
|
||||
}
|
||||
formatIndexes = formatIndexes.slice(0, 6);
|
||||
|
||||
// patch indexes, create, update, delete
|
||||
const patchResult: PatchIndexesProps[] = [];
|
||||
|
||||
// find each database index in the new indexes; if it is not there, delete it
|
||||
for (const item of mongoData.indexes) {
|
||||
const index = formatIndexes.find((index) => index.dataId === item.dataId);
|
||||
const index = indexes.find((index) => index.dataId === item.dataId);
|
||||
if (!index) {
|
||||
patchResult.push({
|
||||
type: 'delete',
|
||||
@@ -179,34 +173,35 @@ export async function updateData2Dataset({
|
||||
});
|
||||
}
|
||||
}
|
||||
for (const item of formatIndexes) {
|
||||
for (const item of indexes) {
|
||||
const index = mongoData.indexes.find((index) => index.dataId === item.dataId);
|
||||
// in database, update
|
||||
if (index) {
|
||||
// default index update
|
||||
if (index.defaultIndex && index.text !== qaStr) {
|
||||
patchResult.push({
|
||||
type: 'update',
|
||||
index: {
|
||||
//@ts-ignore
|
||||
...index.toObject(),
|
||||
text: qaStr
|
||||
}
|
||||
});
|
||||
continue;
|
||||
}
|
||||
// custom index update
|
||||
// manual update index
|
||||
if (index.text !== item.text) {
|
||||
patchResult.push({
|
||||
type: 'update',
|
||||
index: item
|
||||
});
|
||||
continue;
|
||||
} else if (index.defaultIndex && index.text !== qaStr) {
|
||||
// update default index
|
||||
patchResult.push({
|
||||
type: 'update',
|
||||
index: {
|
||||
...item,
|
||||
type:
|
||||
item.type === DatasetDataIndexTypeEnum.qa && !a
|
||||
? DatasetDataIndexTypeEnum.chunk
|
||||
: item.type,
|
||||
text: qaStr
|
||||
}
|
||||
});
|
||||
} else {
|
||||
patchResult.push({
|
||||
type: 'unChange',
|
||||
index: item
|
||||
});
|
||||
}
|
||||
patchResult.push({
|
||||
type: 'unChange',
|
||||
index: item
|
||||
});
|
||||
} else {
|
||||
// not in database, create
|
||||
patchResult.push({
|
||||
@@ -220,12 +215,10 @@ export async function updateData2Dataset({
|
||||
mongoData.updateTime = new Date();
|
||||
await mongoData.save();
|
||||
|
||||
// insert vector
|
||||
const clonePatchResult2Insert: PatchIndexesProps[] = JSON.parse(JSON.stringify(patchResult));
|
||||
const insertResult = await Promise.all(
|
||||
clonePatchResult2Insert.map(async (item) => {
|
||||
// insert new vector and update dataId
|
||||
if (item.type === 'create' || item.type === 'update') {
|
||||
// update vector
|
||||
const result = await Promise.all(
|
||||
patchResult.map(async (item) => {
|
||||
if (item.type === 'create') {
|
||||
const result = await insertDatasetDataVector({
|
||||
query: item.index.text,
|
||||
model: getVectorModel(model),
|
||||
@@ -236,54 +229,50 @@ export async function updateData2Dataset({
|
||||
item.index.dataId = result.insertId;
|
||||
return result;
|
||||
}
|
||||
if (item.type === 'update' && item.index.dataId) {
|
||||
const result = await updateDatasetDataVector({
|
||||
teamId: mongoData.teamId,
|
||||
datasetId: mongoData.datasetId,
|
||||
collectionId: mongoData.collectionId,
|
||||
id: item.index.dataId,
|
||||
query: item.index.text,
|
||||
model: getVectorModel(model)
|
||||
});
|
||||
item.index.dataId = result.insertId;
|
||||
|
||||
return result;
|
||||
}
|
||||
if (item.type === 'delete' && item.index.dataId) {
|
||||
await deleteDatasetDataVector({
|
||||
teamId: mongoData.teamId,
|
||||
id: item.index.dataId
|
||||
});
|
||||
return {
|
||||
charsLength: 0
|
||||
};
|
||||
}
|
||||
return {
|
||||
charsLength: 0
|
||||
};
|
||||
})
|
||||
);
|
||||
const charsLength = insertResult.reduce((acc, cur) => acc + cur.charsLength, 0);
|
||||
// console.log(clonePatchResult2Insert);
|
||||
await mongoSessionRun(async (session) => {
|
||||
// update mongo
|
||||
const newIndexes = clonePatchResult2Insert
|
||||
.filter((item) => item.type !== 'delete')
|
||||
.map((item) => item.index);
|
||||
// update mongo other data
|
||||
mongoData.q = q || mongoData.q;
|
||||
mongoData.a = a ?? mongoData.a;
|
||||
mongoData.fullTextToken = jiebaSplit({ text: mongoData.q + mongoData.a });
|
||||
// @ts-ignore
|
||||
mongoData.indexes = newIndexes;
|
||||
await mongoData.save({ session });
|
||||
|
||||
// delete vector
|
||||
const deleteIdList = patchResult
|
||||
.filter((item) => item.type === 'delete' || item.type === 'update')
|
||||
.map((item) => item.index.dataId)
|
||||
.filter(Boolean);
|
||||
if (deleteIdList.length > 0) {
|
||||
await deleteDatasetDataVector({
|
||||
teamId: mongoData.teamId,
|
||||
idList: deleteIdList as string[]
|
||||
});
|
||||
}
|
||||
});
|
||||
const charsLength = result.reduce((acc, cur) => acc + cur.charsLength, 0);
|
||||
const newIndexes = patchResult.filter((item) => item.type !== 'delete').map((item) => item.index);
|
||||
|
||||
// update mongo other data
|
||||
mongoData.q = q || mongoData.q;
|
||||
mongoData.a = a ?? mongoData.a;
|
||||
mongoData.fullTextToken = jiebaSplit({ text: mongoData.q + mongoData.a });
|
||||
// @ts-ignore
|
||||
mongoData.indexes = newIndexes;
|
||||
await mongoData.save();
|
||||
|
||||
return {
|
||||
charsLength
|
||||
};
|
||||
}
|
||||
|
||||
export const deleteDatasetData = async (data: DatasetDataItemType) => {
|
||||
await mongoSessionRun(async (session) => {
|
||||
await MongoDatasetData.findByIdAndDelete(data.id, { session });
|
||||
await deleteDatasetDataVector({
|
||||
teamId: data.teamId,
|
||||
idList: data.indexes.map((item) => item.dataId)
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
type SearchDatasetDataProps = {
|
||||
teamId: string;
|
||||
model: string;
|
||||
@@ -388,7 +377,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
|
||||
a: data.a,
|
||||
chunkIndex: data.chunkIndex,
|
||||
datasetId: String(data.datasetId),
|
||||
collectionId: String(data.collectionId?._id),
|
||||
collectionId: String(data.collectionId._id),
|
||||
sourceName: data.collectionId.name || '',
|
||||
sourceId: data.collectionId?.fileId || data.collectionId?.rawLink,
|
||||
score: [{ type: SearchScoreTypeEnum.embedding, value: data.score, index }]
|
||||
@@ -492,7 +481,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
|
||||
}))
|
||||
});
|
||||
|
||||
if (results.length === 0) {
|
||||
if (!Array.isArray(results)) {
|
||||
usingReRank = false;
|
||||
return [];
|
||||
}
|
||||
|
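The updateData2Dataset hunks above revolve around one step: diff the indexes stored in MongoDB against the incoming ones and classify each entry as create, update, delete or unChange before touching the vector store. A minimal sketch of that comparison, with the PatchIndexesProps and index item shapes assumed from the surrounding code:

```ts
// Sketch only: classify indexes as create / update / delete / unChange by dataId.
type IndexItem = { dataId?: string; text: string; defaultIndex?: boolean };
type PatchIndexesProps = { type: 'create' | 'update' | 'delete' | 'unChange'; index: IndexItem };

function diffIndexes(stored: IndexItem[], incoming: IndexItem[]): PatchIndexesProps[] {
  const patch: PatchIndexesProps[] = [];

  // a stored index missing from the incoming list -> its vector must be deleted
  for (const item of stored) {
    if (!incoming.find((index) => index.dataId === item.dataId)) {
      patch.push({ type: 'delete', index: item });
    }
  }
  // an incoming index with a stored counterpart -> update when the text changed, otherwise unchanged;
  // an incoming index without a stored counterpart -> create
  for (const item of incoming) {
    const existing = stored.find((index) => index.dataId === item.dataId);
    if (!existing) {
      patch.push({ type: 'create', index: item });
    } else if (existing.text !== item.text) {
      patch.push({ type: 'update', index: item });
    } else {
      patch.push({ type: 'unChange', index: item });
    }
  }
  return patch;
}
```

Entries classified as create get fresh vectors inserted, update entries are re-embedded, and the vector ids collected from delete entries are removed, which matches the passes over patchResult in the hunks above.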
@@ -1,6 +1,6 @@
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { pushQAUsage } from '@/service/support/wallet/usage/push';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { pushQABill } from '@/service/support/wallet/bill/push';
import { DatasetDataIndexTypeEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { sendOneInform } from '../support/user/inform/api';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
@@ -9,12 +9,12 @@ import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_AgentQA } from '@/global/core/prompt/agent';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { authTeamBalance } from '../support/permission/auth/bill';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
import { getLLMModel } from '../core/ai/model';
import { checkTeamAIPoints } from '../support/permission/teamLimit';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';

const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -89,16 +89,16 @@ export async function generateQA(): Promise<any> {
|
||||
|
||||
// auth balance
|
||||
try {
|
||||
await checkTeamAIPoints(data.teamId);
|
||||
await authTeamBalance(data.teamId);
|
||||
} catch (error: any) {
|
||||
if (error?.statusText === TeamErrEnum.aiPointsNotEnough) {
|
||||
if (error?.statusText === UserErrEnum.balanceNotEnough) {
|
||||
// send inform and lock data
|
||||
try {
|
||||
sendOneInform({
|
||||
type: 'system',
|
||||
title: '文本训练任务中止',
|
||||
content:
|
||||
'该团队账号的AI积分不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
|
||||
'该团队账号余额不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
|
||||
tmbId: data.tmbId
|
||||
});
|
||||
console.log('余额不足,暂停【QA】生成任务');
|
||||
@@ -161,7 +161,7 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;

// add bill
if (insertLen > 0) {
pushQAUsage({
pushQABill({
teamId: data.teamId,
tmbId: data.tmbId,
charsLength: `${prompt}${answer}`.length,
@@ -230,6 +230,7 @@ function formatSplitText(text: string, rawText: string) {
indexes: [
{
defaultIndex: true,
type: DatasetDataIndexTypeEnum.qa,
text: `${q}\n${a.trim().replace(/\n\s*/g, '\n')}`
}
]
@@ -247,6 +248,7 @@ function formatSplitText(text: string, rawText: string) {
indexes: [
{
defaultIndex: true,
type: DatasetDataIndexTypeEnum.chunk,
text: chunk
}
]
|
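formatSplitText (partially shown in the last two hunks) wraps each parsed Q/A pair into a dataset item whose single index is marked defaultIndex with type qa, falling back to plain chunk items when no pairs are found. The parsing itself is outside this hunk; a rough sketch of the idea, with the Q1:/A1: numbering pattern assumed purely for illustration rather than taken from the repository:

```ts
// Illustrative only: split "Q1: ... A1: ..." style model output into { q, a } pairs.
// The real formatSplitText may use a different pattern.
function splitQA(text: string): { q: string; a: string }[] {
  const regex = /Q\d+:\s*([\s\S]*?)A\d+:\s*([\s\S]*?)(?=Q\d+:|$)/g;
  const result: { q: string; a: string }[] = [];
  let match: RegExpExecArray | null;
  while ((match = regex.exec(text)) !== null) {
    const q = match[1].trim();
    const a = match[2].trim();
    if (q) {
      result.push({ q, a });
    }
  }
  return result;
}
```

Each pair from a splitter like this would then become { q, a, indexes: [{ defaultIndex: true, type: 'qa', text: `${q}\n${a}` }] }, matching the item shape added above.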
@@ -4,10 +4,10 @@ import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { sendOneInform } from '../support/user/inform/api';
|
||||
import { addLog } from '@fastgpt/service/common/system/log';
|
||||
import { getErrText } from '@fastgpt/global/common/error/utils';
|
||||
import { checkTeamAIPoints } from '../support/permission/teamLimit';
|
||||
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
|
||||
import { authTeamBalance } from '@/service/support/permission/auth/bill';
|
||||
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
|
||||
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
|
||||
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
|
||||
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
|
||||
|
||||
const reduceQueue = () => {
|
||||
global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
|
||||
@@ -93,16 +93,16 @@ export async function generateVector(): Promise<any> {
|
||||
|
||||
// auth balance
|
||||
try {
|
||||
await checkTeamAIPoints(data.teamId);
|
||||
await authTeamBalance(data.teamId);
|
||||
} catch (error: any) {
|
||||
if (error?.statusText === TeamErrEnum.aiPointsNotEnough) {
|
||||
if (error?.statusText === UserErrEnum.balanceNotEnough) {
|
||||
// send inform and lock data
|
||||
try {
|
||||
sendOneInform({
|
||||
type: 'system',
|
||||
title: '文本训练任务中止',
|
||||
content:
|
||||
'该团队账号AI积分不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
|
||||
'该团队账号余额不足,文本训练任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
|
||||
tmbId: data.tmbId
|
||||
});
|
||||
console.log('余额不足,暂停【向量】生成任务');
|
||||
@@ -138,7 +138,7 @@ export async function generateVector(): Promise<any> {
|
||||
});
|
||||
|
||||
// push bill
|
||||
pushGenerateVectorUsage({
|
||||
pushGenerateVectorBill({
|
||||
teamId: data.teamId,
|
||||
tmbId: data.tmbId,
|
||||
charsLength,
|
||||
|
@@ -1,12 +1,9 @@
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type {
|
||||
ClassifyQuestionAgentItemType,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
@@ -14,7 +11,7 @@ import { Prompt_CQJson } from '@/global/core/prompt/agent';
|
||||
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
|
||||
import { getHistories } from '../utils';
|
||||
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
|
||||
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
@@ -23,9 +20,10 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
[ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
|
||||
}>;
|
||||
type CQResponse = ModuleDispatchResponse<{
|
||||
type CQResponse = {
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
};
|
||||
|
||||
const agentFunName = 'classify_question';
|
||||
|
||||
@@ -33,7 +31,6 @@ const agentFunName = 'classify_question';
|
||||
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
|
||||
const {
|
||||
user,
|
||||
module: { name },
|
||||
histories,
|
||||
params: { model, history = 6, agents, userChatInput }
|
||||
} = props as Props;
|
||||
@@ -46,7 +43,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
const { arg, charsLength } = await (async () => {
|
||||
const { arg, inputTokens, outputTokens } = await (async () => {
|
||||
if (cqModel.toolChoice) {
|
||||
return toolChoice({
|
||||
...props,
|
||||
@@ -63,31 +60,25 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
|
||||
|
||||
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
|
||||
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
const { total, modelName } = formatModelPrice2Store({
|
||||
model: cqModel.model,
|
||||
charsLength,
|
||||
modelType: ModelTypeEnum.llm
|
||||
inputLen: inputTokens,
|
||||
outputLen: outputTokens,
|
||||
type: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
return {
|
||||
[result.key]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
price: user.openaiAccount?.key ? 0 : total,
|
||||
model: modelName,
|
||||
query: userChatInput,
|
||||
charsLength,
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
cqList: agents,
|
||||
cqResult: result.value,
|
||||
contextTotalLen: chatHistories.length + 2
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
charsLength
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
@@ -158,13 +149,11 @@ ${systemPrompt}
|
||||
const arg = JSON.parse(
|
||||
response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || ''
|
||||
);
|
||||
const functionChars =
|
||||
agentFunction.description.length +
|
||||
agentFunction.parameters.properties.type.description.length;
|
||||
|
||||
return {
|
||||
arg,
|
||||
charsLength: countMessagesChars(messages) + functionChars
|
||||
inputTokens: response.usage?.prompt_tokens || 0,
|
||||
outputTokens: response.usage?.completion_tokens || 0
|
||||
};
|
||||
} catch (error) {
|
||||
console.log(agentFunction.parameters);
|
||||
@@ -174,7 +163,8 @@ ${systemPrompt}
|
||||
|
||||
return {
|
||||
arg: {},
|
||||
charsLength: 0
|
||||
inputTokens: 0,
|
||||
outputTokens: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -216,7 +206,8 @@ async function completions({
|
||||
agents.find((item) => answer.includes(item.key) || answer.includes(item.value))?.key || '';
|
||||
|
||||
return {
|
||||
charsLength: countMessagesChars(messages),
|
||||
inputTokens: data.usage?.prompt_tokens || 0,
|
||||
outputTokens: data.usage?.completion_tokens || 0,
|
||||
arg: { type: id }
|
||||
};
|
||||
}
|
||||
|
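Both variants of the classify-question dispatcher above end the same way: turn model usage into a charge and attach it to responseData. One side measures charsLength and calls formatModelChars2Points, the other reads prompt_tokens / completion_tokens from the completion response and hands them to formatModelPrice2Store. A sketch of the token-based path; the per-1k-token formula below is an assumption for illustration, not the actual body of formatModelPrice2Store:

```ts
// Assumed shape: price is stored per 1k tokens for input and output.
type PriceModel = { name: string; inputPrice: number; outputPrice: number };

function formatTokenUsage2Price(model: PriceModel, inputTokens: number, outputTokens: number) {
  // hypothetical formula: per-1k-token pricing, summed over input and output
  const total =
    (inputTokens / 1000) * model.inputPrice + (outputTokens / 1000) * model.outputPrice;
  return { total, modelName: model.name };
}

// usage with an OpenAI-style response object:
// const inputTokens = response.usage?.prompt_tokens || 0;
// const outputTokens = response.usage?.completion_tokens || 0;
// const { total, modelName } = formatTokenUsage2Price(cqModel, inputTokens, outputTokens);
```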
@@ -1,12 +1,9 @@
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type {
|
||||
ContextExtractAgentItemType,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type';
|
||||
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
|
||||
@@ -14,7 +11,7 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
|
||||
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { getHistories } from '../utils';
|
||||
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
|
||||
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
|
||||
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[];
|
||||
@@ -23,18 +20,18 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.description]: string;
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
type Response = {
|
||||
[ModuleOutputKeyEnum.success]?: boolean;
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[ModuleOutputKeyEnum.contextExtractFields]: string;
|
||||
}>;
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
};
|
||||
|
||||
const agentFunName = 'extract_json_data';
|
||||
|
||||
export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
const {
|
||||
user,
|
||||
module: { name },
|
||||
histories,
|
||||
params: { content, history = 6, model, description, extractKeys }
|
||||
} = props;
|
||||
@@ -46,7 +43,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
const extractModel = getLLMModel(model);
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
const { arg, charsLength } = await (async () => {
|
||||
const { arg, inputTokens, outputTokens } = await (async () => {
|
||||
if (extractModel.toolChoice) {
|
||||
return toolChoice({
|
||||
...props,
|
||||
@@ -83,10 +80,11 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
}
|
||||
}
|
||||
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
const { total, modelName } = formatModelPrice2Store({
|
||||
model: extractModel.model,
|
||||
charsLength,
|
||||
modelType: ModelTypeEnum.llm
|
||||
inputLen: inputTokens,
|
||||
outputLen: outputTokens,
|
||||
type: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
return {
|
||||
@@ -95,22 +93,15 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
|
||||
[ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
|
||||
...arg,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
price: user.openaiAccount?.key ? 0 : total,
|
||||
model: modelName,
|
||||
query: content,
|
||||
charsLength,
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
extractDescription: description,
|
||||
extractResult: arg,
|
||||
contextTotalLen: chatHistories.length + 2
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
charsLength
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -202,12 +193,10 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
|
||||
}
|
||||
})();
|
||||
|
||||
const functionChars =
|
||||
description.length + extractKeys.reduce((sum, item) => sum + item.desc.length, 0);
|
||||
|
||||
return {
|
||||
rawResponse: response?.choices?.[0]?.message?.tool_calls?.[0]?.function?.arguments || '',
|
||||
charsLength: countMessagesChars(messages) + functionChars,
|
||||
inputTokens: response.usage?.prompt_tokens || 0,
|
||||
outputTokens: response.usage?.completion_tokens || 0,
|
||||
arg
|
||||
};
|
||||
}
|
||||
@@ -249,6 +238,8 @@ Human: ${content}`
|
||||
stream: false
|
||||
});
|
||||
const answer = data.choices?.[0].message?.content || '';
|
||||
const inputTokens = data.usage?.prompt_tokens || 0;
|
||||
const outputTokens = data.usage?.completion_tokens || 0;
|
||||
|
||||
// parse response
|
||||
const start = answer.indexOf('{');
|
||||
@@ -257,7 +248,8 @@ Human: ${content}`
|
||||
if (start === -1 || end === -1)
|
||||
return {
|
||||
rawResponse: answer,
|
||||
charsLength: countMessagesChars(messages),
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
arg: {}
|
||||
};
|
||||
|
||||
@@ -269,14 +261,15 @@ Human: ${content}`
|
||||
try {
|
||||
return {
|
||||
rawResponse: answer,
|
||||
charsLength: countMessagesChars(messages),
|
||||
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
arg: JSON.parse(jsonStr) as Record<string, any>
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
rawResponse: answer,
|
||||
charsLength: countMessagesChars(messages),
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
arg: {}
|
||||
};
|
||||
}
|
||||
|
@@ -1,16 +1,16 @@
|
||||
import type { NextApiResponse } from 'next';
|
||||
import { ChatContextFilter, countMessagesChars } from '@fastgpt/service/core/chat/utils';
|
||||
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
|
||||
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
|
||||
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
|
||||
import { textAdaptGptResponse } from '@/utils/adapt';
|
||||
import { getAIApi } from '@fastgpt/service/core/ai/config';
|
||||
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
|
||||
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
|
||||
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
|
||||
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
|
||||
import { postTextCensor } from '@/service/common/censor';
|
||||
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
|
||||
import type { ModuleDispatchResponse, ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
|
||||
import { countMessagesTokens, sliceMessagesTB } from '@fastgpt/global/common/string/tiktoken';
|
||||
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
|
||||
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
|
||||
@@ -32,10 +32,11 @@ export type ChatProps = ModuleDispatchProps<
|
||||
[ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
|
||||
}
|
||||
>;
|
||||
export type ChatResponse = ModuleDispatchResponse<{
|
||||
export type ChatResponse = {
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
[ModuleOutputKeyEnum.history]: ChatItemType[];
|
||||
}>;
|
||||
};
|
||||
|
||||
/* request openai chat */
|
||||
export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
|
||||
@@ -45,7 +46,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
detail = false,
|
||||
user,
|
||||
histories,
|
||||
module: { name, outputs },
|
||||
outputs,
|
||||
params: {
|
||||
model,
|
||||
temperature = 0,
|
||||
@@ -153,7 +154,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
}
|
||||
);
|
||||
|
||||
const { answerText, completeMessages } = await (async () => {
|
||||
const { answerText, inputTokens, outputTokens, completeMessages } = await (async () => {
|
||||
if (stream) {
|
||||
// sse response
|
||||
const { answer } = await streamResponse({
|
||||
@@ -171,6 +172,17 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
|
||||
return {
|
||||
answerText: answer,
|
||||
inputTokens: countMessagesTokens({
|
||||
messages: filterMessages
|
||||
}),
|
||||
outputTokens: countMessagesTokens({
|
||||
messages: [
|
||||
{
|
||||
obj: ChatRoleEnum.AI,
|
||||
value: answer
|
||||
}
|
||||
]
|
||||
}),
|
||||
completeMessages
|
||||
};
|
||||
} else {
|
||||
@@ -184,38 +196,33 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
|
||||
|
||||
return {
|
||||
answerText: answer,
|
||||
inputTokens: unStreamResponse.usage?.prompt_tokens || 0,
|
||||
outputTokens: unStreamResponse.usage?.completion_tokens || 0,
|
||||
completeMessages
|
||||
};
|
||||
}
|
||||
})();
|
||||
|
||||
const charsLength = countMessagesChars(completeMessages);
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
const { total, modelName } = formatModelPrice2Store({
|
||||
model,
|
||||
charsLength,
|
||||
modelType: ModelTypeEnum.llm
|
||||
inputLen: inputTokens,
|
||||
outputLen: outputTokens,
|
||||
type: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
return {
|
||||
answerText,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
|
||||
responseData: {
|
||||
price: user.openaiAccount?.key ? 0 : total,
|
||||
model: modelName,
|
||||
charsLength,
|
||||
inputTokens,
|
||||
outputTokens,
|
||||
query: `${userChatInput}`,
|
||||
maxToken: max_tokens,
|
||||
quoteList: filterQuoteQA,
|
||||
historyPreview: getHistoryPreview(completeMessages),
|
||||
contextTotalLen: completeMessages.length
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: name,
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
charsLength
|
||||
}
|
||||
],
|
||||
history: completeMessages
|
||||
};
|
||||
};
|
||||
@@ -242,13 +249,30 @@ function filterQuote({
|
||||
// slice filterSearch
|
||||
const filterQuoteQA = filterSearchResultsByMaxChars(quoteQA, model.quoteMaxToken);
|
||||
|
||||
// group filterQuoteQA by collectionId, then sort each group by chunkIndex in ascending order
|
||||
const sortQuoteQAMap: Record<string, SearchDataResponseItemType[]> = {};
|
||||
filterQuoteQA.forEach((item) => {
|
||||
if (sortQuoteQAMap[item.collectionId]) {
|
||||
sortQuoteQAMap[item.collectionId].push(item);
|
||||
} else {
|
||||
sortQuoteQAMap[item.collectionId] = [item];
|
||||
}
|
||||
});
|
||||
const sortQuoteQAList = Object.values(sortQuoteQAMap);
|
||||
|
||||
sortQuoteQAList.forEach((qaList) => {
|
||||
qaList.sort((a, b) => a.chunkIndex - b.chunkIndex);
|
||||
});
|
||||
|
||||
const flatQuoteList = sortQuoteQAList.flat();
|
||||
|
||||
const quoteText =
|
||||
filterQuoteQA.length > 0
|
||||
? `${filterQuoteQA.map((item, index) => getValue(item, index).trim()).join('\n------\n')}`
|
||||
flatQuoteList.length > 0
|
||||
? `${flatQuoteList.map((item, index) => getValue(item, index)).join('\n')}`
|
||||
: '';
|
||||
|
||||
return {
|
||||
filterQuoteQA: filterQuoteQA,
|
||||
filterQuoteQA: flatQuoteList,
|
||||
quoteText
|
||||
};
|
||||
}
|
||||
|
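The filterQuote change above stops feeding quotes to the prompt in raw similarity order: it buckets them by collectionId and sorts each bucket by chunkIndex, so chunks from the same document stay adjacent and in reading order. The same regrouping in isolation, trimmed to the two fields that matter here:

```ts
type QuoteItem = { collectionId: string; chunkIndex: number };

function regroupQuotes<T extends QuoteItem>(quotes: T[]): T[] {
  // bucket by collectionId, preserving first-seen collection order
  const byCollection = new Map<string, T[]>();
  for (const item of quotes) {
    const list = byCollection.get(item.collectionId) ?? [];
    list.push(item);
    byCollection.set(item.collectionId, list);
  }
  // sort each bucket by chunkIndex, then flatten back into one list
  return [...byCollection.values()]
    .map((list) => [...list].sort((a, b) => a.chunkIndex - b.chunkIndex))
    .flat();
}

// regroupQuotes([{ collectionId: 'b', chunkIndex: 2 }, { collectionId: 'a', chunkIndex: 1 },
//   { collectionId: 'b', chunkIndex: 0 }]) yields b#0, b#2, a#1
```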
@@ -1,19 +1,15 @@
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
|
||||
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
|
||||
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
|
||||
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { ModelTypeEnum, getLLMModel, getVectorModel } from '@/service/core/ai/model';
|
||||
import { searchDatasetData } from '@/service/core/dataset/data/controller';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
|
||||
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
|
||||
import { getHistories } from '../utils';
|
||||
import { datasetSearchQueryExtension } from '@fastgpt/service/core/dataset/search/utils';
|
||||
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
import { checkTeamReRankPermission } from '@/service/support/permission/teamLimit';
|
||||
|
||||
type DatasetSearchProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
|
||||
@@ -26,11 +22,12 @@ type DatasetSearchProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.datasetSearchExtensionModel]: string;
|
||||
[ModuleInputKeyEnum.datasetSearchExtensionBg]: string;
|
||||
}>;
|
||||
export type DatasetSearchResponse = ModuleDispatchResponse<{
|
||||
export type DatasetSearchResponse = {
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
[ModuleOutputKeyEnum.datasetIsEmpty]?: boolean;
|
||||
[ModuleOutputKeyEnum.datasetUnEmpty]?: boolean;
|
||||
[ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
|
||||
}>;
|
||||
};
|
||||
|
||||
export async function dispatchDatasetSearch(
|
||||
props: DatasetSearchProps
|
||||
@@ -38,7 +35,6 @@ export async function dispatchDatasetSearch(
|
||||
const {
|
||||
teamId,
|
||||
histories,
|
||||
module,
|
||||
params: {
|
||||
datasets = [],
|
||||
similarity,
|
||||
@@ -77,8 +73,6 @@ export async function dispatchDatasetSearch(
|
||||
histories: getHistories(6, histories)
|
||||
});
|
||||
|
||||
// console.log(concatQueries, rewriteQuery, aiExtensionResult);
|
||||
|
||||
// get vector
|
||||
const vectorModel = getVectorModel(datasets[0]?.vectorModel?.model);
|
||||
|
||||
@@ -97,18 +91,18 @@ export async function dispatchDatasetSearch(
|
||||
limit,
|
||||
datasetIds: datasets.map((item) => item.datasetId),
|
||||
searchMode,
|
||||
usingReRank: usingReRank && (await checkTeamReRankPermission(teamId))
|
||||
usingReRank
|
||||
});
|
||||
|
||||
// count bill results
|
||||
// vector
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
const { total, modelName } = formatModelPrice2Store({
|
||||
model: vectorModel.model,
|
||||
charsLength,
|
||||
modelType: ModelTypeEnum.vector
|
||||
inputLen: charsLength,
|
||||
type: ModelTypeEnum.vector
|
||||
});
|
||||
const responseData: moduleDispatchResType & { totalPoints: number } = {
|
||||
totalPoints,
|
||||
const responseData: moduleDispatchResType & { price: number } = {
|
||||
price: total,
|
||||
query: concatQueries.join('\n'),
|
||||
model: modelName,
|
||||
charsLength,
|
||||
@@ -117,42 +111,28 @@ export async function dispatchDatasetSearch(
|
||||
searchMode,
|
||||
searchUsingReRank: searchUsingReRank
|
||||
};
|
||||
const moduleDispatchBills: ChatModuleBillType[] = [
|
||||
{
|
||||
totalPoints,
|
||||
moduleName: module.name,
|
||||
model: modelName,
|
||||
charsLength
|
||||
}
|
||||
];
|
||||
|
||||
if (aiExtensionResult) {
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
const { total, modelName } = formatModelPrice2Store({
|
||||
model: aiExtensionResult.model,
|
||||
charsLength: aiExtensionResult.charsLength,
|
||||
modelType: ModelTypeEnum.llm
|
||||
inputLen: aiExtensionResult.inputTokens,
|
||||
outputLen: aiExtensionResult.outputTokens,
|
||||
type: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
responseData.totalPoints += totalPoints;
|
||||
responseData.charsLength = aiExtensionResult.charsLength;
|
||||
responseData.price += total;
|
||||
responseData.inputTokens = aiExtensionResult.inputTokens;
|
||||
responseData.outputTokens = aiExtensionResult.outputTokens;
|
||||
responseData.extensionModel = modelName;
|
||||
responseData.extensionResult =
|
||||
aiExtensionResult.extensionQueries?.join('\n') ||
|
||||
JSON.stringify(aiExtensionResult.extensionQueries);
|
||||
|
||||
moduleDispatchBills.push({
|
||||
totalPoints,
|
||||
moduleName: 'core.module.template.Query extension',
|
||||
model: modelName,
|
||||
charsLength: aiExtensionResult.charsLength
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
isEmpty: searchRes.length === 0 ? true : undefined,
|
||||
unEmpty: searchRes.length > 0 ? true : undefined,
|
||||
quoteQA: searchRes,
|
||||
responseData,
|
||||
moduleDispatchBills
|
||||
responseData
|
||||
};
|
||||
}
|
||||
|
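dispatchDatasetSearch above assembles its cost from two sources: the vector model charge for embedding the concatenated queries, plus an extra LLM charge when query extension (aiExtensionResult) ran. A condensed sketch of that accumulation, treating the price formatters as black boxes and using a generic Charge shape rather than the real return types:

```ts
// Sketch: combine the embedding charge with an optional query-extension charge.
type Charge = { total: number; modelName: string };

function buildSearchBill(
  vectorCharge: Charge,
  extensionCharge?: Charge & { extensionQueries?: string[] }
) {
  const responseData: Record<string, any> = {
    price: vectorCharge.total,
    model: vectorCharge.modelName
  };
  if (extensionCharge) {
    // query extension adds its own LLM cost on top of the vector cost
    responseData.price += extensionCharge.total;
    responseData.extensionModel = extensionCharge.modelName;
    responseData.extensionResult = extensionCharge.extensionQueries?.join('\n') ?? '';
  }
  return responseData;
}
```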
@@ -23,12 +23,11 @@ import { dispatchContentExtract } from './agent/extract';
|
||||
import { dispatchHttpRequest } from './tools/http';
|
||||
import { dispatchHttp468Request } from './tools/http468';
|
||||
import { dispatchAppRequest } from './tools/runApp';
|
||||
import { dispatchQueryExtension } from './tools/queryExternsion';
|
||||
import { dispatchCFR } from './tools/cfr';
|
||||
import { dispatchRunPlugin } from './plugin/run';
|
||||
import { dispatchPluginInput } from './plugin/runInput';
|
||||
import { dispatchPluginOutput } from './plugin/runOutput';
|
||||
import { valueTypeFormat } from './utils';
|
||||
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';
|
||||
|
||||
const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
|
||||
[FlowNodeTypeEnum.historyNode]: dispatchHistory,
|
||||
@@ -45,7 +44,7 @@ const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
|
||||
[FlowNodeTypeEnum.pluginModule]: dispatchRunPlugin,
|
||||
[FlowNodeTypeEnum.pluginInput]: dispatchPluginInput,
|
||||
[FlowNodeTypeEnum.pluginOutput]: dispatchPluginOutput,
|
||||
[FlowNodeTypeEnum.queryExtension]: dispatchQueryExtension,
|
||||
[FlowNodeTypeEnum.cfr]: dispatchCFR,
|
||||
|
||||
// none
|
||||
[FlowNodeTypeEnum.userGuide]: () => Promise.resolve()
|
||||
@@ -83,19 +82,16 @@ export async function dispatchModules({
|
||||
// let storeData: Record<string, any> = {}; // after module used
|
||||
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
|
||||
let chatAnswerText = ''; // AI answer
|
||||
let chatModuleBills: ChatModuleBillType[] = [];
|
||||
let runningTime = Date.now();
|
||||
|
||||
function pushStore(
|
||||
{ inputs = [] }: RunningModuleItemType,
|
||||
{
|
||||
answerText = '',
|
||||
responseData,
|
||||
moduleDispatchBills
|
||||
responseData
|
||||
}: {
|
||||
answerText?: string;
|
||||
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
|
||||
moduleDispatchBills?: ChatModuleBillType[];
|
||||
}
|
||||
) {
|
||||
const time = Date.now();
|
||||
@@ -109,9 +105,6 @@ export async function dispatchModules({
|
||||
});
|
||||
}
|
||||
}
|
||||
if (moduleDispatchBills) {
|
||||
chatModuleBills = chatModuleBills.concat(moduleDispatchBills);
|
||||
}
|
||||
runningTime = time;
|
||||
|
||||
const isResponseAnswerText =
|
||||
@@ -165,7 +158,6 @@ export async function dispatchModules({
|
||||
const filterModules = nextRunModules.filter((module) => {
|
||||
if (set.has(module.moduleId)) return false;
|
||||
set.add(module.moduleId);
|
||||
``;
|
||||
return true;
|
||||
});
|
||||
|
||||
@@ -207,7 +199,8 @@ export async function dispatchModules({
|
||||
user,
|
||||
stream,
|
||||
detail,
|
||||
module,
|
||||
outputs: module.outputs,
|
||||
inputs: module.inputs,
|
||||
params
|
||||
};
|
||||
|
||||
@@ -244,11 +237,10 @@ export async function dispatchModules({
|
||||
? params[ModuleOutputKeyEnum.userChatInput]
|
||||
: undefined,
|
||||
...dispatchRes,
|
||||
[ModuleOutputKeyEnum.responseData]: formatResponseData,
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]:
|
||||
dispatchRes[ModuleOutputKeyEnum.moduleDispatchBills]
|
||||
[ModuleOutputKeyEnum.responseData]: formatResponseData
|
||||
});
|
||||
}
|
||||
|
||||
// start process with initInput
|
||||
const initModules = runningModules.filter((item) => initRunningModuleType[item.flowType]);
|
||||
|
||||
@@ -274,8 +266,7 @@ export async function dispatchModules({
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.answerText]: chatAnswerText,
|
||||
[ModuleOutputKeyEnum.responseData]: chatResponse,
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: chatModuleBills
|
||||
[ModuleOutputKeyEnum.responseData]: chatResponse
|
||||
};
|
||||
}
|
||||
|
||||
|
@@ -1,7 +1,4 @@
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { dispatchModules } from '../index';
|
||||
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
|
||||
import {
|
||||
@@ -17,9 +14,10 @@ type RunPluginProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.pluginId]: string;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type RunPluginResponse = ModuleDispatchResponse<{
|
||||
type RunPluginResponse = {
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
}>;
|
||||
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
|
||||
};
|
||||
|
||||
export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
|
||||
const {
|
||||
@@ -60,7 +58,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
|
||||
return params;
|
||||
})();
|
||||
|
||||
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
|
||||
const { responseData, answerText } = await dispatchModules({
|
||||
...props,
|
||||
modules: plugin.modules.map((module) => ({
|
||||
...module,
|
||||
@@ -78,9 +76,9 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
|
||||
return {
|
||||
answerText,
|
||||
// responseData, // debug
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
responseData: {
|
||||
moduleLogo: plugin.avatar,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
price: responseData.reduce((sum, item) => sum + (item.price || 0), 0),
|
||||
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0),
|
||||
pluginOutput: output?.pluginOutput,
|
||||
pluginDetail:
|
||||
@@ -91,14 +89,6 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
|
||||
})
|
||||
: undefined
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: plugin.name,
|
||||
totalPoints: moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
model: plugin.name,
|
||||
charsLength: 0
|
||||
}
|
||||
],
|
||||
...(output ? output.pluginOutput : {})
|
||||
};
|
||||
};
|
||||
|
@@ -14,7 +14,7 @@ export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResp
|
||||
|
||||
return {
|
||||
responseData: {
|
||||
totalPoints: 0,
|
||||
price: 0,
|
||||
pluginOutput: params
|
||||
}
|
||||
};
|
||||
|
64
projects/app/src/service/moduleDispatch/tools/cfr.ts
Normal file
@@ -0,0 +1,64 @@
import type { ChatItemType, moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
import { queryCfr } from '@fastgpt/service/core/ai/functions/cfr';
import { getHistories } from '../utils';

type Props = ModuleDispatchProps<{
  [ModuleInputKeyEnum.aiModel]: string;
  [ModuleInputKeyEnum.aiSystemPrompt]?: string;
  [ModuleInputKeyEnum.history]?: ChatItemType[] | number;
  [ModuleInputKeyEnum.userChatInput]: string;
}>;
type Response = {
  [ModuleOutputKeyEnum.text]: string;
  [ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
};

export const dispatchCFR = async ({
  histories,
  params: { model, systemPrompt, history, userChatInput }
}: Props): Promise<Response> => {
  if (!userChatInput) {
    return Promise.reject('Question is empty');
  }

  // none
  // first chat and no system prompt
  if (systemPrompt === 'none' || (histories.length === 0 && !systemPrompt)) {
    return {
      [ModuleOutputKeyEnum.text]: userChatInput
    };
  }

  const cfrModel = getLLMModel(model);
  const chatHistories = getHistories(history, histories);

  const { cfrQuery, inputTokens, outputTokens } = await queryCfr({
    chatBg: systemPrompt,
    query: userChatInput,
    histories: chatHistories,
    model: cfrModel.model
  });

  const { total, modelName } = formatModelPrice2Store({
    model: cfrModel.model,
    inputLen: inputTokens,
    outputLen: outputTokens,
    type: ModelTypeEnum.llm
  });

  return {
    [ModuleOutputKeyEnum.responseData]: {
      price: total,
      model: modelName,
      inputTokens,
      outputTokens,
      query: userChatInput,
      textOutput: cfrQuery
    },
    [ModuleOutputKeyEnum.text]: cfrQuery
  };
};
|
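The new cfr.ts module above rewrites the user question against the chat history (coreference resolution) before retrieval. It short-circuits and returns the question unchanged when systemPrompt is 'none' or when this is the first turn with no background, and otherwise bills the rewrite by input and output tokens. A hypothetical call, with the props reduced to the fields the module actually reads (the model id and prompt text are made up, and dispatchCFR / ModuleOutputKeyEnum are the exports shown above):

```ts
// Hypothetical invocation; a real call goes through the module dispatch pipeline
// and receives the full ModuleDispatchProps, not this trimmed object.
const rewritten = await dispatchCFR({
  histories: [], // prior ChatItemType[] turns; empty here for brevity
  params: {
    model: 'gpt-3.5-turbo', // assumed model id
    systemPrompt: 'The user is asking about FastGPT deployment.', // chat background
    history: 6, // number of turns to include
    userChatInput: 'Which databases does it support?' // follow-up question to resolve
  }
} as any);
// rewritten[ModuleOutputKeyEnum.text] is the self-contained query;
// rewritten[ModuleOutputKeyEnum.responseData] carries price, model and token counts.
```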
@@ -1,8 +1,5 @@
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import {
|
||||
DYNAMIC_INPUT_KEY,
|
||||
ModuleInputKeyEnum,
|
||||
@@ -19,10 +16,11 @@ type HttpRequestProps = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.httpHeaders]: string;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type HttpResponse = ModuleDispatchResponse<{
|
||||
type HttpResponse = {
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
};
|
||||
|
||||
const flatDynamicParams = (params: Record<string, any>) => {
|
||||
const dynamicParams = params[DYNAMIC_INPUT_KEY];
|
||||
@@ -40,7 +38,7 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
|
||||
chatId,
|
||||
responseChatItemId,
|
||||
variables,
|
||||
module: { outputs },
|
||||
outputs,
|
||||
params: {
|
||||
system_httpMethod: httpMethod = 'POST',
|
||||
system_httpReqUrl: httpReqUrl,
|
||||
@@ -99,8 +97,8 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
|
||||
}
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: 0,
|
||||
responseData: {
|
||||
price: 0,
|
||||
body: formatBody,
|
||||
httpResult: response
|
||||
},
|
||||
@@ -111,8 +109,8 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.failed]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: 0,
|
||||
responseData: {
|
||||
price: 0,
|
||||
body: formatBody,
|
||||
httpResult: { error }
|
||||
}
|
||||
|
@@ -1,7 +1,5 @@
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import {
|
||||
DYNAMIC_INPUT_KEY,
|
||||
ModuleInputKeyEnum,
|
||||
@@ -26,10 +24,11 @@ type HttpRequestProps = ModuleDispatchProps<{
|
||||
[DYNAMIC_INPUT_KEY]: Record<string, any>;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
type HttpResponse = ModuleDispatchResponse<{
|
||||
type HttpResponse = {
|
||||
[ModuleOutputKeyEnum.failed]?: boolean;
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
|
||||
[key: string]: any;
|
||||
}>;
|
||||
};
|
||||
|
||||
const UNDEFINED_SIGN = 'UNDEFINED_SIGN';
|
||||
|
||||
@@ -39,7 +38,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
chatId,
|
||||
responseChatItemId,
|
||||
variables,
|
||||
module: { outputs },
|
||||
outputs,
|
||||
histories,
|
||||
params: {
|
||||
system_httpMethod: httpMethod = 'POST',
|
||||
@@ -120,8 +119,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
}
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: 0,
|
||||
responseData: {
|
||||
price: 0,
|
||||
params: Object.keys(params).length > 0 ? params : undefined,
|
||||
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
|
||||
headers: Object.keys(headers).length > 0 ? headers : undefined,
|
||||
@@ -132,8 +131,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
} catch (error) {
|
||||
return {
|
||||
[ModuleOutputKeyEnum.failed]: true,
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints: 0,
|
||||
responseData: {
|
||||
price: 0,
|
||||
params: Object.keys(params).length > 0 ? params : undefined,
|
||||
body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
|
||||
headers: Object.keys(headers).length > 0 ? headers : undefined,
|
||||
|
@@ -1,77 +0,0 @@
|
||||
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
|
||||
import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
|
||||
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
|
||||
import { queryExtension } from '@fastgpt/service/core/ai/functions/queryExtension';
|
||||
import { getHistories } from '../utils';
|
||||
import { hashStr } from '@fastgpt/global/common/string/tools';
|
||||
|
||||
type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.aiModel]: string;
|
||||
[ModuleInputKeyEnum.aiSystemPrompt]?: string;
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
|
||||
[ModuleInputKeyEnum.userChatInput]: string;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
[ModuleOutputKeyEnum.text]: string;
|
||||
}>;
|
||||
|
||||
export const dispatchQueryExtension = async ({
|
||||
histories,
|
||||
module,
|
||||
params: { model, systemPrompt, history, userChatInput }
|
||||
}: Props): Promise<Response> => {
|
||||
if (!userChatInput) {
|
||||
return Promise.reject('Question is empty');
|
||||
}
|
||||
|
||||
const queryExtensionModel = getLLMModel(model);
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
const { extensionQueries, charsLength } = await queryExtension({
|
||||
chatBg: systemPrompt,
|
||||
query: userChatInput,
|
||||
histories: chatHistories,
|
||||
model: queryExtensionModel.model
|
||||
});
|
||||
|
||||
extensionQueries.unshift(userChatInput);
|
||||
|
||||
const { totalPoints, modelName } = formatModelChars2Points({
|
||||
model: queryExtensionModel.model,
|
||||
charsLength,
|
||||
modelType: ModelTypeEnum.llm
|
||||
});
|
||||
|
||||
const set = new Set<string>();
|
||||
const filterSameQueries = extensionQueries.filter((item) => {
|
||||
// strip all punctuation, whitespace and the like, and compare only the remaining text
|
||||
const str = hashStr(item.replace(/[^\p{L}\p{N}]/gu, ''));
|
||||
if (set.has(str)) return false;
|
||||
set.add(str);
|
||||
return true;
|
||||
});
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
charsLength,
|
||||
query: userChatInput,
|
||||
textOutput: JSON.stringify(filterSameQueries)
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: module.name,
|
||||
totalPoints,
|
||||
model: modelName,
|
||||
charsLength
|
||||
}
|
||||
],
|
||||
[ModuleOutputKeyEnum.text]: JSON.stringify(filterSameQueries)
|
||||
};
|
||||
};
|
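The deleted queryExternsion.ts above de-duplicated the generated queries by stripping punctuation and whitespace and comparing a hash of what remains, so near-identical rewrites collapse into one entry. The same filter in isolation, using the normalized string itself as the set key instead of hashStr (which only keeps the keys short):

```ts
// Keep the first occurrence of each query, ignoring punctuation/whitespace differences.
function dedupeQueries(queries: string[]): string[] {
  const seen = new Set<string>();
  return queries.filter((query) => {
    // drop everything that is not a letter or digit (any script), then compare
    const key = query.replace(/[^\p{L}\p{N}]/gu, '');
    if (seen.has(key)) return false;
    seen.add(key);
    return true;
  });
}

// dedupeQueries(['What is FastGPT?', 'What is FastGPT']) keeps only the first entry,
// while 'What is FastGPT?' and 'what is fastgpt' stay separate because case differs.
```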
@@ -1,8 +1,5 @@
|
||||
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
|
||||
import type {
|
||||
ModuleDispatchProps,
|
||||
ModuleDispatchResponse
|
||||
} from '@fastgpt/global/core/module/type.d';
|
||||
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
|
||||
import { SelectAppItemType } from '@fastgpt/global/core/module/type';
|
||||
import { dispatchModules } from '../index';
|
||||
import { MongoApp } from '@fastgpt/service/core/app/schema';
|
||||
@@ -18,10 +15,11 @@ type Props = ModuleDispatchProps<{
|
||||
[ModuleInputKeyEnum.history]?: ChatItemType[] | number;
|
||||
app: SelectAppItemType;
|
||||
}>;
|
||||
type Response = ModuleDispatchResponse<{
|
||||
type Response = {
|
||||
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType[];
|
||||
[ModuleOutputKeyEnum.answerText]: string;
|
||||
[ModuleOutputKeyEnum.history]: ChatItemType[];
|
||||
}>;
|
||||
};
|
||||
|
||||
export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
const {
|
||||
@@ -32,7 +30,6 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
histories,
|
||||
params: { userChatInput, history, app }
|
||||
} = props;
|
||||
let start = Date.now();
|
||||
|
||||
if (!userChatInput) {
|
||||
return Promise.reject('Input is empty');
|
||||
@@ -59,7 +56,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
|
||||
const chatHistories = getHistories(history, histories);
|
||||
|
||||
const { responseData, moduleDispatchBills, answerText } = await dispatchModules({
|
||||
const { responseData, answerText } = await dispatchModules({
|
||||
...props,
|
||||
appId: app.id,
|
||||
modules: appData.modules,
|
||||
@@ -81,20 +78,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
|
||||
]);
|
||||
|
||||
return {
|
||||
[ModuleOutputKeyEnum.responseData]: {
|
||||
moduleLogo: appData.avatar,
|
||||
query: userChatInput,
|
||||
textOutput: answerText,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
|
||||
},
|
||||
[ModuleOutputKeyEnum.moduleDispatchBills]: [
|
||||
{
|
||||
moduleName: appData.name,
|
||||
totalPoints: responseData.reduce((sum, item) => sum + (item.totalPoints || 0), 0),
|
||||
charsLength: 0,
|
||||
model: appData.name
|
||||
}
|
||||
],
|
||||
responseData,
|
||||
answerText: answerText,
|
||||
history: completeMessages
|
||||
};
|
||||
|
@@ -1,5 +1,5 @@
|
||||
import { startQueue } from './utils/tools';
|
||||
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/constants';
|
||||
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';
|
||||
import { MongoUser } from '@fastgpt/service/support/user/schema';
|
||||
import { connectMongo } from '@fastgpt/service/common/mongo/init';
|
||||
import { hashStr } from '@fastgpt/global/common/string/tools';
|
||||
|
9
projects/app/src/service/support/permission/auth/bill.ts
Normal file
@@ -0,0 +1,9 @@
import { GET } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';

export const authTeamBalance = async (teamId: string) => {
  if (FastGPTProUrl) {
    return GET('/support/permission/authBalance', { teamId });
  }
  return true;
};
|
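The new auth/bill.ts helper above only talks to the commercial service: when FastGPTProUrl is unset, the balance check resolves to true, so an open-source deployment never blocks training on balance. A typical guard around a paid task could look like the following; the wrapper and its error message are illustrative, not code from the queues above:

```ts
// Illustrative guard: verify the team balance before queueing paid work.
async function ensureTeamCanSpend(teamId: string) {
  try {
    await authTeamBalance(teamId); // resolves to true when no pro service is configured
  } catch (error) {
    // a rejected check means the pro service reported an insufficient balance
    throw new Error(`Team ${teamId} has insufficient balance`);
  }
}
```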
@@ -24,7 +24,6 @@ export async function authDatasetData({
|
||||
|
||||
const data: DatasetDataItemType = {
|
||||
id: String(datasetData._id),
|
||||
teamId: datasetData.teamId,
|
||||
q: datasetData.q,
|
||||
a: datasetData.a,
|
||||
chunkIndex: datasetData.chunkIndex,
|
||||
|
@@ -6,7 +6,7 @@ import type {
AuthOutLinkResponse
} from '@fastgpt/global/support/outLink/api.d';
import { authOutLinkValid } from '@fastgpt/service/support/permission/auth/outLink';
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
import { getUserAndAuthBalance } from '@fastgpt/service/support/user/controller';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { OutLinkErrEnum } from '@fastgpt/global/common/error/code/outLink';
import { OutLinkSchema } from '@fastgpt/global/support/outLink/type';
@@ -58,15 +58,13 @@ export async function authOutLinkChatStart({
// get outLink and app
const { shareChat, appId } = await authOutLinkValid({ shareId });

// check ai points and chat limit
const [{ user }, { uid }] = await Promise.all([
getUserChatInfoAndAuthTeamPoints(shareChat.tmbId),
// check balance and chat limit
const [user, { uid }] = await Promise.all([
getUserAndAuthBalance({ tmbId: shareChat.tmbId, minBalance: 0 }),
authOutLinkChatLimit({ outLink: shareChat, ip, outLinkUid, question })
]);

return {
teamId: shareChat.teamId,
tmbId: shareChat.tmbId,
authType: AuthUserTypeEnum.token,
responseDetail: shareChat.responseDetail,
user,
@@ -1,18 +0,0 @@
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { TeamMemberWithUserSchema } from '@fastgpt/global/support/user/team/type';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
import { checkTeamAIPoints } from '../teamLimit';

export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
const tmb = (await MongoTeamMember.findById(tmbId, 'teamId userId').populate(
'userId',
'timezone openaiAccount'
)) as TeamMemberWithUserSchema;
if (!tmb) return Promise.reject(UserErrEnum.unAuthUser);

await checkTeamAIPoints(tmb.teamId);

return {
user: tmb.userId
};
}
@@ -1,44 +0,0 @@
import { POST } from '@fastgpt/service/common/api/plusRequest';
import type {
AuthOutLinkChatProps,
AuthOutLinkLimitProps,
AuthOutLinkInitProps,
AuthOutLinkResponse
} from '@fastgpt/global/support/outLink/api.d';
import { getUserChatInfoAndAuthTeamPoints } from './team';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
export function authOutLinkInit(data: AuthOutLinkInitProps): Promise<AuthOutLinkResponse> {
if (!global.feConfigs?.isPlus) return Promise.resolve({ uid: data.outLinkUid });
return POST<AuthOutLinkResponse>('/support/outLink/authInit', data);
}
export function authOutLinkChatLimit(data: AuthOutLinkLimitProps): Promise<AuthOutLinkResponse> {
if (!global.feConfigs?.isPlus) return Promise.resolve({ uid: data.outLinkUid });
return POST<AuthOutLinkResponse>('/support/outLink/authChatStart', data);
}

export async function authTeamShareChatStart({
teamId,
ip,
outLinkUid,
question
}: AuthOutLinkChatProps & {
teamId: string;
}) {
// get outLink and app
const res: any = await MongoTeam.findById(teamId);

// check balance and chat limit
const tmb = await MongoTeamMember.findOne({ teamId, userId: String(res.ownerId) });

if (!tmb) {
throw new Error('can not find it');
}

const { user } = await getUserChatInfoAndAuthTeamPoints(String(tmb._id));

return {
user,
uid: outLinkUid
};
}
@@ -1,102 +0,0 @@
import { getVectorCountByTeamId } from '@fastgpt/service/common/vectorStore/controller';
import { getTeamSubPlans, getTeamStandPlan } from '@fastgpt/service/support/wallet/sub/utils';
import { getStandardSubPlan } from '../wallet/sub/utils';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';

export const checkDatasetLimit = async ({
teamId,
insertLen = 0
}: {
teamId: string;
insertLen?: number;
}) => {
const [{ totalPoints, usedPoints, datasetMaxSize }, usedSize] = await Promise.all([
getTeamSubPlans({ teamId, standardPlans: getStandardSubPlan() }),
getVectorCountByTeamId(teamId)
]);

if (usedSize + insertLen >= datasetMaxSize) {
return Promise.reject(TeamErrEnum.datasetSizeNotEnough);
}

if (usedPoints >= totalPoints) {
return Promise.reject(TeamErrEnum.aiPointsNotEnough);
}
return;
};

export const checkTeamAIPoints = async (teamId: string) => {
const { totalPoints, usedPoints } = await getTeamSubPlans({
teamId,
standardPlans: getStandardSubPlan()
});

if (usedPoints >= totalPoints) {
return Promise.reject(TeamErrEnum.aiPointsNotEnough);
}

return {
totalPoints,
usedPoints
};
};

export const checkTeamDatasetLimit = async (teamId: string) => {
const [{ standardConstants }, datasetCount] = await Promise.all([
getTeamStandPlan({ teamId, standardPlans: getStandardSubPlan() }),
MongoDataset.countDocuments({
teamId,
type: DatasetTypeEnum.dataset
})
]);

if (standardConstants && datasetCount >= standardConstants.maxDatasetAmount) {
return Promise.reject(TeamErrEnum.datasetAmountNotEnough);
}
};
export const checkTeamAppLimit = async (teamId: string) => {
const [{ standardConstants }, appCount] = await Promise.all([
getTeamStandPlan({ teamId, standardPlans: getStandardSubPlan() }),
MongoApp.count({ teamId })
]);

if (standardConstants && appCount >= standardConstants.maxAppAmount) {
return Promise.reject(TeamErrEnum.appAmountNotEnough);
}
};
export const checkTeamPluginLimit = async (teamId: string) => {
const [{ standardConstants }, pluginCount] = await Promise.all([
getTeamStandPlan({ teamId, standardPlans: getStandardSubPlan() }),
MongoPlugin.count({ teamId })
]);

if (standardConstants && pluginCount >= standardConstants.maxAppAmount) {
return Promise.reject(TeamErrEnum.pluginAmountNotEnough);
}
};

export const checkTeamReRankPermission = async (teamId: string) => {
const { standardConstants } = await getTeamStandPlan({
teamId,
standardPlans: getStandardSubPlan()
});

if (standardConstants && !standardConstants?.permissionReRank) {
return false;
}
return true;
};
export const checkTeamWebSyncPermission = async (teamId: string) => {
const { standardConstants } = await getTeamStandPlan({
teamId,
standardPlans: getStandardSubPlan()
});

if (standardConstants && !standardConstants?.permissionWebsiteSync) {
return Promise.reject(TeamErrEnum.websiteSyncNotEnough);
}
};
23 projects/app/src/service/support/wallet/bill/controller.ts Normal file
@@ -0,0 +1,23 @@
import { ConcatBillProps, CreateBillProps } from '@fastgpt/global/support/wallet/bill/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';

export function createBill(data: CreateBillProps) {
if (!FastGPTProUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/createBill', data);
} catch (error) {}
}
export function concatBill(data: ConcatBillProps) {
if (!FastGPTProUrl) return;
if (data.total === 0) {
addLog.info('0 Bill', data);
}
try {
POST('/support/wallet/bill/concatBill', data);
} catch (error) {}
}
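
A usage sketch with placeholder values (not taken from the diff): both helpers are fire-and-forget — they no-op when FastGPTProUrl is unset and swallow request errors — so callers do not await them.

// Sketch only: ids and amounts are illustrative.
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';

const teamId = '000000000000000000000001';
const tmbId = '000000000000000000000002';

createBill({
  teamId,
  tmbId,
  appName: 'wallet.moduleName.index', // i18n key, as used by push.ts below
  total: 0,
  source: BillSourceEnum.fastgpt,
  list: []
});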
327 projects/app/src/service/support/wallet/bill/push.ts Normal file
@@ -0,0 +1,327 @@
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { ModelTypeEnum } from '@/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { addLog } from '@fastgpt/service/common/system/log';
import { PostReRankProps } from '@fastgpt/global/core/ai/api';
import { createBill, concatBill } from './controller';
import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';

export const pushChatBill = ({
appName,
appId,
teamId,
tmbId,
source,
response
}: {
appName: string;
appId: string;
teamId: string;
tmbId: string;
source: `${BillSourceEnum}`;
response: ChatHistoryItemResType[];
}) => {
const total = response.reduce((sum, item) => sum + (item.price || 0), 0);

createBill({
teamId,
tmbId,
appName,
appId,
total,
source,
list: response.map((item) => ({
moduleName: item.moduleName,
amount: item.price || 0,
model: item.model,
inputTokens: item.inputTokens,
outputTokens: item.outputTokens,
charsLength: item.charsLength
}))
});
addLog.info(`finish completions`, {
source,
teamId,
tmbId,
price: formatStorePrice2Read(total)
});
return { total };
};
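
A call-site sketch (assumed wiring; the real caller sits in the chat API layer): once module dispatch finishes, the per-module prices are rolled up into a single bill.

// `dispatchResults` stands in for the ChatHistoryItemResType[] produced by the
// chat dispatch; the ids and app name are placeholders.
const { total } = pushChatBill({
  appName: 'My app',
  appId: '000000000000000000000003',
  teamId: '000000000000000000000001',
  tmbId: '000000000000000000000002',
  source: BillSourceEnum.fastgpt,
  response: dispatchResults
});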

export const pushQABill = async ({
teamId,
tmbId,
model,
charsLength,
billId
}: {
teamId: string;
tmbId: string;
model: string;
charsLength: number;
billId: string;
}) => {
// calculate the price
const { total } = formatModelPrice2Store({
model,
inputLen: charsLength,
type: ModelTypeEnum.llm
});

concatBill({
billId,
teamId,
tmbId,
total,
charsLength,
listIndex: 1
});

return { total };
};

export const pushGenerateVectorBill = ({
billId,
teamId,
tmbId,
charsLength,
model,
source = BillSourceEnum.fastgpt,
extensionModel,
extensionInputTokens,
extensionOutputTokens
}: {
billId?: string;
teamId: string;
tmbId: string;
charsLength: number;
model: string;
source?: `${BillSourceEnum}`;

extensionModel?: string;
extensionInputTokens?: number;
extensionOutputTokens?: number;
}) => {
const { total: totalVector, modelName: vectorModelName } = formatModelPrice2Store({
model,
inputLen: charsLength,
type: ModelTypeEnum.vector
});

const { extensionTotal, extensionModelName } = (() => {
if (!extensionModel || !extensionInputTokens || !extensionOutputTokens)
return {
extensionTotal: 0,
extensionModelName: ''
};
const { total, modelName } = formatModelPrice2Store({
model: extensionModel,
inputLen: extensionInputTokens,
outputLen: extensionOutputTokens,
type: ModelTypeEnum.llm
});
return {
extensionTotal: total,
extensionModelName: modelName
};
})();

const total = totalVector + extensionTotal;

// insert the Bill record
if (billId) {
concatBill({
teamId,
tmbId,
total: totalVector,
billId,
charsLength,
listIndex: 0
});
} else {
createBill({
teamId,
tmbId,
appName: 'wallet.moduleName.index',
total,
source,
list: [
{
moduleName: 'wallet.moduleName.index',
amount: totalVector,
model: vectorModelName,
charsLength
},
...(extensionModel !== undefined
? [
{
moduleName: 'core.module.template.Query extension',
amount: extensionTotal,
model: extensionModelName,
inputTokens: extensionInputTokens,
outputTokens: extensionOutputTokens
}
]
: [])
]
});
}
return { total };
};
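
Two call patterns, sketched with placeholder values: with a billId the vector cost is concatenated onto an existing bill (listIndex 0); without one a standalone bill is created, optionally carrying a query-extension item.

// Inside a training job that already opened a bill (billId, ids and model are placeholders):
pushGenerateVectorBill({ billId, teamId, tmbId, charsLength: 1024, model: vectorModel });
// One-off insert with no prior bill:
pushGenerateVectorBill({ teamId, tmbId, charsLength: 1024, model: vectorModel });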

export const pushQuestionGuideBill = ({
inputTokens,
outputTokens,
teamId,
tmbId
}: {
inputTokens: number;
outputTokens: number;
teamId: string;
tmbId: string;
}) => {
const qgModel = global.llmModels[0];
const { total, modelName } = formatModelPrice2Store({
inputLen: inputTokens,
outputLen: outputTokens,
model: qgModel.model,
type: ModelTypeEnum.llm
});

createBill({
teamId,
tmbId,
appName: 'wallet.bill.Next Step Guide',
total,
source: BillSourceEnum.fastgpt,
list: [
{
moduleName: 'wallet.bill.Next Step Guide',
amount: total,
model: modelName,
inputTokens,
outputTokens
}
]
});
};

export function pushAudioSpeechBill({
appName = 'wallet.bill.Audio Speech',
model,
charsLength,
teamId,
tmbId,
source = BillSourceEnum.fastgpt
}: {
appName?: string;
model: string;
charsLength: number;
teamId: string;
tmbId: string;
source: `${BillSourceEnum}`;
}) {
const { total, modelName } = formatModelPrice2Store({
model,
inputLen: charsLength,
type: ModelTypeEnum.audioSpeech
});

createBill({
teamId,
tmbId,
appName,
total,
source,
list: [
{
moduleName: appName,
amount: total,
model: modelName,
charsLength
}
]
});
}

export function pushWhisperBill({
teamId,
tmbId,
duration
}: {
teamId: string;
tmbId: string;
duration: number;
}) {
const whisperModel = global.whisperModel;

if (!whisperModel) return;

const { total, modelName } = formatModelPrice2Store({
model: whisperModel.model,
inputLen: duration,
type: ModelTypeEnum.whisper,
multiple: 60
});

const name = 'wallet.bill.Whisper';

createBill({
teamId,
tmbId,
appName: name,
total,
source: BillSourceEnum.fastgpt,
list: [
{
moduleName: name,
amount: total,
model: modelName,
duration
}
]
});
}

export function pushReRankBill({
teamId,
tmbId,
source,
inputs
}: {
teamId: string;
tmbId: string;
source: `${BillSourceEnum}`;
inputs: PostReRankProps['inputs'];
}) {
const reRankModel = global.reRankModels[0];
if (!reRankModel) return { total: 0 };

const charsLength = inputs.reduce((sum, item) => sum + item.text.length, 0);

const { total, modelName } = formatModelPrice2Store({
model: reRankModel.model,
inputLen: charsLength,
type: ModelTypeEnum.rerank
});
const name = 'wallet.bill.ReRank';

createBill({
teamId,
tmbId,
appName: name,
total,
source,
list: [
{
moduleName: name,
amount: total,
model: modelName,
charsLength
}
]
});

return { total };
}
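
A small worked example of the rerank pricing input (assumed passages; the item shape is inferred only from the item.text access above): the candidate texts, not the query, determine charsLength.

// 'first candidate passage'.length === 23, 'second candidate passage'.length === 24
const passages = [{ text: 'first candidate passage' }, { text: 'second candidate passage' }];
const charsLength = passages.reduce((sum, item) => sum + item.text.length, 0); // 47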
54 projects/app/src/service/support/wallet/bill/utils.ts Normal file
@@ -0,0 +1,54 @@
import { ModelTypeEnum, getModelMap } from '@/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { BillSourceEnum, PRICE_SCALE } from '@fastgpt/global/support/wallet/bill/constants';

export function authType2BillSource({
authType,
shareId,
source
}: {
authType?: `${AuthUserTypeEnum}`;
shareId?: string;
source?: `${BillSourceEnum}`;
}) {
if (source) return source;
if (shareId) return BillSourceEnum.shareLink;
if (authType === AuthUserTypeEnum.apikey) return BillSourceEnum.api;
return BillSourceEnum.fastgpt;
}

export const formatModelPrice2Store = ({
model,
inputLen = 0,
outputLen = 0,
type,
multiple = 1000
}: {
model: string;
inputLen: number;
outputLen?: number;
type: `${ModelTypeEnum}`;
multiple?: number;
}) => {
const modelData = getModelMap?.[type]?.(model);
if (!modelData)
return {
inputTotal: 0,
outputTotal: 0,
total: 0,
modelName: ''
};
const inputTotal = modelData.inputPrice
? Math.ceil(modelData.inputPrice * (inputLen / multiple) * PRICE_SCALE)
: 0;
const outputTotal = modelData.outputPrice
? Math.ceil(modelData.outputPrice * (outputLen / multiple) * PRICE_SCALE)
: 0;

return {
modelName: modelData.name,
inputTotal: inputTotal,
outputTotal: outputTotal,
total: inputTotal + outputTotal
};
};
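
A worked example (illustrative numbers: the real inputPrice comes from the model config, and the PRICE_SCALE value is assumed here rather than read from the constants file): prices are quoted per `multiple` characters and stored as rounded-up scaled integers.

// Assume inputPrice = 0.002 per 1000 chars and PRICE_SCALE = 100000:
// inputTotal = Math.ceil(0.002 * (1500 / 1000) * 100000) = 300
// outputLen defaults to 0, so total === inputTotal === 300 scaled units.
const priced = formatModelPrice2Store({
  model: 'gpt-3.5-turbo', // hypothetical key in the model map
  inputLen: 1500,
  type: ModelTypeEnum.llm
});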
@@ -1,23 +0,0 @@
import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';

export function createUsage(data: CreateUsageProps) {
if (!FastGPTProUrl) return;
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
try {
POST('/support/wallet/usage/createUsage', data);
} catch (error) {}
}
export function concatUsage(data: ConcatUsageProps) {
if (!FastGPTProUrl) return;
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
try {
POST('/support/wallet/usage/concatUsage', data);
} catch (error) {}
}
@@ -1,274 +0,0 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@/service/core/ai/model';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { addLog } from '@fastgpt/service/common/system/log';
import { createUsage, concatUsage } from './controller';
import { formatModelChars2Points } from '@/service/support/wallet/usage/utils';
import { ChatModuleBillType } from '@fastgpt/global/support/wallet/bill/type';

export const pushChatUsage = ({
appName,
appId,
teamId,
tmbId,
source,
moduleDispatchBills
}: {
appName: string;
appId: string;
teamId: string;
tmbId: string;
source: `${UsageSourceEnum}`;
moduleDispatchBills: ChatModuleBillType[];
}) => {
const totalPoints = moduleDispatchBills.reduce((sum, item) => sum + (item.totalPoints || 0), 0);

createUsage({
teamId,
tmbId,
appName,
appId,
totalPoints,
source,
list: moduleDispatchBills.map((item) => ({
moduleName: item.moduleName,
amount: item.totalPoints || 0,
model: item.model,
charsLength: item.charsLength
}))
});
addLog.info(`finish completions`, {
source,
teamId,
tmbId,
totalPoints
});
return { totalPoints };
};

export const pushQAUsage = async ({
teamId,
tmbId,
model,
charsLength,
billId
}: {
teamId: string;
tmbId: string;
model: string;
charsLength: number;
billId: string;
}) => {
// calculate the price
const { totalPoints } = formatModelChars2Points({
model,
modelType: ModelTypeEnum.llm,
charsLength
});

concatUsage({
billId,
teamId,
tmbId,
totalPoints,
charsLength,
listIndex: 1
});

return { totalPoints };
};

export const pushGenerateVectorUsage = ({
billId,
teamId,
tmbId,
charsLength,
model,
source = UsageSourceEnum.fastgpt,
extensionModel,
extensionCharsLength
}: {
billId?: string;
teamId: string;
tmbId: string;
charsLength: number;
model: string;
source?: `${UsageSourceEnum}`;

extensionModel?: string;
extensionCharsLength?: number;
}) => {
const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
modelType: ModelTypeEnum.vector,
model,
charsLength
});

const { extensionTotalPoints, extensionModelName } = (() => {
if (!extensionModel || !extensionCharsLength)
return {
extensionTotalPoints: 0,
extensionModelName: ''
};
const { totalPoints, modelName } = formatModelChars2Points({
modelType: ModelTypeEnum.llm,
model: extensionModel,
charsLength: extensionCharsLength
});
return {
extensionTotalPoints: totalPoints,
extensionModelName: modelName
};
})();

const totalPoints = totalVector + extensionTotalPoints;

// insert the Bill record
if (billId) {
concatUsage({
teamId,
tmbId,
totalPoints,
billId,
charsLength,
listIndex: 0
});
} else {
createUsage({
teamId,
tmbId,
appName: 'support.wallet.moduleName.index',
totalPoints,
source,
list: [
{
moduleName: 'support.wallet.moduleName.index',
amount: totalVector,
model: vectorModelName,
charsLength
},
...(extensionModel !== undefined
? [
{
moduleName: 'core.module.template.Query extension',
amount: extensionTotalPoints,
model: extensionModelName,
charsLength: extensionCharsLength
}
]
: [])
]
});
}
return { totalPoints };
};

export const pushQuestionGuideUsage = ({
charsLength,
teamId,
tmbId
}: {
charsLength: number;
teamId: string;
tmbId: string;
}) => {
const qgModel = global.llmModels[0];
const { totalPoints, modelName } = formatModelChars2Points({
charsLength,
model: qgModel.model,
modelType: ModelTypeEnum.llm
});

createUsage({
teamId,
tmbId,
appName: 'core.app.Next Step Guide',
totalPoints,
source: UsageSourceEnum.fastgpt,
list: [
{
moduleName: 'core.app.Next Step Guide',
amount: totalPoints,
model: modelName,
charsLength
}
]
});
};

export function pushAudioSpeechUsage({
appName = 'support.wallet.bill.Audio Speech',
model,
charsLength,
teamId,
tmbId,
source = UsageSourceEnum.fastgpt
}: {
appName?: string;
model: string;
charsLength: number;
teamId: string;
tmbId: string;
source: `${UsageSourceEnum}`;
}) {
const { totalPoints, modelName } = formatModelChars2Points({
model,
charsLength,
modelType: ModelTypeEnum.audioSpeech
});

createUsage({
teamId,
tmbId,
appName,
totalPoints,
source,
list: [
{
moduleName: appName,
amount: totalPoints,
model: modelName,
charsLength
}
]
});
}

export function pushWhisperUsage({
teamId,
tmbId,
duration
}: {
teamId: string;
tmbId: string;
duration: number;
}) {
const whisperModel = global.whisperModel;

if (!whisperModel) return;

const { totalPoints, modelName } = formatModelChars2Points({
model: whisperModel.model,
charsLength: duration,
modelType: ModelTypeEnum.whisper,
multiple: 60
});

const name = 'support.wallet.bill.Whisper';

createUsage({
teamId,
tmbId,
appName: name,
totalPoints,
source: UsageSourceEnum.fastgpt,
list: [
{
moduleName: name,
amount: totalPoints,
model: modelName,
duration
}
]
});
}
@@ -1,44 +0,0 @@
import { ModelTypeEnum, getModelMap } from '@/service/core/ai/model';
import { AuthUserTypeEnum } from '@fastgpt/global/support/permission/constant';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';

export function authType2UsageSource({
authType,
shareId,
source
}: {
authType?: `${AuthUserTypeEnum}`;
shareId?: string;
source?: `${UsageSourceEnum}`;
}) {
if (source) return source;
if (shareId) return UsageSourceEnum.shareLink;
if (authType === AuthUserTypeEnum.apikey) return UsageSourceEnum.api;
return UsageSourceEnum.fastgpt;
}

export const formatModelChars2Points = ({
model,
charsLength = 0,
modelType,
multiple = 1000
}: {
model: string;
charsLength: number;
modelType: `${ModelTypeEnum}`;
multiple?: number;
}) => {
const modelData = getModelMap?.[modelType]?.(model);
if (!modelData)
return {
totalPoints: 0,
modelName: ''
};

const totalPoints = (modelData.charsPointsPrice || 0) * (charsLength / multiple);

return {
modelName: modelData.name,
totalPoints
};
};
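
For comparison, a worked example of the points formula above (charsPointsPrice is illustrative; the real value comes from the model config): points scale linearly with characters and are not rounded.

// With charsPointsPrice = 2 per 1000 chars and charsLength = 1500:
// totalPoints = 2 * (1500 / 1000) = 3
const { totalPoints } = formatModelChars2Points({
  model: 'gpt-3.5-turbo', // hypothetical key in the model map
  charsLength: 1500,
  modelType: ModelTypeEnum.llm
});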
@@ -5,7 +5,6 @@ import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { addLog } from '@fastgpt/service/common/system/log';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';

type Props = {
chatId: string;
@@ -47,54 +46,61 @@ export async function saveChat({
...chat?.metadata,
...metadata
};
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';

await mongoSessionRun(async (session) => {
await MongoChatItem.insertMany(
const promise: any[] = [
MongoChatItem.insertMany(
content.map((item) => ({
chatId,
teamId,
tmbId,
appId,
...item
})),
{ session }
);
}))
)
];

if (chat) {
chat.title = title;
chat.updateTime = new Date();
chat.metadata = metadataUpdate;
await chat.save({ session });
} else {
MongoChat.create(
[
{
chatId,
teamId,
tmbId,
appId,
variables,
title,
source,
shareId,
outLinkUid,
metadata: metadataUpdate
}
],
{ session }
);
}
});
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';

if (chat) {
promise.push(
MongoChat.updateOne(
{ appId, chatId },
{
title,
updateTime: new Date(),
metadata: metadataUpdate
}
)
);
} else {
promise.push(
MongoChat.create({
chatId,
teamId,
tmbId,
appId,
variables,
title,
source,
shareId,
outLinkUid,
metadata: metadataUpdate
})
);
}

if (updateUseTime && source === ChatSourceEnum.online) {
MongoApp.findByIdAndUpdate(appId, {
updateTime: new Date()
});
promise.push(
MongoApp.findByIdAndUpdate(appId, {
updateTime: new Date()
})
);
}

await Promise.all(promise);
} catch (error) {
addLog.error(`update chat history error`, error);
}