Mirror of https://github.com/labring/FastGPT.git
synced 2025-08-01 03:48:24 +00:00
4.6.8-alpha (#804)
* perf: redirect request and err log replace
* perf: dataset openapi
* feat: session
* fix: retry input error
* feat: 468 doc sub page
* feat: standard sub
* perf: rerank tip
* perf: rerank tip
* perf: api sdk
* perf: openapi sub plan
* perf: sub ui
* fix: ts
* perf: init log
* fix: variable select
* sub page
* icon
* perf: llm model config
* perf: menu ux
* perf: system store
* perf: publish app name
* fix: init data
* perf: flow edit ux
* fix: value type format and ux
* fix prompt editor default value (#13)
  * fix prompt editor default value
  * fix prompt editor update when not focus
  * add key with variable
  ---------
  Co-authored-by: Archer <545436317@qq.com>
* fix: value type
* doc
* i18n
* import path
* home page
* perf: mongo session running
* fix: ts
* perf: use toast
* perf: flow edit
* perf: sse response
* slider ui
* fetch error
* fix prompt editor rerender when not focus by key defaultvalue (#14)
* perf: prompt editor
* feat: dataset search concat
* perf: doc
* fix: ts
* perf: doc
* fix json editor onblur value (#15)
* faq
* vector model default config
* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
@@ -1,5 +1,6 @@
+import { SERVICE_LOCAL_HOST } from '@fastgpt/service/common/system/tools';
 import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
 import { isIPv6 } from 'net';
 
 interface ConfigType {
   headers?: { [key: string]: string };
   hold?: boolean;
@@ -78,12 +79,7 @@ export function request(url: string, data: any, config: ConfigType, method: Method
   return instance
     .request({
-      baseURL: `http://${
-        process.env.HOSTNAME && isIPv6(process.env.HOSTNAME)
-          ? `[${process.env.HOSTNAME}]:${process.env.PORT || 3000}`
-          : `${process.env.HOSTNAME || 'localhost'}:${process.env.PORT || 3000}`
-      }`,
+      baseURL: `http://${SERVICE_LOCAL_HOST}`,
       url,
       method,
       data: ['POST', 'PUT'].includes(method) ? data : null,
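The call site above drops the inline IPv6-aware host construction in favor of the shared SERVICE_LOCAL_HOST constant; the HTTP-module dispatcher further down gets the same treatment. A minimal sketch of what that constant presumably computes, inferred from the removed lines (the real implementation lives in @fastgpt/service/common/system/tools):

    import { isIPv6 } from 'net';

    const hostname = process.env.HOSTNAME || 'localhost';
    const port = process.env.PORT || 3000;

    // An IPv6 literal must be wrapped in brackets inside a URL authority.
    export const SERVICE_LOCAL_HOST = isIPv6(hostname)
      ? `[${hostname}]:${port}`
      : `${hostname}:${port}`;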
@@ -6,7 +6,7 @@ export function jiebaSplit({ text }: { text: string }) {
   return (
     tokens
-      .map((item) => item.replace(/[^\u4e00-\u9fa5a-zA-Z0-9\s]/g, '').trim())
+      .map((item) => item.replace(/[\u3000-\u303f\uff00-\uffef]/g, '').trim())
      .filter((item) => item && !stopWords.has(item))
      .join(' ') || ''
  );
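The old pattern stripped every character outside CJK ideographs, ASCII alphanumerics, and whitespace; the new one removes only CJK punctuation and fullwidth/halfwidth forms (U+3000-U+303F, U+FF00-U+FFEF), so tokens in other scripts survive. A standalone illustration:

    // Old pattern also stripped katakana; the new one keeps it
    // and drops only the fullwidth comma.
    '你好,テスト'.replace(/[\u3000-\u303f\uff00-\uffef]/g, ''); // => '你好テスト'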
@@ -1,17 +1,11 @@
-export const getChatModel = (model?: string) => {
-  return global.chatModels.find((item) => item.model === model) ?? global.chatModels[0];
+export const getLLMModel = (model?: string) => {
+  return global.llmModels.find((item) => item.model === model) ?? global.llmModels[0];
 };
-export const getQAModel = (model?: string) => {
-  return global.qaModels.find((item) => item.model === model) || global.qaModels[0];
-};
-export const getCQModel = (model?: string) => {
-  return global.cqModels.find((item) => item.model === model) || global.cqModels[0];
-};
-export const getExtractModel = (model?: string) => {
-  return global.extractModels.find((item) => item.model === model) || global.extractModels[0];
-};
-export const getQGModel = (model?: string) => {
-  return global.qgModels.find((item) => item.model === model) || global.qgModels[0];
+export const getDatasetModel = (model?: string) => {
+  return (
+    global.llmModels?.filter((item) => item.datasetProcess)?.find((item) => item.model === model) ??
+    global.llmModels[0]
+  );
 };
 
 export const getVectorModel = (model?: string) => {
@@ -33,22 +27,14 @@ export function getReRankModel(model?: string) {
 }
 
 export enum ModelTypeEnum {
-  chat = 'chat',
-  qa = 'qa',
-  cq = 'cq',
-  extract = 'extract',
-  qg = 'qg',
+  llm = 'llm',
   vector = 'vector',
   audioSpeech = 'audioSpeech',
   whisper = 'whisper',
   rerank = 'rerank'
 }
 export const getModelMap = {
-  [ModelTypeEnum.chat]: getChatModel,
-  [ModelTypeEnum.qa]: getQAModel,
-  [ModelTypeEnum.cq]: getCQModel,
-  [ModelTypeEnum.extract]: getExtractModel,
-  [ModelTypeEnum.qg]: getQGModel,
+  [ModelTypeEnum.llm]: getLLMModel,
   [ModelTypeEnum.vector]: getVectorModel,
   [ModelTypeEnum.audioSpeech]: getAudioSpeechModel,
   [ModelTypeEnum.whisper]: getWhisperModel,
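All of the removed role-specific getters now funnel into the single llmModels list. A usage sketch (the model name is hypothetical):

    import { getLLMModel, getDatasetModel } from '@/service/core/ai/model';

    // Unknown names fall back to the first configured LLM.
    const chatModel = getLLMModel('gpt-3.5-turbo');
    // Dataset processing additionally filters on the datasetProcess flag.
    const qaModel = getDatasetModel('gpt-3.5-turbo');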
@@ -6,7 +6,7 @@ export const getChatModelNameListByModules = (modules: ModuleItemType[]): string
   return chatModules
     .map((item) => {
       const model = item.inputs.find((input) => input.key === 'model')?.value;
-      return global.chatModels.find((item) => item.model === model)?.name || '';
+      return global.llmModels.find((item) => item.model === model)?.name || '';
     })
    .filter(Boolean);
 };
@@ -15,6 +15,7 @@ import {
   DatasetSearchModeMap,
   SearchScoreTypeEnum
 } from '@fastgpt/global/core/dataset/constants';
+import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
 import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
 import { jiebaSplit } from '@/service/common/string/jieba';
 import { deleteDatasetDataVector } from '@fastgpt/service/common/vectorStore/controller';
@@ -33,6 +34,7 @@ import type {
   PushDatasetDataResponse
 } from '@fastgpt/global/core/dataset/api.d';
 import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
+import { getVectorModel } from '../../ai/model';
 
 export async function pushDataToTrainingQueue(
   props: {
@@ -43,7 +45,7 @@ export async function pushDataToTrainingQueue(
   const result = await pushDataListToTrainingQueue({
     ...props,
     vectorModelList: global.vectorModels,
-    qaModelList: global.qaModels
+    datasetModelList: global.llmModels
   });
 
   return result;
@@ -92,7 +94,7 @@ export async function insertData2Dataset({
     indexes.map((item) =>
       insertDatasetDataVector({
         query: item.text,
-        model,
+        model: getVectorModel(model),
         teamId,
         datasetId,
         collectionId
@@ -218,7 +220,7 @@ export async function updateData2Dataset({
     if (item.type === 'create') {
       const result = await insertDatasetDataVector({
         query: item.index.text,
-        model,
+        model: getVectorModel(model),
         teamId: mongoData.teamId,
         datasetId: mongoData.datasetId,
         collectionId: mongoData.collectionId
@@ -233,7 +235,7 @@ export async function updateData2Dataset({
         collectionId: mongoData.collectionId,
         id: item.index.dataId,
         query: item.index.text,
-        model
+        model: getVectorModel(model)
       });
       item.index.dataId = result.insertId;
 
@@ -328,14 +330,15 @@ export async function searchDatasetData(props: {
   };
   const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
     const { vectors, charsLength } = await getVectorsByText({
-      model,
+      model: getVectorModel(model),
       input: query
     });
 
     const { results } = await recallFromVectorStore({
       vectors,
       limit,
-      datasetIds
+      datasetIds,
+      efSearch: global.systemEnv?.pgHNSWEfSearch
     });
 
     // get q and a
@@ -479,6 +482,7 @@ export async function searchDatasetData(props: {
     });
 
     if (!Array.isArray(results)) {
+      usingReRank = false;
       return [];
     }
 
@@ -498,6 +502,7 @@ export async function searchDatasetData(props: {
 
     return mergeResult;
   } catch (error) {
+    usingReRank = false;
     return [];
   }
 };
@@ -585,66 +590,6 @@ export async function searchDatasetData(props: {
       fullTextRecallResults: fullTextRecallResList[0]
     };
   };
-  const rrfConcat = (
-    arr: { k: number; list: SearchDataResponseItemType[] }[]
-  ): SearchDataResponseItemType[] => {
-    arr = arr.filter((item) => item.list.length > 0);
-
-    if (arr.length === 0) return [];
-    if (arr.length === 1) return arr[0].list;
-
-    const map = new Map<string, SearchDataResponseItemType & { rrfScore: number }>();
-
-    // rrf
-    arr.forEach((item) => {
-      const k = item.k;
-
-      item.list.forEach((data, index) => {
-        const rank = index + 1;
-        const score = 1 / (k + rank);
-
-        const record = map.get(data.id);
-        if (record) {
-          // merge the two score lists; for scores of the same type, keep the larger value
-          const concatScore = [...record.score];
-          for (const dataItem of data.score) {
-            const sameScore = concatScore.find((item) => item.type === dataItem.type);
-            if (sameScore) {
-              sameScore.value = Math.max(sameScore.value, dataItem.value);
-            } else {
-              concatScore.push(dataItem);
-            }
-          }
-
-          map.set(data.id, {
-            ...record,
-            score: concatScore,
-            rrfScore: record.rrfScore + score
-          });
-        } else {
-          map.set(data.id, {
-            ...data,
-            rrfScore: score
-          });
-        }
-      });
-    });
-
-    // sort
-    const mapArray = Array.from(map.values());
-    const results = mapArray.sort((a, b) => b.rrfScore - a.rrfScore);
-
-    return results.map((item, index) => {
-      item.score.push({
-        type: SearchScoreTypeEnum.rrf,
-        value: item.rrfScore,
-        index
-      });
-      // @ts-ignore
-      delete item.rrfScore;
-      return item;
-    });
-  };
-
   /* main step */
   // count limit
@@ -681,7 +626,7 @@ export async function searchDatasetData(props: {
   })();
 
   // embedding recall and fullText recall rrf concat
-  const rrfConcatResults = rrfConcat([
+  const rrfConcatResults = datasetSearchResultConcat([
     { k: 60, list: embeddingRecallResults },
     { k: 64, list: fullTextRecallResults },
     { k: 60, list: reRankResults }
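datasetSearchResultConcat replaces the deleted local rrfConcat but keeps the same Reciprocal Rank Fusion scheme visible in the removed code: each list contributes 1 / (k + rank) per item, and contributions are summed across lists. A worked example under that assumption:

    // A chunk ranked 1st by embedding recall (k = 60) and 3rd by
    // full-text recall (k = 64):
    const rrfScore = 1 / (60 + 1) + 1 / (64 + 3); // ≈ 0.0313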
@@ -709,9 +654,8 @@ export async function searchDatasetData(props: {
     });
   }
   if (searchMode === DatasetSearchModeEnum.embedding) {
+    usingSimilarityFilter = true;
     return filterSameDataResults.filter((item) => {
-      usingSimilarityFilter = true;
-
       const embeddingScore = item.score.find(
         (item) => item.type === SearchScoreTypeEnum.embedding
       );
|
@@ -14,6 +14,7 @@ import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api
|
||||
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
|
||||
import { lockTrainingDataByTeamId } from '@fastgpt/service/core/dataset/training/controller';
|
||||
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
|
||||
import { getLLMModel } from '../core/ai/model';
|
||||
|
||||
const reduceQueue = () => {
|
||||
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
|
||||
@@ -111,7 +112,7 @@ export async function generateQA(): Promise<any> {
 
   try {
     const startTime = Date.now();
-    const model = data.model ?? global.qaModels[0].model;
+    const model = getLLMModel(data.model)?.model;
     const prompt = `${data.prompt || Prompt_AgentQA.description}
 ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
 
@@ -123,7 +124,9 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
       }
     ];
 
-    const ai = getAIApi(undefined, 600000);
+    const ai = getAIApi({
+      timeout: 600000
+    });
     const chatResponse = await ai.chat.completions.create({
       model,
       temperature: 0.3,
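Every getAIApi(userKey?, timeout?) call in this commit moves from positional arguments to a single options object. The new signature itself is not shown in this diff; a sketch of what it presumably looks like, inferred from the call sites (the option names userKey and timeout come from them, everything else is assumption):

    import OpenAI from 'openai';

    type GetAIApiProps = {
      userKey?: { key: string; baseUrl?: string }; // assumed shape of user.openaiAccount
      timeout?: number;
    };

    export const getAIApi = ({ userKey, timeout }: GetAIApiProps = {}) =>
      new OpenAI({
        apiKey: userKey?.key || process.env.CHAT_API_KEY || '', // env name is an assumption
        baseURL: userKey?.baseUrl || process.env.OPENAI_BASE_URL,
        timeout
      });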
@@ -8,8 +8,8 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
 import { Prompt_CQJson } from '@/global/core/prompt/agent';
-import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
-import { ModelTypeEnum, getCQModel } from '@/service/core/ai/model';
+import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
+import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
 import { getHistories } from '../utils';
 import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
 
@@ -32,14 +32,14 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
   const {
     user,
     histories,
-    inputs: { model, history = 6, agents, userChatInput }
+    params: { model, history = 6, agents, userChatInput }
   } = props as Props;
 
   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }
 
-  const cqModel = getCQModel(model);
+  const cqModel = getLLMModel(model);
 
   const chatHistories = getHistories(history, histories);
 
@@ -64,7 +64,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
     model: cqModel.model,
     inputLen: inputTokens,
     outputLen: outputTokens,
-    type: ModelTypeEnum.cq
+    type: ModelTypeEnum.llm
   });
 
   return {
@@ -86,8 +86,8 @@ async function toolChoice({
   user,
   cqModel,
   histories,
-  inputs: { agents, systemPrompt, userChatInput }
-}: Props & { cqModel: FunctionModelItemType }) {
+  params: { agents, systemPrompt, userChatInput }
+}: Props & { cqModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     ...histories,
     {
@@ -112,7 +112,7 @@ ${systemPrompt}
   // function body
   const agentFunction = {
     name: agentFunName,
-    description: '根据对话记录及补充的背景知识,对问题进行分类,并返回对应的类型字段',
+    description: '根据对话记录及背景知识,对问题进行分类,并返回对应的类型字段',
     parameters: {
       type: 'object',
       properties: {
@@ -127,7 +127,10 @@ ${systemPrompt}
       required: ['type']
     }
   };
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
 
   const response = await ai.chat.completions.create({
     model: cqModel.model,
@@ -170,12 +173,12 @@ async function completions({
   cqModel,
   user,
   histories,
-  inputs: { agents, systemPrompt = '', userChatInput }
-}: Props & { cqModel: FunctionModelItemType }) {
+  params: { agents, systemPrompt = '', userChatInput }
+}: Props & { cqModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     {
       obj: ChatRoleEnum.Human,
-      value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
+      value: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
         systemPrompt: systemPrompt || 'null',
         typeList: agents
           .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
@@ -186,7 +189,10 @@ async function completions({
     }
   ];
 
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
 
   const data = await ai.chat.completions.create({
     model: cqModel.model,
@@ -8,9 +8,9 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
 import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
-import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
+import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { getHistories } from '../utils';
-import { ModelTypeEnum, getExtractModel } from '@/service/core/ai/model';
+import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
 import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
 
 type Props = ModuleDispatchProps<{
@@ -33,14 +33,14 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
   const {
     user,
     histories,
-    inputs: { content, history = 6, model, description, extractKeys }
+    params: { content, history = 6, model, description, extractKeys }
   } = props;
 
   if (!content) {
     return Promise.reject('Input is empty');
   }
 
-  const extractModel = getExtractModel(model);
+  const extractModel = getLLMModel(model);
   const chatHistories = getHistories(history, histories);
 
   const { arg, inputTokens, outputTokens } = await (async () => {
@@ -84,7 +84,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
     model: extractModel.model,
     inputLen: inputTokens,
     outputLen: outputTokens,
-    type: ModelTypeEnum.extract
+    type: ModelTypeEnum.llm
   });
 
   return {
@@ -109,8 +109,8 @@ async function toolChoice({
   extractModel,
   user,
   histories,
-  inputs: { content, extractKeys, description }
-}: Props & { extractModel: FunctionModelItemType }) {
+  params: { content, extractKeys, description }
+}: Props & { extractModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     ...histories,
     {
@@ -162,7 +162,10 @@ ${description || '根据用户要求获取适当的 JSON 字符串。'}
     }
   };
 
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
 
   const response = await ai.chat.completions.create({
     model: extractModel.model,
@@ -202,12 +205,12 @@ async function completions({
   extractModel,
   user,
   histories,
-  inputs: { content, extractKeys, description }
-}: Props & { extractModel: FunctionModelItemType }) {
+  params: { content, extractKeys, description }
+}: Props & { extractModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     {
       obj: ChatRoleEnum.Human,
-      value: replaceVariable(extractModel.functionPrompt || Prompt_ExtractJson, {
+      value: replaceVariable(extractModel.customExtractPrompt || Prompt_ExtractJson, {
         description,
         json: extractKeys
           .map(
@@ -223,7 +226,10 @@ Human: ${content}`
     }
   ];
 
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
 
   const data = await ai.chat.completions.create({
     model: extractModel.model,
@@ -7,7 +7,7 @@ import { textAdaptGptResponse } from '@/utils/adapt';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
 import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
 import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
-import type { ChatModelItemType } from '@fastgpt/global/core/ai/model.d';
+import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { postTextCensor } from '@/service/common/censor';
 import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
 import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
@@ -18,11 +18,12 @@ import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d'
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
 import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
-import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
+import { getLLMModel, ModelTypeEnum } from '@/service/core/ai/model';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
 import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
 import { getHistories } from '../utils';
+import { filterSearchResultsByMaxChars } from '@fastgpt/global/core/dataset/search/utils';
 
 export type ChatProps = ModuleDispatchProps<
   AIChatModuleProps & {
@@ -46,7 +47,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     user,
     histories,
     outputs,
-    inputs: {
+    params: {
       model,
       temperature = 0,
       maxToken = 4000,
@@ -68,7 +69,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   const chatHistories = getHistories(history, histories);
 
   // temperature adapt
-  const modelConstantsData = getChatModel(model);
+  const modelConstantsData = getLLMModel(model);
 
   if (!modelConstantsData) {
     return Promise.reject('The chat model is undefined, you need to select a chat model.');
@@ -107,7 +108,10 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   // FastGPT temperature range: 1~10
   temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
   temperature = Math.max(temperature, 0.01);
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
 
   const concatMessages = [
     ...(modelConstantsData.defaultSystemChatPrompt
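The UI exposes temperature on a 1-10 scale and rescales it to the model's own range; the new Math.max guard keeps the value strictly positive. A standalone walk-through of the two lines above:

    const maxTemperature = 1.2; // hypothetical per-model limit
    let temperature = 5;        // UI value on the 1-10 scale
    temperature = +(maxTemperature * (temperature / 10)).toFixed(2); // 0.6
    temperature = Math.max(temperature, 0.01); // a UI value of 0 becomes 0.01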
@@ -134,14 +138,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
 
   const response = await ai.chat.completions.create(
     {
-      model,
-      presence_penalty: 0,
-      frequency_penalty: 0,
+      ...modelConstantsData?.defaultConfig,
+      model: modelConstantsData.model,
       temperature,
       max_tokens,
       stream,
-      top_p: 1,
-      // seed: temperature < 0.3 ? 1 : undefined,
+      presence_penalty: 0,
+      frequency_penalty: 0,
       messages: concatMessages
     },
     {
@@ -204,7 +207,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     model,
     inputLen: inputTokens,
     outputLen: outputTokens,
-    type: ModelTypeEnum.chat
+    type: ModelTypeEnum.llm
   });
 
   return {
@@ -229,8 +232,8 @@ function filterQuote({
   model,
   quoteTemplate
 }: {
-  quoteQA: ChatProps['inputs']['quoteQA'];
-  model: ChatModelItemType;
+  quoteQA: ChatProps['params']['quoteQA'];
+  model: LLMModelItemType;
   quoteTemplate?: string;
 }) {
   function getValue(item: SearchDataResponseItemType, index: number) {
@@ -243,16 +246,8 @@ function filterQuote({
     });
   }
 
-  const sliceResult = sliceMessagesTB({
-    maxTokens: model.quoteMaxToken,
-    messages: quoteQA.map((item, index) => ({
-      obj: ChatRoleEnum.System,
-      value: getValue(item, index).trim()
-    }))
-  });
-
   // slice filterSearch
-  const filterQuoteQA = quoteQA.slice(0, sliceResult.length);
+  const filterQuoteQA = filterSearchResultsByMaxChars(quoteQA, model.quoteMaxToken);
 
   // group filterQuoteQA by collectionId, then sort each group by chunkIndex ascending
   const sortQuoteQAMap: Record<string, SearchDataResponseItemType[]> = {};
@@ -263,21 +258,21 @@ function filterQuote({
       sortQuoteQAMap[item.collectionId] = [item];
     }
   });
-  const sortQuoteQAList = Object.values(sortQuoteQAMap).flat();
-  sortQuoteQAList.sort((a, b) => {
-    if (a.collectionId === b.collectionId) {
-      return a.chunkIndex - b.chunkIndex;
-    }
-    return 0;
+  const sortQuoteQAList = Object.values(sortQuoteQAMap);
+
+  sortQuoteQAList.forEach((qaList) => {
+    qaList.sort((a, b) => a.chunkIndex - b.chunkIndex);
   });
 
+  const flatQuoteList = sortQuoteQAList.flat();
+
   const quoteText =
-    sortQuoteQAList.length > 0
-      ? `${sortQuoteQAList.map((item, index) => getValue(item, index)).join('\n')}`
+    flatQuoteList.length > 0
+      ? `${flatQuoteList.map((item, index) => getValue(item, index)).join('\n')}`
       : '';
 
   return {
-    filterQuoteQA: sortQuoteQAList,
+    filterQuoteQA: flatQuoteList,
     quoteText
   };
 }
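The old comparator returned 0 for items from different collections, which is not a consistent total order, so chunks from one collection could stay scattered. The rewrite groups by collectionId first and sorts each group on its own; a reduced sketch with hypothetical data:

    const quotes = [
      { collectionId: 'a', chunkIndex: 2 },
      { collectionId: 'b', chunkIndex: 1 },
      { collectionId: 'a', chunkIndex: 1 }
    ];

    const byCollection: Record<string, typeof quotes> = {};
    quotes.forEach((q) => (byCollection[q.collectionId] ??= []).push(q));
    Object.values(byCollection).forEach((g) => g.sort((x, y) => x.chunkIndex - y.chunkIndex));

    // a/1, a/2, b/1 - each collection contiguous and in chunk order.
    console.log(Object.values(byCollection).flat());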
@@ -294,7 +289,7 @@ function getChatMessages({
   histories: ChatItemType[];
   systemPrompt: string;
   userChatInput: string;
-  model: ChatModelItemType;
+  model: LLMModelItemType;
 }) {
   const question = quoteText
     ? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
@@ -337,9 +332,10 @@ function getMaxTokens({
   filterMessages = []
 }: {
   maxToken: number;
-  model: ChatModelItemType;
+  model: LLMModelItemType;
   filterMessages: ChatItemType[];
 }) {
+  maxToken = Math.min(maxToken, model.maxResponse);
   const tokensLimit = model.maxContext;
 
   /* count response max token */
@@ -407,7 +403,7 @@ async function streamResponse({
   }
 
   if (!answer) {
-    return Promise.reject('core.chat API is error or undefined');
+    return Promise.reject('core.chat.Chat API is error or undefined');
   }
 
   return { answer };
projects/app/src/service/moduleDispatch/dataset/concat.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
+import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
+import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
+import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
+import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
+import { filterSearchResultsByMaxChars } from '@fastgpt/global/core/dataset/search/utils';
+
+type DatasetConcatProps = ModuleDispatchProps<
+  {
+    [ModuleInputKeyEnum.datasetMaxTokens]: number;
+  } & { [key: string]: SearchDataResponseItemType[] }
+>;
+type DatasetConcatResponse = {
+  [ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
+};
+
+export async function dispatchDatasetConcat(
+  props: DatasetConcatProps
+): Promise<DatasetConcatResponse> {
+  const {
+    params: { limit = 1500, ...quoteMap }
+  } = props as DatasetConcatProps;
+
+  const quoteList = Object.values(quoteMap).filter((list) => Array.isArray(list));
+
+  const rrfConcatResults = datasetSearchResultConcat(
+    quoteList.map((list) => ({
+      k: 60,
+      list
+    }))
+  );
+
+  return {
+    [ModuleOutputKeyEnum.datasetQuoteQA]: filterSearchResultsByMaxChars(rrfConcatResults, limit)
+  };
+}
@@ -3,7 +3,7 @@ import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
 import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
-import { ModelTypeEnum } from '@/service/core/ai/model';
+import { ModelTypeEnum, getVectorModel } from '@/service/core/ai/model';
 import { searchDatasetData } from '@/service/core/dataset/data/controller';
 import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
 import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
@@ -11,7 +11,7 @@ import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
 type DatasetSearchProps = ModuleDispatchProps<{
   [ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
   [ModuleInputKeyEnum.datasetSimilarity]: number;
-  [ModuleInputKeyEnum.datasetLimit]: number;
+  [ModuleInputKeyEnum.datasetMaxTokens]: number;
   [ModuleInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
   [ModuleInputKeyEnum.userChatInput]: string;
   [ModuleInputKeyEnum.datasetSearchUsingReRank]: boolean;
@@ -28,7 +28,7 @@ export async function dispatchDatasetSearch(
 ): Promise<DatasetSearchResponse> {
   const {
     teamId,
-    inputs: { datasets = [], similarity, limit = 1500, usingReRank, searchMode, userChatInput }
+    params: { datasets = [], similarity, limit = 1500, usingReRank, searchMode, userChatInput }
   } = props as DatasetSearchProps;
 
   if (!Array.isArray(datasets)) {
@@ -44,11 +44,11 @@ export async function dispatchDatasetSearch(
   }
 
   // get vector
-  const vectorModel = datasets[0]?.vectorModel || global.vectorModels[0];
+  const vectorModel = getVectorModel(datasets[0]?.vectorModel?.model);
 
   // const { queries: extensionQueries } = await searchQueryExtension({
   //   query: userChatInput,
-  //   model: global.chatModels[0].model
+  //   model: global.llmModels[0].model
   // });
   const concatQueries = [userChatInput];
 
@@ -16,6 +16,7 @@ import { dispatchHistory } from './init/history';
 import { dispatchChatInput } from './init/userChatInput';
 import { dispatchChatCompletion } from './chat/oneapi';
 import { dispatchDatasetSearch } from './dataset/search';
+import { dispatchDatasetConcat } from './dataset/concat';
 import { dispatchAnswer } from './tools/answer';
 import { dispatchClassifyQuestion } from './agent/classifyQuestion';
 import { dispatchContentExtract } from './agent/extract';
@@ -25,6 +26,7 @@ import { dispatchCFR } from './tools/cfr';
 import { dispatchRunPlugin } from './plugin/run';
 import { dispatchPluginInput } from './plugin/runInput';
 import { dispatchPluginOutput } from './plugin/runOutput';
+import { valueTypeFormat } from './utils';
 
 const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
   [FlowNodeTypeEnum.historyNode]: dispatchHistory,
@@ -32,6 +34,7 @@ const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
   [FlowNodeTypeEnum.answerNode]: dispatchAnswer,
   [FlowNodeTypeEnum.chatNode]: dispatchChatCompletion,
   [FlowNodeTypeEnum.datasetSearchNode]: dispatchDatasetSearch,
+  [FlowNodeTypeEnum.datasetConcatNode]: dispatchDatasetConcat,
   [FlowNodeTypeEnum.classifyQuestion]: dispatchClassifyQuestion,
   [FlowNodeTypeEnum.contentExtract]: dispatchContentExtract,
   [FlowNodeTypeEnum.httpRequest]: dispatchHttpRequest,
@@ -126,7 +129,6 @@ export async function dispatchModules({
 ): Promise<any> {
     pushStore(module, result);
 
-    //
     const nextRunModules: RunningModuleItemType[] = [];
 
     // Assign the output value to the next module
@@ -163,6 +165,7 @@ export async function dispatchModules({
   return Promise.all(
     modules.map((module) => {
       if (!module.inputs.find((item: any) => item.value === undefined)) {
+        // remove switch
         moduleInput(module, { [ModuleInputKeyEnum.switch]: undefined });
         return moduleRun(module);
       }
@@ -182,9 +185,10 @@ export async function dispatchModules({
 
   // get module running params
   const params: Record<string, any> = {};
-  module.inputs.forEach((item: any) => {
-    params[item.key] = item.value;
+  module.inputs.forEach((item) => {
+    params[item.key] = valueTypeFormat(item.value, item.valueType);
   });
 
   const dispatchData: ModuleDispatchProps<Record<string, any>> = {
     ...props,
     res,
@@ -194,7 +198,8 @@ export async function dispatchModules({
     stream,
     detail,
     outputs: module.outputs,
-    inputs: params
+    inputs: module.inputs,
+    params
   };
 
   // run module
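Dispatchers previously received the computed values under inputs; after this commit, inputs carries the raw module input definitions while params holds the values already run through valueTypeFormat, which is why every handler in this diff switches its destructuring from inputs to params. The payload presumably looks like this (hypothetical type, shape inferred from the surrounding code):

    type DispatchPayload<T> = {
      inputs: { key: string; value?: any; valueType?: string }[]; // raw module inputs
      params: T; // formatted key/value map consumed by the handler
    };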
@@ -286,19 +291,13 @@ function loadModules(
           item.value !== undefined
       ) // filter unconnected target input
       .map((item) => {
-        if (typeof item.value !== 'string') {
-          return {
-            key: item.key,
-            value: item.value
-          };
-        }
-
-        // variables replace
-        const replacedVal = replaceVariable(item.value, variables);
+        const replace = ['string'].includes(typeof item.value);
 
         return {
           key: item.key,
-          value: replacedVal
+          // variables replace
+          value: replace ? replaceVariable(item.value, variables) : item.value,
+          valueType: item.valueType
         };
       }),
     outputs: module.outputs
@@ -306,6 +305,7 @@ function loadModules(
       key: item.key,
       answer: item.key === ModuleOutputKeyEnum.answerText,
       value: undefined,
+      valueType: item.valueType,
       targets: item.targets
     }))
     .sort((a, b) => {
@@ -10,7 +10,7 @@ export type HistoryProps = ModuleDispatchProps<{
 export const dispatchHistory = (props: Record<string, any>) => {
   const {
     histories,
-    inputs: { maxContext }
+    params: { maxContext }
   } = props as HistoryProps;
 
   return {
@@ -6,7 +6,7 @@ export type UserChatInputProps = ModuleDispatchProps<{
 
 export const dispatchChatInput = (props: Record<string, any>) => {
   const {
-    inputs: { userChatInput }
+    params: { userChatInput }
   } = props as UserChatInputProps;
   return {
     userChatInput
@@ -24,11 +24,11 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
   mode,
   teamId,
   tmbId,
-  inputs: { pluginId, ...data }
+  params: { pluginId, ...data }
 } = props;
 
 if (!pluginId) {
-  return Promise.reject('Input is empty');
+  return Promise.reject('pluginId can not find');
 }
 
 await authPluginCanUse({ id: pluginId, teamId, tmbId });
@@ -72,7 +72,6 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
   if (output) {
     output.moduleLogo = plugin.avatar;
   }
-  console.log(responseData.length);
 
   return {
     answerText,
@@ -5,7 +5,7 @@ export type PluginInputProps = ModuleDispatchProps<{
 }>;
 
 export const dispatchPluginInput = (props: PluginInputProps) => {
-  const { inputs } = props;
+  const { params } = props;
 
-  return inputs;
+  return params;
 };
@@ -10,12 +10,12 @@ export type PluginOutputResponse = {
 };
 
 export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResponse => {
-  const { inputs } = props;
+  const { params } = props;
 
   return {
     responseData: {
       price: 0,
-      pluginOutput: inputs
+      pluginOutput: params
     }
   };
 };
@@ -15,7 +15,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
   res,
   detail,
   stream,
-  inputs: { text = '' }
+  params: { text = '' }
 } = props as AnswerProps;
 
 const formatText = typeof text === 'string' ? text : JSON.stringify(text, null, 2);
@@ -4,7 +4,7 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/mo
 import { getHistories } from '../utils';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
-import { ModelTypeEnum, getExtractModel } from '@/service/core/ai/model';
+import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
 import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
 
 type Props = ModuleDispatchProps<{
@@ -20,7 +20,7 @@ type Response = {
 
 export const dispatchCFR = async ({
   histories,
-  inputs: { model, systemPrompt, history, userChatInput }
+  params: { model, systemPrompt, history, userChatInput }
 }: Props): Promise<Response> => {
   if (!userChatInput) {
     return Promise.reject('Question is empty');
@@ -34,7 +34,7 @@ export const dispatchCFR = async ({
     };
   }
 
-  const extractModel = getExtractModel(model);
+  const extractModel = getLLMModel(model);
   const chatHistories = getHistories(history, histories);
 
   const systemFewShot = systemPrompt
@@ -51,7 +51,9 @@ A: ${systemPrompt}
 
   const concatFewShot = `${systemFewShot}${historyFewShot}`.trim();
 
-  const ai = getAIApi(undefined, 480000);
+  const ai = getAIApi({
+    timeout: 480000
+  });
 
   const result = await ai.chat.completions.create({
     model: extractModel.model,
@@ -85,7 +87,7 @@ A: ${systemPrompt}
     model: extractModel.model,
     inputLen: inputTokens,
     outputLen: outputTokens,
-    type: ModelTypeEnum.extract
+    type: ModelTypeEnum.llm
   });
 
   return {
@@ -2,8 +2,8 @@ import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
 import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
 import axios from 'axios';
-import { flatDynamicParams } from '../utils';
-import { isIPv6 } from 'net';
+import { flatDynamicParams, valueTypeFormat } from '../utils';
+import { SERVICE_LOCAL_HOST } from '@fastgpt/service/common/system/tools';
 
 export type HttpRequestProps = ModuleDispatchProps<{
   [ModuleInputKeyEnum.abandon_httpUrl]: string;
@@ -24,7 +24,8 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
   chatId,
   responseChatItemId,
   variables,
-  inputs: {
+  outputs,
+  params: {
     system_httpMethod: httpMethod,
     url: abandonUrl,
     system_httpReqUrl: httpReqUrl,
@@ -96,13 +97,21 @@ export const dispatchHttpRequest = async (props: HttpRequestProps): Promise<Http
       query: requestQuery
     });
 
+    // format output value type
+    const results: Record<string, any> = {};
+    for (const key in response) {
+      const output = outputs.find((item) => item.key === key);
+      if (!output) continue;
+      results[key] = valueTypeFormat(response[key], output.valueType);
+    }
+
     return {
       responseData: {
         price: 0,
         body: formatBody,
         httpResult: response
       },
-      ...response
+      ...results
     };
   } catch (error) {
     console.log(error);
@@ -133,12 +142,7 @@ async function fetchData({
 }): Promise<Record<string, any>> {
   const { data: response } = await axios<Record<string, any>>({
     method,
-    baseURL: `http://${
-      process.env.HOSTNAME && isIPv6(process.env.HOSTNAME)
-        ? `[${process.env.HOSTNAME}]:${process.env.PORT || 3000}`
-        : `${process.env.HOSTNAME || 'localhost'}:${process.env.PORT || 3000}`
-    }`,
+    baseURL: `http://${SERVICE_LOCAL_HOST}`,
     url,
     headers: {
       'Content-Type': 'application/json',
@@ -154,33 +158,68 @@ async function fetchData({
       user: {
         name: 'xxx',
         age: 12
       },
       list: [
         {
           name: 'xxx',
           age: 50
         },
         [{ test: 22 }]
       ],
       psw: 'xxx'
     }
 
     result: {
-      'user': {
-        name: 'xxx',
-        age: 12
-      },
+      'user': { name: 'xxx', age: 12 },
       'user.name': 'xxx',
       'user.age': 12,
+      'list': [ { name: 'xxx', age: 50 }, [ [Object] ] ],
       'list[0]': { name: 'xxx', age: 50 },
       'list[0].name': 'xxx',
       'list[0].age': 50,
+      'list[1]': [ { test: 22 } ],
+      'list[1][0]': { test: 22 },
+      'list[1][0].test': 22,
       'psw': 'xxx'
     }
   */
   const parseJson = (obj: Record<string, any>, prefix = '') => {
     let result: Record<string, any> = {};
-    for (const key in obj) {
-      if (typeof obj[key] === 'object') {
-        result[key] = obj[key];
-        result = {
-          ...result,
-          ...parseJson(obj[key], `${prefix}${key}.`)
-        };
-      } else {
-        result[`${prefix}${key}`] = obj[key];
-      }
-    }
+
+    if (Array.isArray(obj)) {
+      for (let i = 0; i < obj.length; i++) {
+        result[`${prefix}[${i}]`] = obj[i];
+
+        if (Array.isArray(obj[i])) {
+          result = {
+            ...result,
+            ...parseJson(obj[i], `${prefix}[${i}]`)
+          };
+        } else if (typeof obj[i] === 'object') {
+          result = {
+            ...result,
+            ...parseJson(obj[i], `${prefix}[${i}].`)
+          };
+        }
+      }
+    } else if (typeof obj == 'object') {
+      for (const key in obj) {
+        result[`${prefix}${key}`] = obj[key];
+
+        if (Array.isArray(obj[key])) {
+          result = {
+            ...result,
+            ...parseJson(obj[key], `${prefix}${key}`)
+          };
+        } else if (typeof obj[key] === 'object') {
+          result = {
+            ...result,
+            ...parseJson(obj[key], `${prefix}${key}.`)
+          };
+        }
+      }
+    }
 
     return result;
   };
@@ -28,7 +28,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
   stream,
   detail,
   histories,
-  inputs: { userChatInput, history, app }
+  params: { userChatInput, history, app }
 } = props;
 
 if (!userChatInput) {
@@ -1,5 +1,5 @@
 import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
-import { DYNAMIC_INPUT_KEY } from '@fastgpt/global/core/module/constants';
+import { DYNAMIC_INPUT_KEY, ModuleIOValueTypeEnum } from '@fastgpt/global/core/module/constants';
 
 export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
   if (!history) return [];
@@ -18,3 +18,17 @@ export const flatDynamicParams = (params: Record<string, any>) => {
     [DYNAMIC_INPUT_KEY]: undefined
   };
 };
+
+/* value type format */
+export const valueTypeFormat = (value: any, type?: `${ModuleIOValueTypeEnum}`) => {
+  if (value === undefined) return;
+
+  if (type === 'string') {
+    if (typeof value !== 'object') return String(value);
+    return JSON.stringify(value);
+  }
+  if (type === 'number') return Number(value);
+  if (type === 'boolean') return Boolean(value);
+
+  return value;
+};
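A few illustrative calls against the helper added above:

    valueTypeFormat(42, 'string');         // => '42'
    valueTypeFormat({ a: 1 }, 'string');   // => '{"a":1}'
    valueTypeFormat('3', 'number');        // => 3
    valueTypeFormat(undefined, 'boolean'); // => undefined (left untouched)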
@@ -8,6 +8,7 @@ import { exit } from 'process';
 import { initVectorStore } from '@fastgpt/service/common/vectorStore/controller';
 import { getInitConfig } from '@/pages/api/common/system/getInitData';
 import { startCron } from './common/system/cron';
+import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 
 /**
  * connect MongoDB and init data
@@ -39,23 +40,30 @@ async function initRootUser() {
 
   let rootId = rootUser?._id || '';
 
-  // init root user
-  if (rootUser) {
-    await MongoUser.findOneAndUpdate(
-      { username: 'root' },
-      {
-        password: hashStr(psw)
-      }
-    );
-  } else {
-    const { _id } = await MongoUser.create({
-      username: 'root',
-      password: hashStr(psw)
-    });
-    rootId = _id;
-  }
-  // init root team
-  await createDefaultTeam({ userId: rootId, maxSize: 1, balance: 9999 * PRICE_SCALE });
+  await mongoSessionRun(async (session) => {
+    // init root user
+    if (rootUser) {
+      await MongoUser.findOneAndUpdate(
+        { username: 'root' },
+        {
+          password: hashStr(psw)
+        }
+      );
+    } else {
+      const [{ _id }] = await MongoUser.create(
+        [
+          {
+            username: 'root',
+            password: hashStr(psw)
+          }
+        ],
+        { session }
+      );
+      rootId = _id;
+    }
+    // init root team
+    await createDefaultTeam({ userId: rootId, maxSize: 1, balance: 9999 * PRICE_SCALE, session });
+  });
 
   console.log(`root user init:`, {
     username: 'root',
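Root-user and default-team creation now run inside one MongoDB transaction; note that Model.create([...], { session }) takes an array once options are passed, hence the [{ _id }] destructuring above. mongoSessionRun itself is not shown in this diff; a sketch assuming it wraps the usual Mongoose startSession/transaction pattern:

    import mongoose, { ClientSession } from 'mongoose';

    export async function mongoSessionRun<T>(
      fn: (session: ClientSession) => Promise<T>
    ): Promise<T> {
      const session = await mongoose.startSession();
      try {
        session.startTransaction();
        const result = await fn(session);
        await session.commitTransaction();
        return result;
      } catch (error) {
        await session.abortTransaction();
        throw error;
      } finally {
        session.endSession();
      }
    }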
@@ -66,7 +66,7 @@ export const pushQABill = async ({
   const { total } = formatModelPrice2Store({
     model,
     inputLen: charsLength,
-    type: ModelTypeEnum.qa
+    type: ModelTypeEnum.llm
   });
 
   concatBill({
@@ -143,12 +143,12 @@ export const pushQuestionGuideBill = ({
   teamId: string;
   tmbId: string;
 }) => {
-  const qgModel = global.qgModels[0];
+  const qgModel = global.llmModels[0];
   const { total, modelName } = formatModelPrice2Store({
     inputLen: inputTokens,
     outputLen: outputTokens,
     model: qgModel.model,
-    type: ModelTypeEnum.qg
+    type: ModelTypeEnum.llm
   });
 
   createBill({
projects/app/src/service/support/wallet/sub/utils.ts (new file, 1 line)
@@ -0,0 +1 @@
+export const getStandardSubPlan = () => global.subPlans?.standard;