mirror of https://github.com/labring/FastGPT.git
synced 2025-08-01 03:48:24 +00:00
Commit: v4.5.2 (#439)
@@ -1,8 +1,8 @@
-import { FlowModuleTypeEnum } from '@/constants/flow';
-import { AppModuleItemType } from '@/types/app';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
+import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
 
-export const getChatModelNameListByModules = (modules: AppModuleItemType[]): string[] => {
-  const chatModules = modules.filter((item) => item.flowType === FlowModuleTypeEnum.chatNode);
+export const getChatModelNameListByModules = (modules: ModuleItemType[]): string[] => {
+  const chatModules = modules.filter((item) => item.flowType === FlowNodeTypeEnum.chatNode);
   return chatModules
     .map((item) => {
       const model = item.inputs.find((input) => input.key === 'model')?.value;
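The hunk above swaps the app-local `AppModuleItemType`/`FlowModuleTypeEnum` for the shared `ModuleItemType`/`FlowNodeTypeEnum` from `@fastgpt/global`. A minimal sketch of calling the helper, with a hypothetical module literal (a real `ModuleItemType` carries many more fields, hence the cast):

```ts
// Hypothetical module list for illustration only.
const modules = [
  { flowType: 'chatNode', inputs: [{ key: 'model', value: 'gpt-3.5-turbo' }] }
] as any[];

// Collects the model chosen by each chat node, e.g. ['gpt-3.5-turbo'].
console.log(getChatModelNameListByModules(modules));
```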
@@ -11,6 +11,7 @@ import { splitText2Chunks } from '@/global/common/string/tools';
 import { replaceVariable } from '@/global/common/string/tools';
 import { Prompt_AgentQA } from '@/global/core/prompt/agent';
 import { pushDataToDatasetCollection } from '@/pages/api/core/dataset/data/pushData';
+import { getErrText } from '@fastgpt/global/common/error/utils';
 
 const reduceQueue = () => {
   global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -116,7 +117,8 @@ export async function generateQA(): Promise<any> {
       console.log('openai error: 生成QA错误');
       console.log(err.response?.status, err.response?.statusText, err.response?.data);
     } else {
-      addLog.error('生成 QA 错误', err);
+      console.log(err);
+      addLog.error(getErrText(err, '生成 QA 错误'));
     }
 
     // message error or openai account error
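Both error paths now normalize the error through `getErrText` before logging. A plausible sketch of such a helper, assuming it prefers an explicit message and falls back to the supplied default; the real implementation lives in `@fastgpt/global/common/error/utils` and may differ:

```ts
// Sketch only: prefer a plain string error, then a response/message field,
// then the caller's fallback text.
export const getErrText = (err: any, def = ''): string => {
  if (typeof err === 'string') return err;
  return err?.response?.data?.message || err?.message || def;
};
```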
@@ -90,6 +90,7 @@ export async function generateVector(): Promise<any> {
         data: err.response?.data
       });
     } else {
       console.log(err);
+      addLog.error(getErrText(err, '生成向量错误'));
     }
 
@@ -1,19 +0,0 @@
-import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
-const { Schema, model, models } = connectionMongo;
-import { CollectionSchema as CollectionType } from '@/types/mongoSchema';
-
-const CollectionSchema = new Schema({
-  userId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: true
-  },
-  appId: {
-    type: Schema.Types.ObjectId,
-    ref: 'model',
-    required: true
-  }
-});
-
-export const Collection: Model<CollectionType> =
-  models['collection'] || model('collection', CollectionSchema);
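Each of the deleted model files in this commit follows the same Mongoose registration guard: reuse the already-compiled model from `models[...]` if it exists (as happens after a Next.js hot reload), otherwise compile it once. A self-contained sketch of the pattern with a hypothetical demo schema:

```ts
import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
const { Schema, model, models } = connectionMongo;

// Hypothetical document type and schema, for illustration only.
interface DemoType {
  userId: string;
}
const DemoSchema = new Schema({
  userId: { type: Schema.Types.ObjectId, ref: 'user', required: true }
});

// The guard: 'models' caches compiled models per connection, so repeated
// imports or hot reloads don't throw OverwriteModelError.
export const Demo: Model<DemoType> = models['demo'] || model('demo', DemoSchema);
```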
@@ -1,16 +0,0 @@
-import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
-const { Schema, model, models } = connectionMongo;
-
-const ImageSchema = new Schema({
-  userId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: true
-  },
-  binary: {
-    type: Buffer
-  }
-});
-
-export const Image: Model<{ userId: string; binary: Buffer }> =
-  models['image'] || model('image', ImageSchema);
@@ -1,41 +0,0 @@
-import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
-const { Schema, model, models } = connectionMongo;
-import { informSchema } from '@/types/mongoSchema';
-import { InformTypeMap } from '@/constants/user';
-
-const InformSchema = new Schema({
-  userId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: true
-  },
-  time: {
-    type: Date,
-    default: () => new Date()
-  },
-  type: {
-    type: String,
-    enum: Object.keys(InformTypeMap)
-  },
-  title: {
-    type: String,
-    required: true
-  },
-  content: {
-    type: String,
-    required: true
-  },
-  read: {
-    type: Boolean,
-    default: false
-  }
-});
-
-try {
-  InformSchema.index({ time: -1 });
-  InformSchema.index({ userId: 1 });
-} catch (error) {
-  console.log(error);
-}
-
-export const Inform: Model<informSchema> = models['inform'] || model('inform', InformSchema);
@@ -1,30 +0,0 @@
-import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
-const { Schema, model, models } = connectionMongo;
-import { PaySchema as PayType } from '@/types/mongoSchema';
-const PaySchema = new Schema({
-  userId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: true
-  },
-  createTime: {
-    type: Date,
-    default: () => new Date()
-  },
-  price: {
-    type: Number,
-    required: true
-  },
-  orderId: {
-    type: String,
-    required: true
-  },
-  status: {
-    // payment status
-    type: String,
-    default: 'NOTPAY',
-    enum: ['SUCCESS', 'REFUND', 'NOTPAY', 'CLOSED']
-  }
-});
-
-export const Pay: Model<PayType> = models['pay'] || model('pay', PaySchema);
@@ -1,32 +0,0 @@
-import { connectionMongo, type Model } from '@fastgpt/service/common/mongo';
-const { Schema, model, models } = connectionMongo;
-import { PromotionRecordSchema as PromotionRecordType } from '@/types/mongoSchema';
-
-const PromotionRecordSchema = new Schema({
-  userId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: true
-  },
-  objUId: {
-    type: Schema.Types.ObjectId,
-    ref: 'user',
-    required: false
-  },
-  createTime: {
-    type: Date,
-    default: () => new Date()
-  },
-  type: {
-    type: String,
-    required: true,
-    enum: ['pay', 'register']
-  },
-  amount: {
-    type: Number,
-    required: true
-  }
-});
-
-export const promotionRecord: Model<PromotionRecordType> =
-  models['promotionRecord'] || model('promotionRecord', PromotionRecordSchema);
@@ -1,12 +1,11 @@
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import type { moduleDispatchResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
 import type { ClassifyQuestionAgentItemType } from '@/types/app';
 import { SystemInputEnum } from '@/constants/app';
-import { SpecialInputKeyEnum } from '@/constants/flow';
-import { FlowModuleTypeEnum } from '@/constants/flow';
+import { FlowNodeSpecialInputKeyEnum } from '@fastgpt/global/core/module/node/constant';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { replaceVariable } from '@/global/common/string/tools';
 import { Prompt_CQJson } from '@/global/core/prompt/agent';
@@ -18,10 +17,10 @@ type Props = ModuleDispatchProps<{
   systemPrompt?: string;
   history?: ChatItemType[];
   [SystemInputEnum.userChatInput]: string;
-  [SpecialInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
+  [FlowNodeSpecialInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
 }>;
 type CQResponse = {
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
   [key: string]: any;
 };
 
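The recurring change in this commit replaces `ChatHistoryItemResType` with `moduleDispatchResType` wherever a dispatch module describes its own response; the dispatcher, not the module, now attaches identity fields, which is why `moduleType` and `moduleName` disappear from every `responseData` literal below. Inferring only from the fields these hunks populate, the type is roughly the following sketch; the actual declaration in `@/types/chat` may differ:

```ts
// Inferred shape, for orientation only; see '@/types/chat' for the real type.
type moduleDispatchResType = {
  price: number;            // billing for this module run
  model?: string;           // model name, when an AI call was made
  tokens?: number;          // token usage, when an AI call was made
  moduleLogo?: string;      // set by the plugin runner (run.ts below)
  pluginOutput?: Record<string, any>; // set by dispatchPluginOutput
  [key: string]: any;
};
```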
@@ -30,7 +29,6 @@ const agentFunName = 'agent_user_question';
 /* request openai chat */
 export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
   const {
-    moduleName,
     user,
     inputs: { model, agents, userChatInput }
   } = props as Props;
@@ -59,8 +57,6 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
   return {
     [result.key]: 1,
     [TaskResponseKeyEnum.responseData]: {
-      moduleType: FlowModuleTypeEnum.classifyQuestion,
-      moduleName,
       price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
       model: cqModel.name || '',
       tokens,
@@ -1,11 +1,10 @@
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import type { moduleDispatchResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
 import type { ContextExtractAgentItemType } from '@/types/app';
 import { ContextExtractEnum } from '@/constants/flow/flowField';
-import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
 import { replaceVariable } from '@/global/common/string/tools';
@@ -21,14 +20,13 @@ type Response = {
   [ContextExtractEnum.success]?: boolean;
   [ContextExtractEnum.failed]?: boolean;
   [ContextExtractEnum.fields]: string;
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
 };
 
 const agentFunName = 'agent_extract_data';
 
 export async function dispatchContentExtract(props: Props): Promise<Response> {
   const {
-    moduleName,
     user,
     inputs: { content, description, extractKeys }
   } = props;
@@ -77,8 +75,6 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
     [ContextExtractEnum.fields]: JSON.stringify(arg),
     ...arg,
     [TaskResponseKeyEnum.responseData]: {
-      moduleType: FlowModuleTypeEnum.contentExtract,
-      moduleName,
       price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
       model: extractModel.name || '',
       tokens,
@@ -1,7 +1,6 @@
 import type { NextApiResponse } from 'next';
 import { ChatContextFilter } from '@/service/common/tiktoken';
-import type { ChatItemType } from '@/types/chat';
-import type { ChatHistoryItemResType } from '@/types/chat';
+import type { ChatItemType, moduleDispatchResType } from '@/types/chat';
 import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
 import { textAdaptGptResponse } from '@/utils/adapt';
 import { getAIApi } from '@fastgpt/service/core/ai/config';
@@ -11,13 +10,12 @@ import { countModelPrice } from '@/service/common/bill/push';
 import { ChatModelItemType } from '@/types/model';
 import { postTextCensor } from '@/web/common/plusApi/censor';
 import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constant';
-import { AppModuleItemType } from '@/types/app';
+import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
 import { countMessagesTokens, sliceMessagesTB } from '@/global/common/tiktoken';
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
 import type { AIChatProps } from '@/types/core/aiChat';
 import { replaceVariable } from '@/global/common/string/tools';
-import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
 import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
@@ -33,7 +31,7 @@ export type ChatProps = ModuleDispatchProps<
 >;
 export type ChatResponse = {
   [TaskResponseKeyEnum.answerText]: string;
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
   [TaskResponseKeyEnum.history]: ChatItemType[];
 };
 
@@ -41,7 +39,6 @@ export type ChatResponse = {
 export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
   let {
     res,
-    moduleName,
     stream = false,
     detail = false,
     user,
@@ -180,8 +177,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
   return {
     [TaskResponseKeyEnum.answerText]: answerText,
     [TaskResponseKeyEnum.responseData]: {
-      moduleType: FlowModuleTypeEnum.chatNode,
-      moduleName,
       price: user.openaiAccount?.key
         ? 0
         : countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
@@ -205,17 +200,21 @@ function filterQuote({
   model: ChatModelItemType;
   quoteTemplate?: string;
 }) {
+  function getValue(item: SearchDataResponseItemType, index: number) {
+    return replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
+      q: item.q,
+      a: item.a,
+      source: item.sourceName,
+      sourceId: String(item.sourceId || 'UnKnow'),
+      index: index + 1,
+      score: item.score.toFixed(4)
+    });
+  }
   const sliceResult = sliceMessagesTB({
     maxTokens: model.quoteMaxToken,
     messages: quoteQA.map((item, index) => ({
       obj: ChatRoleEnum.System,
-      value: replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
-        q: item.q,
-        a: item.a,
-        source: item.sourceName,
-        sourceId: item.sourceId || 'UnKnow',
-        index: index + 1
-      })
+      value: getValue(item, index)
     }))
   });
 
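The new `getValue` helper renders each quote through `replaceVariable`, so the template is filled in one place for both the token-slicing pass and the final `quoteText` below. A plausible sketch of a `{{variable}}` substitution helper in that spirit; this is an assumption for illustration, the real one is imported from `@/global/common/string/tools`:

```ts
// Sketch: replace every {{key}} placeholder in a template with the value
// from obj. Keys are assumed to be plain identifiers (no regex specials).
export function replaceVariable(text: string, obj: Record<string, string | number>): string {
  for (const key of Object.keys(obj)) {
    text = text.replace(new RegExp(`{{${key}}}`, 'g'), String(obj[key] ?? ''));
  }
  return text;
}

// e.g. replaceVariable('{{index}}. {{q}} ({{score}})', { index: 1, q: 'hi', score: '0.9123' })
// → '1. hi (0.9123)'
```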
@@ -224,17 +223,7 @@ function filterQuote({
 
   const quoteText =
     filterQuoteQA.length > 0
-      ? `${filterQuoteQA
-          .map((item, index) =>
-            replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
-              q: item.q,
-              a: item.a,
-              source: item.sourceName,
-              sourceId: item.sourceId || 'UnKnow',
-              index: index + 1
-            })
-          )
-          .join('\n')}`
+      ? `${filterQuoteQA.map((item, index) => getValue(item, index)).join('\n')}`
       : '';
 
   return {
@@ -330,7 +319,7 @@ function targetResponse({
   detail
 }: {
   res: NextApiResponse;
-  outputs: AppModuleItemType['outputs'];
+  outputs: ModuleItemType['outputs'];
   detail: boolean;
 }) {
   const targets =
@@ -1,5 +1,5 @@
 import { PgClient } from '@/service/pg';
-import type { ChatHistoryItemResType } from '@/types/chat';
+import type { moduleDispatchResType } from '@/types/chat';
 import { TaskResponseKeyEnum } from '@/constants/chat';
 import { getVector } from '@/pages/api/openapi/plugin/vector';
 import { countModelPrice } from '@/service/common/bill/push';
@@ -9,7 +9,6 @@ import type {
   SearchDataResultItemType
 } from '@fastgpt/global/core/dataset/type';
 import { PgDatasetTableName } from '@/constants/plugin';
-import { FlowModuleTypeEnum } from '@/constants/flow';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 import { ModelTypeEnum } from '@/service/core/ai/model';
 import { getDatasetDataItemInfo } from '@/pages/api/core/dataset/data/getDataById';
@@ -21,15 +20,14 @@ type DatasetSearchProps = ModuleDispatchProps<{
   userChatInput: string;
 }>;
 export type KBSearchResponse = {
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
   isEmpty?: boolean;
   unEmpty?: boolean;
   quoteQA: SearchDataResponseItemType[];
 };
 
-export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
+export async function dispatchDatasetSearch(props: Record<string, any>): Promise<KBSearchResponse> {
   const {
-    moduleName,
     user,
     inputs: { datasets = [], similarity = 0.4, limit = 5, userChatInput }
   } = props as DatasetSearchProps;
@@ -77,8 +75,6 @@ export async function dispatchDatasetSearch(props: Record<string, any>): Promise<KBSearchResponse> {
     unEmpty: searchRes.length > 0 ? true : undefined,
     quoteQA: searchRes,
     responseData: {
-      moduleType: FlowModuleTypeEnum.datasetSearchNode,
-      moduleName,
       price: countModelPrice({
         model: vectorModel.model,
         tokens: tokenLen,
@@ -1,9 +1,12 @@
 export * from './init/history';
 export * from './init/userChatInput';
 export * from './chat/oneapi';
-export * from './kb/search';
+export * from './dataset/search';
 export * from './tools/answer';
 export * from './tools/http';
 export * from './tools/runApp';
 export * from './agent/classifyQuestion';
 export * from './agent/extract';
+export * from './plugin/run';
+export * from './plugin/runInput';
+export * from './plugin/runOutput';
projects/app/src/service/moduleDispatch/plugin/run.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
+import type { ModuleDispatchProps } from '@/types/core/chat/type';
+import { dispatchModules } from '@/pages/api/v1/chat/completions';
+import {
+  FlowNodeSpecialInputKeyEnum,
+  FlowNodeTypeEnum
+} from '@fastgpt/global/core/module/node/constant';
+import { getOnePluginDetail } from '@fastgpt/service/core/plugin/controller';
+import { TaskResponseKeyEnum } from '@/constants/chat';
+import { moduleDispatchResType } from '@/types/chat';
+
+type RunPluginProps = ModuleDispatchProps<{
+  [FlowNodeSpecialInputKeyEnum.pluginId]: string;
+  [key: string]: any;
+}>;
+type RunPluginResponse = {
+  answerText: string;
+  [TaskResponseKeyEnum.responseData]?: moduleDispatchResType[];
+};
+
+export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
+  const {
+    res,
+    variables,
+    user,
+    stream,
+    detail,
+    inputs: { pluginId, ...data }
+  } = props;
+
+  if (!pluginId) {
+    return Promise.reject('Input is empty');
+  }
+
+  const plugin = await getOnePluginDetail({ id: pluginId, userId: user._id });
+  if (!plugin) {
+    return Promise.reject('Plugin not found');
+  }
+
+  const { responseData, answerText } = await dispatchModules({
+    res,
+    modules: plugin.modules,
+    user,
+    variables,
+    params: data,
+    stream,
+    detail
+  });
+
+  const output = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.pluginOutput);
+
+  if (output) {
+    output.moduleLogo = plugin.avatar;
+  }
+
+  return {
+    answerText,
+    // [TaskResponseKeyEnum.responseData]: output,
+    [TaskResponseKeyEnum.responseData]: responseData.filter(
+      (item) => item.moduleType !== FlowNodeTypeEnum.pluginOutput
+    ),
+    ...(output ? output.pluginOutput : {})
+  };
+};
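`dispatchRunPlugin` runs the plugin's own module graph through `dispatchModules`, then hides the internal `pluginOutput` module from `responseData` while spreading its values onto the result. A toy, self-contained sketch of that final shaping step, with stub types instead of the real ones:

```ts
// Stub response item; the real moduleDispatchResType is richer.
type Res = { moduleType: string; pluginOutput?: Record<string, any> };

const responseData: Res[] = [
  { moduleType: 'chatNode' },
  { moduleType: 'pluginOutput', pluginOutput: { result: 'ok' } }
];

const output = responseData.find((item) => item.moduleType === 'pluginOutput');

const shaped = {
  responseData: responseData.filter((item) => item.moduleType !== 'pluginOutput'),
  ...(output ? output.pluginOutput : {})
};
// shaped → { responseData: [{ moduleType: 'chatNode' }], result: 'ok' }
```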
projects/app/src/service/moduleDispatch/plugin/runInput.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
+import type { ModuleDispatchProps } from '@/types/core/chat/type';
+
+export type PluginInputProps = ModuleDispatchProps<{
+  [key: string]: any;
+}>;
+
+export const dispatchPluginInput = (props: PluginInputProps) => {
+  const { inputs } = props;
+
+  return inputs;
+};
projects/app/src/service/moduleDispatch/plugin/runOutput.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { TaskResponseKeyEnum } from '@/constants/chat';
+import { moduleDispatchResType } from '@/types/chat';
+import type { ModuleDispatchProps } from '@/types/core/chat/type';
+
+export type PluginOutputProps = ModuleDispatchProps<{
+  [key: string]: any;
+}>;
+export type PluginOutputResponse = {
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
+};
+
+export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResponse => {
+  const { inputs } = props;
+
+  return {
+    [TaskResponseKeyEnum.responseData]: {
+      price: 0,
+      pluginOutput: inputs
+    }
+  };
+};
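`dispatchPluginInput` and `dispatchPluginOutput` are deliberate pass-throughs: the input module forwards the caller's params into the plugin's sub-flow, and the output module wraps whatever reaches it as a zero-price `pluginOutput`, which run.ts then spreads onto its result. A toy check, where the props literal is a stub cast since a real `ModuleDispatchProps` also carries `res`, `user`, `variables`, and more:

```ts
import { dispatchPluginInput } from './runInput';
import { dispatchPluginOutput } from './runOutput';

// Stubbed props for illustration only.
const props = { inputs: { query: 'hi' } } as any;

console.log(dispatchPluginInput(props));
// → { query: 'hi' }
console.log(dispatchPluginOutput(props));
// → { responseData: { price: 0, pluginOutput: { query: 'hi' } } }
```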
@@ -24,7 +24,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
     res,
     event: detail ? sseResponseEventEnum.answer : undefined,
     data: textAdaptGptResponse({
-      text: formatText
+      text: `\n${formatText}`
     })
   });
 }
@@ -1,7 +1,6 @@
 import { TaskResponseKeyEnum } from '@/constants/chat';
 import { HttpPropsEnum } from '@/constants/flow/flowField';
-import { ChatHistoryItemResType } from '@/types/chat';
-import { FlowModuleTypeEnum } from '@/constants/flow';
+import { moduleDispatchResType } from '@/types/chat';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
 export type HttpRequestProps = ModuleDispatchProps<{
   [HttpPropsEnum.url]: string;
@@ -9,13 +8,12 @@ export type HttpRequestProps = ModuleDispatchProps<{
 }>;
 export type HttpResponse = {
   [HttpPropsEnum.failed]?: boolean;
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType;
   [key: string]: any;
 };
 
 export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
   const {
-    moduleName,
     variables,
     inputs: { url, ...body }
   } = props as HttpRequestProps;
@@ -33,8 +31,6 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
 
   return {
     [TaskResponseKeyEnum.responseData]: {
-      moduleType: FlowModuleTypeEnum.httpRequest,
-      moduleName,
       price: 0,
       body: requestBody,
       httpResult: response
@@ -45,8 +41,6 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
   return {
     [HttpPropsEnum.failed]: true,
     [TaskResponseKeyEnum.responseData]: {
-      moduleType: FlowModuleTypeEnum.httpRequest,
-      moduleName,
       price: 0,
       body: requestBody,
       httpResult: { error }
@@ -1,6 +1,6 @@
-import { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import { moduleDispatchResType, ChatItemType } from '@/types/chat';
 import type { ModuleDispatchProps } from '@/types/core/chat/type';
-import { SelectAppItemType } from '@/types/core/app/flow';
+import { SelectAppItemType } from '@fastgpt/global/core/module/type';
 import { dispatchModules } from '@/pages/api/v1/chat/completions';
 import { App } from '@/service/mongo';
 import { responseWrite } from '@fastgpt/service/common/response';
@@ -13,7 +13,7 @@ type Props = ModuleDispatchProps<{
   app: SelectAppItemType;
 }>;
 type Response = {
-  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType[];
+  [TaskResponseKeyEnum.responseData]: moduleDispatchResType[];
   [TaskResponseKeyEnum.answerText]: string;
   [TaskResponseKeyEnum.history]: ChatItemType[];
 };
@@ -60,8 +60,3 @@ export * from './models/chat';
 export * from './models/chatItem';
 export * from './models/app';
 export * from './common/bill/schema';
-export * from './models/pay';
-export * from './models/promotionRecord';
-export * from './models/collection';
-export * from './models/inform';
-export * from './models/image';