Mirror of https://github.com/labring/FastGPT.git (synced 2025-08-01 20:27:45 +00:00)

perf: response key
@@ -10,7 +10,7 @@ import type { ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
 import { parseStreamChunk, textAdaptGptResponse } from '@/utils/adapt';
 import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
-import { SpecificInputEnum } from '@/constants/app';
+import { TaskResponseKeyEnum } from '@/constants/app';
 import { getChatModel } from '@/service/utils/data';
 import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
 import { authUser } from '@/service/utils/auth';
@@ -27,7 +27,7 @@ export type Props = {
   limitPrompt?: string;
   billId?: string;
 };
-export type Response = { [SpecificInputEnum.answerText]: string; totalTokens: number };
+export type Response = { [TaskResponseKeyEnum.answerText]: string; totalTokens: number };
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   let { model, stream } = req.body as Props;
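
The hunks above only switch which enum supplies the answer key; the enum's string value is not shown in this diff. A minimal sketch of how the computed-key response type works, assuming TaskResponseKeyEnum.answerText resolves to the string 'answerText' (the type name is changed here to avoid clashing with the DOM Response type in a standalone snippet):

enum TaskResponseKeyEnum {
  answerText = 'answerText' // assumed value, not visible in this commit
}

type ChatApiResponse = { [TaskResponseKeyEnum.answerText]: string; totalTokens: number };

// A value of this type is keyed by the enum member:
const demo: ChatApiResponse = {
  [TaskResponseKeyEnum.answerText]: 'Hi there',
  totalTokens: 8
};
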
@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
 import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
 import type { ChatItemType } from '@/types/chat';
-import { ChatRoleEnum, rawSearchKey } from '@/constants/chat';
+import { ChatRoleEnum, rawSearchKey, responseDataKey } from '@/constants/chat';
 import { modelToolMap } from '@/utils/plugin';
 import { getVector } from '@/pages/api/openapi/plugin/vector';
 import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
@@ -29,7 +29,9 @@ type Props = {
   billId?: string;
 };
 type Response = {
-  [rawSearchKey]: QuoteItemType[];
+  [responseDataKey]: {
+    [rawSearchKey]: QuoteItemType[];
+  };
   isEmpty?: boolean;
   quotePrompt?: string;
 };
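
The raw search results now also live under a dedicated response-data key. A self-contained sketch of the new shape, assuming responseDataKey === 'responseData' and rawSearchKey === 'rawSearch' (consistent with the plain-key return statement later in this file; QuoteItemType is simplified here):

const rawSearchKey = 'rawSearch' as const; // stand-in for the constant exported by '@/constants/chat'
const responseDataKey = 'responseData' as const; // stand-in; value inferred from the return below

type QuoteItemType = { id: string; q: string; a: string }; // simplified; real fields are not in this diff

type KbSearchResponse = {
  [responseDataKey]: {
    [rawSearchKey]: QuoteItemType[];
  };
  isEmpty?: boolean;
  quotePrompt?: string;
};
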
@@ -112,7 +114,6 @@ export async function kbSearch({
   // filter part quote by maxToken
   const sliceResult = modelToolMap
     .tokenSlice({
       model: 'gpt-3.5-turbo',
       maxToken,
       messages: searchRes.map((item, i) => ({
         obj: ChatRoleEnum.System,
@@ -128,7 +129,9 @@ export async function kbSearch({
 
   return {
     isEmpty: rawSearch.length === 0 ? true : undefined,
     rawSearch,
-    quotePrompt: sliceResult ? `知识库:\n${sliceResult}` : undefined
+    quotePrompt: sliceResult ? `知识库:\n${sliceResult}` : undefined,
+    responseData: {
+      rawSearch
+    }
   };
 }

@@ -10,7 +10,7 @@ import { getChatHistory } from './getHistory';
 import { saveChat } from '@/pages/api/chat/saveChat';
 import { sseResponse } from '@/service/utils/tools';
 import { type ChatCompletionRequestMessage } from 'openai';
-import { SpecificInputEnum, AppModuleItemTypeEnum } from '@/constants/app';
+import { TaskResponseKeyEnum, AppModuleItemTypeEnum } from '@/constants/app';
 import { Types } from 'mongoose';
 import { moduleFetch } from '@/service/api/request';
 import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
@@ -223,34 +223,23 @@ export async function dispatchModules({
 }) {
   const runningModules = loadModules(modules, variables);
 
-  let storeData: Record<string, any> = {}; // after module used
-  let responseData: Record<string, any> = {}; // response request and save to database
+  // let storeData: Record<string, any> = {}; // after module used
+  let chatResponse: Record<string, any> = {}; // response request and save to database
   let answerText = ''; // AI answer
 
   function pushStore({
-    isResponse = false,
     answer,
-    data = {}
+    responseData = {}
   }: {
-    isResponse?: boolean;
     answer?: string;
-    data?: Record<string, any>;
+    responseData?: Record<string, any>;
   }) {
-    if (isResponse) {
-      responseData = {
-        ...responseData,
-        ...data
-      };
-    }
-
-    if (answer) {
-      answerText += answer;
-    }
-
-    storeData = {
-      ...storeData,
-      ...data
+    chatResponse = {
+      ...chatResponse,
+      ...responseData
     };
+
+    answerText += answer;
   }
   function moduleInput(
     module: RunningModuleItemType,
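
Stripped of its surroundings, the new pushStore is just an accumulator: every responseData it receives is merged into one chatResponse object and answers are concatenated, so callers no longer need the isResponse flag. A runnable mini version (simplified; the real dispatchModules keeps these variables in closure scope):

let chatResponse: Record<string, any> = {}; // merged module responses, later saved to the database
let answerText = ''; // concatenated AI answer

function pushStore({
  answer,
  responseData = {}
}: {
  answer?: string;
  responseData?: Record<string, any>;
}) {
  chatResponse = { ...chatResponse, ...responseData };
  answerText += answer ?? ''; // guard added here; the hunk above appends `answer` as-is
}

pushStore({ answer: 'Hello', responseData: { rawSearch: [] } });
pushStore({ responseData: { totalTokens: 42 } });
// chatResponse => { rawSearch: [], totalTokens: 42 }, answerText => 'Hello'
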
@@ -282,20 +271,13 @@ export async function dispatchModules({
     module: RunningModuleItemType,
     result: Record<string, any> = {}
   ): Promise<any> {
+    pushStore(result);
     return Promise.all(
       module.outputs.map((outputItem) => {
         if (result[outputItem.key] === undefined) return;
         /* update output value */
         outputItem.value = result[outputItem.key];
 
-        pushStore({
-          isResponse: outputItem.response,
-          answer: outputItem.answer ? outputItem.value : '',
-          data: {
-            [outputItem.key]: outputItem.value
-          }
-        });
-
         /* update target */
         return Promise.all(
           outputItem.targets.map((target: any) => {
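
With the per-output bookkeeping gone, a module's whole result is recorded once via pushStore(result) and each output value is still copied to outputItem.value and forwarded to its targets. What moduleFetch actually resolves with is not shown in this commit; an illustrative shape, reusing the kbSearch response from the earlier hunks:

// Hypothetical module result; pushStore(result) reads the optional `answer` and `responseData` fields.
const result: Record<string, any> = {
  quotePrompt: '知识库:\n1: [...]',
  isEmpty: undefined,
  responseData: { rawSearch: [] } // merged into chatResponse by pushStore(result)
};
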
@@ -315,7 +297,7 @@ export async function dispatchModules({
     // direct answer
     if (module.type === AppModuleItemTypeEnum.answer) {
       const text =
-        module.inputs.find((item) => item.key === SpecificInputEnum.answerText)?.value || '';
+        module.inputs.find((item) => item.key === TaskResponseKeyEnum.answerText)?.value || '';
       pushStore({
         answer: text
       });
@@ -365,7 +347,7 @@ export async function dispatchModules({
   await Promise.all(initModules.map((module) => moduleInput(module, params)));
 
   return {
-    responseData,
+    responseData: chatResponse,
     answerText
   };
 }
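
The dispatcher's return keeps the same field names for callers; only the backing variable is renamed. A sketch of the returned shape, with types taken from the declarations in the hunks above:

type DispatchResult = {
  responseData: Record<string, any>; // the merged chatResponse, saved with the chat record
  answerText: string; // the concatenated AI answer
};
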
@@ -402,7 +384,7 @@ function loadModules(
     }),
     outputs: module.outputs.map((item) => ({
       key: item.key,
-      answer: item.key === SpecificInputEnum.answerText,
+      answer: item.key === TaskResponseKeyEnum.answerText,
       response: item.response,
       value: undefined,
       targets: item.targets