Optimize chat response data (#322)

Archer
2023-09-19 16:10:30 +08:00
committed by GitHub
parent 0a4a1def1e
commit ce7da2db66
22 changed files with 157 additions and 169 deletions

View File

@@ -14,7 +14,6 @@
"UnKnow": "UnKnow",
"Warning": "Warning",
"app": {
"Quote Prompt Settings": "Quote Prompt Settings",
"Advance App TestTip": "The current application is advanced editing mode \n. If you need to switch to [simple mode], please click the save button on the left",
"App Detail": "App Detail",
"Chat Logs Tips": "Logs record the app's online, shared, and API(chatId is existing) conversations",
@@ -40,7 +39,14 @@
"My Apps": "My Apps",
"Output Field Settings": "Output Field Settings",
"Paste Config": "Paste Config",
"Variable Key Repeat Tip": "Variable Key Repeat"
"Quote Prompt Settings": "Quote Prompt Settings",
"Variable Key Repeat Tip": "Variable Key Repeat",
"module": {
"Custom Title Tip": "The title name is displayed during the conversation"
},
"modules": {
"Title is required": "Title is required"
}
},
"chat": {
"Admin Mark Content": "Corrected response",
@@ -81,6 +87,7 @@
"Copy": "Copy",
"Copy Successful": "Copy Successful",
"Course": "",
"Custom Title": "Custom Title",
"Delete": "Delete",
"Delete Failed": "Delete Failed",
"Delete Success": "Delete Successful",

View File

@@ -14,7 +14,6 @@
"UnKnow": "未知",
"Warning": "提示",
"app": {
"Quote Prompt Settings": "引用提示词配置",
"Advance App TestTip": "当前应用为高级编排模式\n如需切换为【简易模式】请点击左侧保存按键",
"App Detail": "应用详情",
"Chat Logs Tips": "日志会记录该应用的在线、分享和 API(需填写 chatId) 对话记录",
@@ -40,7 +39,14 @@
"My Apps": "我的应用",
"Output Field Settings": "输出字段编辑",
"Paste Config": "粘贴配置",
"Variable Key Repeat Tip": "变量 key 重复"
"Quote Prompt Settings": "引用提示词配置",
"Variable Key Repeat Tip": "变量 key 重复",
"module": {
"Custom Title Tip": "该标题名字会展示在对话过程中"
},
"modules": {
"Title is required": "模块名不能为空"
}
},
"chat": {
"Admin Mark Content": "纠正后的回复",
@@ -81,6 +87,7 @@
"Copy": "复制",
"Copy Successful": "复制成功",
"Course": "",
"Custom Title": "自定义标题",
"Delete": "删除",
"Delete Failed": "删除失败",
"Delete Success": "删除成功",

View File

@@ -1,8 +1,12 @@
import { GET, POST } from './request';
export const textCensor = (data: { text: string }) =>
POST<{ code?: number; message: string }>('/plugins/censor/text_baidu', data).then((res) => {
POST<{ code?: number; message: string }>('/plugins/censor/text_baidu', data)
.then((res) => {
if (res?.code === 5000) {
return Promise.reject(res.message);
}
})
.catch((err) => {
return Promise.resolve('');
});
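
A minimal, illustrative call site for the reshaped textCensor (the wrapper function and its usage are assumptions; only textCensor itself comes from the diff):

// Hypothetical caller: run the Baidu text censor before dispatching the message.
// The trailing .catch above resolves request failures, so the await here does not
// throw on network or plugin errors and the chat flow continues.
const censorThenSend = async (text: string) => {
  await textCensor({ text });
  // ...continue with sending the chat request
};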

View File

@@ -1,5 +1,4 @@
import React, { useCallback, useMemo, useState } from 'react';
import { ChatModuleEnum } from '@/constants/chat';
import { ChatHistoryItemResType, ChatItemType, QuoteItemType } from '@/types/chat';
import { Flex, BoxProps, useDisclosure } from '@chakra-ui/react';
import { useTranslation } from 'react-i18next';
@@ -7,6 +6,8 @@ import { useGlobalStore } from '@/store/global';
import dynamic from 'next/dynamic';
import Tag from '../Tag';
import MyTooltip from '../MyTooltip';
import { FlowModuleTypeEnum } from '@/constants/flow';
const QuoteModal = dynamic(() => import('./QuoteModal'), { ssr: false });
const ContextModal = dynamic(() => import('./ContextModal'), { ssr: false });
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'), { ssr: false });
@@ -32,15 +33,15 @@ const ResponseTags = ({
const {
quoteList = [],
completeMessages = [],
tokens = 0
historyPreview = [],
runningTime = 0
} = useMemo(() => {
const chatData = responseData.find((item) => item.moduleName === ChatModuleEnum.AIChat);
const chatData = responseData.find((item) => item.moduleType === FlowModuleTypeEnum.chatNode);
if (!chatData) return {};
return {
quoteList: chatData.quoteList,
completeMessages: chatData.completeMessages,
tokens: responseData.reduce((sum, item) => sum + (item.tokens || 0), 0)
historyPreview: chatData.historyPreview,
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0)
};
}, [responseData]);
@@ -65,21 +66,21 @@ const ResponseTags = ({
</Tag>
</MyTooltip>
)}
{completeMessages.length > 0 && (
{historyPreview.length > 0 && (
<MyTooltip label={'点击查看完整对话记录'}>
<Tag
colorSchema="green"
cursor={'pointer'}
{...TagStyles}
onClick={() => setContextModalData(completeMessages)}
onClick={() => setContextModalData(historyPreview)}
>
{completeMessages.length}
{historyPreview.length}
</Tag>
</MyTooltip>
)}
{isPc && tokens > 0 && (
{isPc && runningTime > 0 && (
<Tag colorSchema="purple" cursor={'default'} {...TagStyles}>
{tokens}Tokens
{runningTime}s
</Tag>
)}
<MyTooltip label={'点击查看完整响应值'}>
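
Since the removed and added lines are interleaved above, here is a condensed restatement of the hook after the change (no new behaviour; types taken from this commit):

import type { ChatHistoryItemResType } from '@/types/chat';
import { FlowModuleTypeEnum } from '@/constants/flow';

// Derive the tag data from the per-module response list: the AI chat node is now
// located by moduleType, the context tag shows historyPreview instead of
// completeMessages, and the purple tag sums runningTime rather than tokens.
function deriveTagData(responseData: ChatHistoryItemResType[]) {
  const chatData = responseData.find((item) => item.moduleType === FlowModuleTypeEnum.chatNode);
  return {
    quoteList: chatData?.quoteList ?? [],
    historyPreview: chatData?.historyPreview ?? [],
    runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0)
  };
}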

View File

@@ -21,7 +21,7 @@ const ResponseModal = ({
() =>
response.map((item) => {
const copy = { ...item };
delete copy.completeMessages;
delete copy.historyPreview;
delete copy.quoteList;
return copy;
}),

View File

@@ -94,7 +94,7 @@ const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconName } &
const [IconComponent, setIconComponent] = useState<any>(null);
useEffect(() => {
iconPaths[name]()
iconPaths[name]?.()
.then((icon) => {
setIconComponent({ as: icon.default });
})

View File

@@ -52,14 +52,6 @@ export const ChatSourceMap = {
}
};
export enum ChatModuleEnum {
'AIChat' = 'AI Chat',
'KBSearch' = 'KB Search',
'CQ' = 'Classify Question',
'Extract' = 'Content Extract',
'Http' = 'Http'
}
export enum OutLinkTypeEnum {
'share' = 'share',
'iframe' = 'iframe'
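
For reference, the removed ChatModuleEnum members correspond to the FlowModuleTypeEnum values written into responseData.moduleType by the dispatch changes later in this commit (a sketch, not part of the commit itself):

import { FlowModuleTypeEnum } from '@/constants/flow';

// Old display-name enum value -> flow type now stored in responseData.moduleType
const replacedBy = {
  'AI Chat': FlowModuleTypeEnum.chatNode,
  'KB Search': FlowModuleTypeEnum.kbSearchNode,
  'Classify Question': FlowModuleTypeEnum.classifyQuestion,
  'Content Extract': FlowModuleTypeEnum.contentExtract,
  Http: FlowModuleTypeEnum.httpRequest
} as const;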

View File

@@ -26,12 +26,12 @@ export const welcomeTextTip =
'每次对话开始前,发送一个初始内容。支持标准 Markdown 语法,可使用的额外标记:\n[快捷按键]: 用户点击后可以直接发送该问题';
export const VariableModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.variable,
logo: '/imgs/module/variable.png',
name: '全局变量',
intro: '可以在对话开始前,要求用户填写一些内容作为本轮对话的变量。该模块位于开场引导之后。',
description:
'全局变量可以通过 {{变量key}} 的形式注入到其他模块 string 类型的输入中,例如:提示词、限定词等',
flowType: FlowModuleTypeEnum.variable,
inputs: [
{
key: SystemInputEnum.variables,
@@ -43,10 +43,10 @@ export const VariableModule: FlowModuleTemplateType = {
outputs: []
};
export const UserGuideModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.userGuide,
logo: '/imgs/module/userGuide.png',
name: '用户引导',
intro: userGuideTip,
flowType: FlowModuleTypeEnum.userGuide,
inputs: [
{
key: SystemInputEnum.welcomeText,
@@ -57,10 +57,10 @@ export const UserGuideModule: FlowModuleTemplateType = {
outputs: []
};
export const UserInputModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.questionInput,
logo: '/imgs/module/userChatInput.png',
name: '用户问题(对话入口)',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
flowType: FlowModuleTypeEnum.questionInput,
inputs: [
{
key: SystemInputEnum.userChatInput,
@@ -79,10 +79,10 @@ export const UserInputModule: FlowModuleTemplateType = {
]
};
export const HistoryModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.historyNode,
logo: '/imgs/module/history.png',
name: '聊天记录',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
flowType: FlowModuleTypeEnum.historyNode,
inputs: [
{
key: 'maxContext',
@@ -110,10 +110,10 @@ export const HistoryModule: FlowModuleTemplateType = {
};
export const ChatModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.chatNode,
logo: '/imgs/module/AI.png',
name: 'AI 对话',
intro: 'AI 大模型对话',
flowType: FlowModuleTypeEnum.chatNode,
showStatus: true,
inputs: [
{
@@ -209,10 +209,10 @@ export const ChatModule: FlowModuleTemplateType = {
};
export const KBSearchModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.kbSearchNode,
logo: '/imgs/module/db.png',
name: '知识库搜索',
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: FlowModuleTypeEnum.kbSearchNode,
showStatus: true,
inputs: [
{
@@ -280,11 +280,11 @@ export const KBSearchModule: FlowModuleTemplateType = {
};
export const AnswerModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.answerNode,
logo: '/imgs/module/reply.png',
name: '指定回复',
intro: '该模块可以直接回复一段指定的内容。常用于引导、提示',
description: '该模块可以直接回复一段指定的内容。常用于引导、提示',
flowType: FlowModuleTypeEnum.answerNode,
inputs: [
Input_Template_TFSwitch,
{
@@ -308,40 +308,13 @@ export const AnswerModule: FlowModuleTemplateType = {
}
]
};
export const TFSwitchModule: FlowModuleTemplateType = {
logo: '',
name: 'TF开关',
intro: '可以判断输入的内容为 True 或者 False从而执行不同操作。',
flowType: FlowModuleTypeEnum.tfSwitchNode,
inputs: [
{
key: SystemInputEnum.switch,
type: FlowInputItemTypeEnum.target,
label: '输入'
}
],
outputs: [
{
key: 'true',
label: 'True',
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
key: 'false',
label: 'False',
type: FlowOutputItemTypeEnum.source,
targets: []
}
]
};
export const ClassifyQuestionModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.classifyQuestion,
logo: '/imgs/module/cq.png',
name: '问题分类',
intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
description:
'根据用户的历史记录和当前问题判断该次提问的类型。可以添加多组问题类型,下面是一个模板例子:\n类型1: 打招呼\n类型2: 关于 laf 通用问题\n类型3: 关于 laf 代码问题\n类型4: 其他问题',
flowType: FlowModuleTypeEnum.classifyQuestion,
showStatus: true,
inputs: [
{
@@ -398,11 +371,11 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
]
};
export const ContextExtractModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.contentExtract,
logo: '/imgs/module/extract.png',
name: '文本内容提取',
intro: '从文本中提取出指定格式的数据',
description: '可从文本中提取指定的数据例如sql语句、搜索关键词、代码等',
flowType: FlowModuleTypeEnum.contentExtract,
showStatus: true,
inputs: [
Input_Template_TFSwitch,
@@ -458,11 +431,11 @@ export const ContextExtractModule: FlowModuleTemplateType = {
]
};
export const HttpModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.httpRequest,
logo: '/imgs/module/http.png',
name: 'HTTP模块',
intro: '可以发出一个 HTTP POST 请求,实现更为复杂的操作(联网搜索、数据库查询等)',
description: '可以发出一个 HTTP POST 请求,实现更为复杂的操作(联网搜索、数据库查询等)',
flowType: FlowModuleTypeEnum.httpRequest,
showStatus: true,
inputs: [
{
@@ -487,11 +460,11 @@ export const HttpModule: FlowModuleTemplateType = {
]
};
export const EmptyModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.empty,
logo: '/imgs/module/cq.png',
name: '该模块已被移除',
intro: '',
description: '',
flowType: FlowModuleTypeEnum.empty,
inputs: [],
outputs: []
};

View File

@@ -154,11 +154,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
stream,
detail
});
// console.log(responseData, '===', answerText);
// if (!answerText) {
// throw new Error('回复内容为空,可能模块编排出现问题');
// }
// save chat
if (chatId) {
@@ -284,6 +279,7 @@ export async function dispatchModules({
// let storeData: Record<string, any> = {}; // after module used
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
let chatAnswerText = ''; // AI answer
let runningTime = Date.now();
function pushStore({
answerText = '',
@@ -292,7 +288,13 @@ export async function dispatchModules({
answerText?: string;
responseData?: ChatHistoryItemResType;
}) {
responseData && chatResponse.push(responseData);
const time = Date.now();
responseData &&
chatResponse.push({
...responseData,
runningTime: +((time - runningTime) / 1000).toFixed(2)
});
runningTime = time;
chatAnswerText += answerText;
}
function moduleInput(
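
The per-module timing added to pushStore, pulled out as a standalone sketch (names here are illustrative; only the arithmetic mirrors the diff):

// Each module's runningTime is the elapsed time, in seconds with two decimals,
// since the previous module finished (or since dispatch started for the first module).
let lastFinishedAt = Date.now();

function stampRunningTime<T extends object>(responseData: T) {
  const now = Date.now();
  const runningTime = +((now - lastFinishedAt) / 1000).toFixed(2);
  lastFinishedAt = now;
  return { ...responseData, runningTime };
}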

View File

@@ -6,7 +6,8 @@ import type { FlowModuleItemType } from '@/types/flow';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { useTranslation } from 'react-i18next';
import { useCopyData } from '@/utils/tools';
import { useEditTitle } from '@/hooks/useEditTitle';
import { useToast } from '@/hooks/useToast';
type Props = FlowModuleItemType & {
children?: React.ReactNode | React.ReactNode[] | string;
@@ -22,29 +23,48 @@ const NodeCard = (props: Props) => {
minW = '300px',
onCopyNode,
onDelNode,
onChangeNode,
moduleId
} = props;
const { copyData } = useCopyData();
const { t } = useTranslation();
const theme = useTheme();
const { toast } = useToast();
// custom title edit
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
title: t('common.Custom Title'),
placeholder: t('app.module.Custom Title Tip') || ''
});
const menuList = useMemo(
() => [
{
icon: 'edit',
label: t('common.Rename'),
onClick: () =>
onOpenModal({
defaultVal: name,
onSuccess: (e) => {
if (!e) {
return toast({
title: t('app.modules.Title is required'),
status: 'warning'
});
}
onChangeNode({
moduleId,
type: 'attr',
key: 'name',
value: e
});
}
})
},
{
icon: 'copy',
label: t('common.Copy'),
onClick: () => onCopyNode(moduleId)
},
// {
// icon: 'settingLight',
// label: t('app.Copy Module Config'),
// onClick: () => {
// const copyProps = { ...props };
// delete copyProps.children;
// delete copyProps.children;
// console.log(copyProps);
// }
// },
{
icon: 'delete',
label: t('common.Delete'),
@@ -100,6 +120,7 @@ const NodeCard = (props: Props) => {
</Menu>
</Flex>
{children}
<EditTitleModal />
</Box>
);
};
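
The useEditTitle hook itself is not part of this diff; the shape assumed by the rename menu item above is roughly the following (a sketch inferred from the usage, not the hook's actual source):

// Assumed API of useEditTitle as called from NodeCard.
type UseEditTitleReturn = {
  onOpenModal: (props: { defaultVal?: string; onSuccess: (val: string) => void }) => void;
  EditModal: () => JSX.Element; // rendered once at the bottom of the card
};
declare function useEditTitle(props: { title: string; placeholder?: string }): UseEditTitleReturn;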

View File

@@ -276,49 +276,28 @@ const AppEdit = ({ app, onCloseSettings }: Props) => {
setNodes((nodes) =>
nodes.map((node) => {
if (node.id !== moduleId) return node;
const updateObj: Record<string, any> = {};
if (type === 'inputs') {
return {
...node,
data: {
...node.data,
inputs: node.data.inputs.map((item) => (item.key === key ? value : item))
}
};
}
if (type === 'addInput') {
updateObj.inputs = node.data.inputs.map((item) => (item.key === key ? value : item));
} else if (type === 'addInput') {
const input = node.data.inputs.find((input) => input.key === value.key);
if (input) {
toast({
status: 'warning',
title: 'key 重复'
});
return {
...node,
data: {
...node.data,
inputs: node.data.inputs
updateObj.inputs = node.data.inputs;
} else {
updateObj.inputs = node.data.inputs.concat(value);
}
};
}
return {
...node,
data: {
...node.data,
inputs: node.data.inputs.concat(value)
}
};
}
if (type === 'delInput') {
} else if (type === 'delInput') {
onDelEdge({ moduleId, targetHandle: key });
return {
...node,
data: {
...node.data,
inputs: node.data.inputs.filter((item) => item.key !== key)
}
};
}
updateObj.inputs = node.data.inputs.filter((item) => item.key !== key);
} else if (type === 'attr') {
updateObj[key] = value;
} else if (type === 'outputs') {
// del output connect
const delOutputs = node.data.outputs.filter(
(item) => !value.find((output: FlowOutputTargetItemType) => output.key === item.key)
@@ -326,12 +305,14 @@ const AppEdit = ({ app, onCloseSettings }: Props) => {
delOutputs.forEach((output) => {
onDelEdge({ moduleId, sourceHandle: output.key });
});
updateObj.outputs = value;
}
return {
...node,
data: {
...node.data,
outputs: value
...updateObj
}
};
})
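
Because removed and added lines are interleaved in the hunk above, here is a hedged reconstruction of the updater after the refactor (assembled from the added lines; variables such as setNodes, toast and onDelEdge are the component's existing in-scope helpers, and details of the duplicate-key branch may differ):

setNodes((nodes) =>
  nodes.map((node) => {
    if (node.id !== moduleId) return node;
    const updateObj: Record<string, any> = {};
    if (type === 'inputs') {
      updateObj.inputs = node.data.inputs.map((item) => (item.key === key ? value : item));
    } else if (type === 'addInput') {
      const exists = node.data.inputs.find((input) => input.key === value.key);
      if (exists) {
        toast({ status: 'warning', title: 'key 重复' });
        updateObj.inputs = node.data.inputs; // keep inputs unchanged on duplicate key
      } else {
        updateObj.inputs = node.data.inputs.concat(value);
      }
    } else if (type === 'delInput') {
      onDelEdge({ moduleId, targetHandle: key });
      updateObj.inputs = node.data.inputs.filter((item) => item.key !== key);
    } else if (type === 'attr') {
      updateObj[key] = value; // e.g. the rename action sets key: 'name'
    } else if (type === 'outputs') {
      // edges for outputs that no longer exist are removed first, then outputs replaced
      updateObj.outputs = value;
    }
    return { ...node, data: { ...node.data, ...updateObj } };
  })
);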

View File

@@ -30,7 +30,7 @@ export const pushTaskBill = async ({
total,
source,
list: response.map((item) => ({
moduleName: item.moduleName,
moduleType: item.moduleType,
amount: item.price || 0,
model: item.model,
tokenLen: item.tokens

View File

@@ -62,24 +62,7 @@ const ChatSchema = new Schema({
default: ''
},
[TaskResponseKeyEnum.responseData]: {
type: [
{
moduleName: String,
price: String,
model: String,
tokens: Number,
question: String,
answer: String,
temperature: Number,
maxToken: Number,
quoteList: Array,
completeMessages: Array,
similarity: Number,
limit: Number,
cqList: Array,
cqResult: String
}
],
type: Array,
default: []
}
}

View File

@@ -1,7 +1,7 @@
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice } from '@/service/events/pushBill';
@@ -9,6 +9,7 @@ import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type CQProps = {
systemPrompt?: string;
@@ -95,7 +96,7 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
return {
[result.key]: 1,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.CQ,
moduleType: FlowModuleTypeEnum.classifyQuestion,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model: agentModel, tokens }),
model: getModel(agentModel)?.name || agentModel,
tokens,

View File

@@ -1,13 +1,14 @@
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type Props = {
userOpenaiAccount: UserModelSchema['openaiAccount'];
@@ -118,7 +119,7 @@ export async function dispatchContentExtract({
[ContextExtractEnum.fields]: JSON.stringify(arg),
...arg,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.Extract,
moduleType: FlowModuleTypeEnum.contentExtract,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model: agentModel, tokens }),
model: getModel(agentModel)?.name || agentModel,
tokens,

View File

@@ -3,7 +3,7 @@ import { sseResponse } from '@/service/utils/tools';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { SSEParseData, parseStreamChunk } from '@/utils/sse';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
@@ -20,6 +20,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/prompts/core/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type ChatProps = AIChatProps & {
res: NextApiResponse;
@@ -175,15 +176,14 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
return {
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.AIChat,
moduleType: FlowModuleTypeEnum.chatNode,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
answer: answerText,
maxToken: max_tokens,
quoteList: filterQuoteQA,
completeMessages
historyPreview: getHistoryPreview(completeMessages)
},
finish: true
};
@@ -371,3 +371,14 @@ async function streamResponse({
answer
};
}
function getHistoryPreview(completeMessages: ChatItemType[]) {
return completeMessages.map((item, i) => {
if (item.obj === ChatRoleEnum.System) return item;
if (i >= completeMessages.length - 2) return item;
return {
...item,
value: item.value.length > 15 ? `${item.value.slice(0, 15)}...` : item.value
};
});
}
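
A standalone sketch of the truncation rule getHistoryPreview applies (types simplified; the real ChatItemType carries more fields):

type PreviewItem = { obj: string; value: string };

// System prompts and the two most recent messages are kept intact; everything
// else is cut to 15 characters so the stored historyPreview stays compact.
function previewSketch(messages: PreviewItem[], keepLast = 2, maxLen = 15) {
  return messages.map((item, i) => {
    if (item.obj === 'System') return item;
    if (i >= messages.length - keepLast) return item;
    return {
      ...item,
      value: item.value.length > maxLen ? `${item.value.slice(0, maxLen)}...` : item.value
    };
  });
}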

View File

@@ -1,11 +1,12 @@
import { PgClient } from '@/service/pg';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatModuleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice } from '@/service/events/pushBill';
import type { SelectedKbType } from '@/types/plugin';
import type { QuoteItemType } from '@/types/chat';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
type KBSearchProps = {
kbList: SelectedKbType;
@@ -57,7 +58,7 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
unEmpty: searchRes.length > 0 ? true : undefined,
quoteQA: searchRes,
responseData: {
moduleName: ChatModuleEnum.KBSearch,
moduleType: FlowModuleTypeEnum.kbSearchNode,
price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
model: vectorModel.name,
tokens: tokenLen,

View File

@@ -1,7 +1,8 @@
import { ChatModuleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { HttpPropsEnum } from '@/constants/flow/flowField';
import { ChatHistoryItemResType } from '@/types/chat';
import type { NextApiResponse } from 'next';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type HttpRequestProps = {
res: NextApiResponse;
@@ -26,7 +27,7 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<H
return {
[HttpPropsEnum.finish]: true,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.Http,
moduleType: FlowModuleTypeEnum.httpRequest,
price: 0,
httpResult: response
},
@@ -37,7 +38,7 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<H
[HttpPropsEnum.finish]: true,
[HttpPropsEnum.failed]: true,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.Http,
moduleType: FlowModuleTypeEnum.httpRequest,
price: 0,
httpResult: {}
}

View File

@@ -38,9 +38,9 @@ export const jsonRes = <T = any>(
}
// another error
let msg = error?.response?.statusText || error?.message || '请求错误';
let msg = '';
if ((code < 200 || code >= 400) && !message) {
msg = error?.message || '请求错误';
msg = error?.response?.statusText || error?.message || '请求错误';
if (typeof error === 'string') {
msg = error;
} else if (proxyError[error?.code]) {
@@ -59,7 +59,7 @@ export const jsonRes = <T = any>(
res.status(code).json({
code,
statusText: '',
message: msg,
message: message || msg,
data: data !== undefined ? data : null
});
};
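
Illustrative behaviour of the reordered message handling (the call shape is assumed from the snippet; only code, message, error and data appear in the diff, and res is the NextApiResponse already in scope):

// An explicit message now always wins; the error-derived text is only a fallback
// when no message was supplied and the status is outside the 2xx/3xx range.
jsonRes(res, { code: 500, error: new Error('connect ECONNREFUSED') });
// -> responds with message 'connect ECONNREFUSED'

jsonRes(res, { code: 500, message: 'custom error text', error: new Error('boom') });
// -> responds with message 'custom error text' (previously the error-derived text was used)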

View File

@@ -4,6 +4,7 @@ import { TaskResponseKeyEnum } from '@/constants/chat';
import { ClassifyQuestionAgentItemType } from './app';
import { ChatItemSchema } from './mongoSchema';
import { KbDataItemType } from './plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type ExportChatType = 'md' | 'pdf' | 'html';
@@ -46,19 +47,20 @@ export type QuoteItemType = KbDataItemType & {
kb_id: string;
};
// response data
export type ChatHistoryItemResType = {
moduleName: string;
moduleType: `${FlowModuleTypeEnum}`;
price: number;
runningTime?: number;
model?: string;
tokens?: number;
// chat
answer?: string;
question?: string;
temperature?: number;
maxToken?: number;
quoteList?: QuoteItemType[];
completeMessages?: ChatItemType[];
historyPreview?: ChatItemType[]; // completion context array. history will slice
// kb search
similarity?: number;
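
An illustrative entry under the new shape (all values are made up; the fields follow the type above):

import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ChatHistoryItemResType } from '@/types/chat';

const example: ChatHistoryItemResType = {
  moduleType: FlowModuleTypeEnum.chatNode,
  price: 0,
  runningTime: 1.32, // seconds, stamped by pushStore
  model: 'example-model',
  tokens: 413,
  question: 'example question',
  answer: 'example answer',
  maxToken: 4000,
  quoteList: [],
  historyPreview: [] // truncated completion context, replaces completeMessages
};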

View File

@@ -10,7 +10,7 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
export type FlowModuleItemChangeProps = {
moduleId: string;
type: 'inputs' | 'outputs' | 'addInput' | 'delInput';
type: 'attr' | 'inputs' | 'outputs' | 'addInput' | 'delInput';
key: string;
value: any;
};
@@ -48,12 +48,12 @@ export type FlowOutputItemType = {
};
export type FlowModuleTemplateType = {
flowType: `${FlowModuleTypeEnum}`; // unique
logo: string;
name: string;
description?: string;
intro: string;
flowType: `${FlowModuleTypeEnum}`;
showStatus?: boolean;
showStatus?: boolean; // chatting response step status
inputs: FlowInputItemType[];
outputs: FlowOutputItemType[];
};

View File

@@ -92,8 +92,8 @@ export const appModule2FlowNode = ({
// replace item data
const moduleItem: FlowModuleItemType = {
...item,
...template,
...item,
inputs: concatInputs.map((templateInput) => {
// use latest inputs
const itemInput = item.inputs.find((item) => item.key === templateInput.key) || templateInput;
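
If the reordering above is read as moving the stored item spread after the template spread, the intent would be that saved app data overrides template defaults, e.g. (names illustrative):

// Later spreads win on conflicting keys, so the saved module keeps its custom name
// and values while still picking up any newly added template fields.
const merged = { ...templateDefaults, ...savedModuleItem };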