perf: template

This commit is contained in:
archer
2023-07-23 16:08:00 +08:00
parent 5e0b147048
commit b7d18e38d1
7 changed files with 923 additions and 1338 deletions

File diff suppressed because it is too large. [Load Diff]

View File

@@ -31,6 +31,10 @@ export enum FlowModuleTypeEnum {
classifyQuestion = 'classifyQuestion'
}
// Module-input keys whose enum member name differs from the raw `key`
// string stored on the module input (the 'answerText' input is stored
// under the key 'text').
export enum SpecialInputKeyEnum {
  answerText = 'text'
}
export const initModuleType: Record<string, boolean> = {
[FlowModuleTypeEnum.historyNode]: true,
[FlowModuleTypeEnum.questionInput]: true
@@ -38,8 +42,8 @@ export const initModuleType: Record<string, boolean> = {
// Default styling applied to flow-graph edges.
// Fixes: the stale pre-change `strokeWidth: 1,` line (diff residue) left a
// duplicate property, and the color literal had a stray trailing 's'
// ('#5A646Es' is not a valid hex color and would be ignored by the renderer).
export const edgeOptions = {
  style: {
    strokeWidth: 1.5,
    stroke: '#5A646E'
  }
};
// Styling for the in-progress connection line drawn while dragging between
// handles. Fixes: diff residue left BOTH the old and new declarations
// (a duplicate top-level `const` is a compile error) — keep the post-change
// 1.5 width — and the hex color had a stray trailing 's' making it invalid.
export const connectionLineStyle = { strokeWidth: 1.5, stroke: '#5A646E' };

View File

@@ -41,15 +41,19 @@ const NodeCard = ({
</MyTooltip>
)}
<Box flex={1} />
<MyIcon
<Flex
className={'nodrag'}
name="delete"
cursor={'pointer'}
w={'22px'}
h={'22px'}
alignItems={'center'}
justifyContent={'center'}
color={'myGray.600'}
w={'16px'}
_hover={{ color: 'red.600' }}
cursor={'pointer'}
onClick={() => onDelNode(moduleId)}
/>
>
<MyIcon name="delete" w={'16px'} />
</Flex>
</Flex>
{children}
</Box>

View File

@@ -283,7 +283,6 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
useEffect(() => {
initData(JSON.parse(JSON.stringify(app)));
}, [app, initData]);
console.log(flow2AppModules());
return (
<>

View File

@@ -51,7 +51,7 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
// function body
const agentFunction = {
name: agentFunName,
description: '判断用户问题的类型,并返回指定值',
description: '判断用户问题的类型属于哪方面,返回对应的枚举字段',
parameters: {
type: 'object',
properties: {
@@ -81,13 +81,9 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '');
if (!arg.type) {
throw new Error('');
}
const tokens = response.data.usage?.total_tokens || 0;
const result = agents.find((item) => item.key === arg.type) || agents[0];
const result = agents.find((item) => item.key === arg?.type) || agents[0];
return {
[result.key]: 1,

View File

@@ -31,7 +31,7 @@ const simplifyStr = (str = '') =>
.replace(/[^\S\r\n]+/g, ' ') // 连续空白内容
.trim();
/* 聊天上下文 tokens 截断 */
/* slice chat context by tokens */
export const ChatContextFilter = ({
model,
prompts,
@@ -41,33 +41,19 @@ export const ChatContextFilter = ({
prompts: ChatItemType[];
maxTokens: number;
}) => {
const systemPrompts: ChatItemType[] = [];
const chatPrompts: ChatItemType[] = [];
const rawTextLen = prompts.reduce((sum, item) => sum + item.value.length, 0);
let rawTextLen = 0;
prompts.forEach((item) => {
const val = simplifyStr(item.value);
rawTextLen += val.length;
const data = {
_id: item._id,
obj: item.obj,
value: val
};
if (item.obj === ChatRoleEnum.System) {
systemPrompts.push(data);
} else {
chatPrompts.push(data);
}
});
// 长度太小时,不需要进行 token 截断
// If the text length is less than half of the maximum token, no calculation is required
if (rawTextLen < maxTokens * 0.5) {
return [...systemPrompts, ...chatPrompts];
return prompts;
}
// 去掉 system 的 token
// filter startWith system prompt
const chatStartIndex = prompts.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemPrompts: ChatItemType[] = prompts.slice(0, chatStartIndex);
const chatPrompts: ChatItemType[] = prompts.slice(chatStartIndex);
// reduce token of systemPrompt
maxTokens -= modelToolMap.countTokens({
model,
messages: systemPrompts

View File

@@ -1,7 +1,8 @@
import type { AppModuleInputItemType, AppModuleItemType, VariableItemType } from '@/types/app';
import { chatModelList, vectorModelList } from '@/store/static';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { FlowModuleTypeEnum, SpecialInputKeyEnum } from '@/constants/flow';
import { SystemInputEnum } from '@/constants/app';
import { TaskResponseKeyEnum } from '@/constants/chat';
import type { SelectedKbType } from '@/types/plugin';
export type EditFormType = {
@@ -128,7 +129,7 @@ export const appModules2Form = (modules: AppModuleItemType[]) => {
if (emptyOutput) {
const target = modules.find((item) => item.moduleId === emptyOutput.moduleId);
defaultAppForm.kb.searchEmptyText =
target?.inputs?.find((item) => item.key === 'answerText')?.value || '';
target?.inputs?.find((item) => item.key === SpecialInputKeyEnum.answerText)?.value || '';
}
} else if (module.flowType === FlowModuleTypeEnum.userGuide) {
const val =
@@ -173,6 +174,10 @@ const chatModelInput = (formData: EditFormType): AppModuleInputItemType[] => [
value: formData.chatModel.limitPrompt,
connected: true
},
{
key: 'switch',
connected: formData.kb.list.length > 0
},
{
key: 'quoteQA',
connected: formData.kb.list.length > 0
@@ -289,7 +294,7 @@ const simpleChatTemplate = (formData: EditFormType): AppModuleItemType[] => [
inputs: chatModelInput(formData),
outputs: [
{
key: 'answerText',
key: TaskResponseKeyEnum.answerText,
targets: []
}
],
@@ -439,7 +444,7 @@ const kbTemplate = (formData: EditFormType): AppModuleItemType[] => [
connected: true
},
{
key: 'answerText',
key: SpecialInputKeyEnum.answerText,
value: formData.kb.searchEmptyText,
connected: true
}
@@ -458,7 +463,7 @@ const kbTemplate = (formData: EditFormType): AppModuleItemType[] => [
inputs: chatModelInput(formData),
outputs: [
{
key: 'answerText',
key: TaskResponseKeyEnum.answerText,
targets: []
}
],