perf: variable replace; feat: prompt optimizer code (#5453)

* feat: add prompt optimizer (#5444)

* feat: add prompt optimizer

* fix

* perf: variable replace

* perf: prompt optimizer code

* feat: init charts shell

* perf: remove user error

---------

Co-authored-by: heheer <heheer@sealos.io>
Archer
2025-08-14 15:48:22 +08:00
committed by GitHub
parent 6a02d2a2e5
commit 9fbfabac61
35 changed files with 1968 additions and 202 deletions

View File

@@ -0,0 +1,326 @@
import { useMemo, useRef, useState } from 'react';
import type { FlexProps } from '@chakra-ui/react';
import { Box, Button, Flex, Textarea, useDisclosure } from '@chakra-ui/react';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import Avatar from '@fastgpt/web/components/common/Avatar';
import MyPopover from '@fastgpt/web/components/common/MyPopover';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useTranslation } from 'next-i18next';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useLocalStorageState } from 'ahooks';
import AIModelSelector from '../../../Select/AIModelSelector';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { onOptimizePrompt } from '@/web/common/api/fetch';
export type OptimizerPromptProps = {
onChangeText: (text: string) => void;
defaultPrompt?: string;
};
export type OnOptimizePromptProps = {
originalPrompt?: string;
input: string;
model: string;
onResult: (result: string) => void;
abortController?: AbortController;
};
const OptimizerPopover = ({
onChangeText,
iconButtonStyle,
defaultPrompt
}: OptimizerPromptProps & {
iconButtonStyle?: FlexProps;
}) => {
const { t } = useTranslation();
const { llmModelList, defaultModels } = useSystemStore();
const [optimizerInput, setOptimizerInput] = useState('');
const [optimizedResult, setOptimizedResult] = useState('');
const [selectedModel = '', setSelectedModel] = useLocalStorageState<string>(
'prompt-editor-selected-model',
{
defaultValue: defaultModels.llm?.model || ''
}
);
const [abortController, setAbortController] = useState<AbortController | null>(null);
const { isOpen: isConfirmOpen, onOpen: onOpenConfirm, onClose: onCloseConfirm } = useDisclosure();
const closePopoverRef = useRef<() => void>();
const modelOptions = useMemo(() => {
return llmModelList.map((model) => {
return {
label: (
<Flex alignItems={'center'}>
<Avatar
src={model.avatar || HUGGING_FACE_ICON}
fallbackSrc={HUGGING_FACE_ICON}
mr={1.5}
w={5}
/>
<Box fontWeight={'normal'} fontSize={'14px'} color={'myGray.900'}>
{model.name}
</Box>
</Flex>
),
value: model.model
};
});
}, [llmModelList]);
const isEmptyOptimizerInput = useMemo(() => {
return !optimizerInput.trim();
}, [optimizerInput]);
const { runAsync: handleSendOptimization, loading } = useRequest2(async (isAuto?: boolean) => {
if (isEmptyOptimizerInput && !isAuto) return;
setOptimizedResult('');
setOptimizerInput('');
const controller = new AbortController();
setAbortController(controller);
await onOptimizePrompt({
originalPrompt: defaultPrompt,
input: optimizerInput,
model: selectedModel,
onResult: (result: string) => {
if (!controller.signal.aborted) {
setOptimizedResult((prev) => prev + result);
}
},
abortController: controller
});
setAbortController(null);
});
const handleStopRequest = () => {
if (abortController) {
abortController.abort();
setAbortController(null);
}
};
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter' && !e.shiftKey && !e.nativeEvent.isComposing) {
e.preventDefault();
if (!loading) {
handleSendOptimization();
}
}
};
return (
<>
<MyPopover
Trigger={
<Flex {...iconButtonStyle}>
<MyIcon name={'optimizer'} w={'18px'} />
</Flex>
}
trigger="click"
placement={'auto'}
w="482px"
onBackdropClick={() => {
if (optimizedResult) {
onOpenConfirm();
} else {
closePopoverRef.current?.();
}
}}
>
{({ onClose }) => {
closePopoverRef.current = onClose;
return (
<Box p={optimizedResult ? 8 : 4}>
{/* Result */}
{optimizedResult && (
<Box
px={'10px'}
maxHeight={'300px'}
overflowY={'auto'}
fontSize={'14px'}
color={'gray.700'}
whiteSpace={'pre-wrap'}
wordBreak={'break-word'}
mb={4}
>
{optimizedResult}
</Box>
)}
{/* Button */}
<Flex mb={3} alignItems={'center'} gap={3}>
{!loading && (
<>
{!optimizedResult && !!defaultPrompt && (
<Button
variant={'whiteBase'}
size={'sm'}
color={'myGray.600'}
onClick={() => handleSendOptimization(true)}
>
{t('app:AutoOptimize')}
</Button>
)}
{optimizedResult && (
<>
<Button
variant={'primaryGhost'}
size={'sm'}
px={2}
border={'0.5px solid'}
color={'primary.600'}
onClick={() => {
onChangeText?.(optimizedResult);
setOptimizedResult('');
setOptimizerInput('');
onClose();
}}
>
{t('app:Optimizer_Replace')}
</Button>
<Button
variant={'whiteBase'}
size={'sm'}
fontSize={'12px'}
onClick={() => {
setOptimizedResult('');
handleSendOptimization();
}}
>
{t('app:Optimizer_Reoptimize')}
</Button>
</>
)}
</>
)}
<Box flex={1} />
{modelOptions && modelOptions.length > 0 && (
<AIModelSelector
borderColor={'transparent'}
_hover={{
border: '1px solid',
borderColor: 'primary.400'
}}
size={'sm'}
value={selectedModel}
list={modelOptions}
onChange={setSelectedModel}
/>
)}
</Flex>
{/* Input */}
<Flex
alignItems={'center'}
gap={2}
border={'1px solid'}
borderColor={'gray.200'}
borderRadius={'md'}
p={2}
mb={3}
_focusWithin={{ borderColor: 'primary.600' }}
>
<MyIcon name={'optimizer'} alignSelf={'flex-start'} mt={0.5} w={5} />
<Textarea
placeholder={t('app:Optimizer_Placeholder')}
resize={'none'}
rows={1}
minHeight={'24px'}
lineHeight={'24px'}
maxHeight={'96px'}
overflowY={'hidden'}
border={'none'}
_focus={{
boxShadow: 'none'
}}
fontSize={'sm'}
p={0}
borderRadius={'none'}
value={optimizerInput}
autoFocus
onKeyDown={handleKeyDown}
isDisabled={loading}
onChange={(e) => {
const textarea = e.target;
setOptimizerInput(e.target.value);
textarea.style.height = '24px';
const maxHeight = 96;
const newHeight = Math.min(textarea.scrollHeight, maxHeight);
textarea.style.height = `${newHeight}px`;
if (textarea.scrollHeight > maxHeight) {
textarea.style.overflowY = 'auto';
} else {
textarea.style.overflowY = 'hidden';
}
}}
flex={1}
/>
<MyIcon
name={loading ? 'stop' : 'core/chat/sendLight'}
w={'1rem'}
alignSelf={'flex-end'}
mb={1}
color={loading || !isEmptyOptimizerInput ? 'primary.600' : 'gray.400'}
cursor={loading || !isEmptyOptimizerInput ? 'pointer' : 'not-allowed'}
onClick={() => {
if (loading) {
handleStopRequest();
} else {
void handleSendOptimization();
}
}}
/>
</Flex>
</Box>
);
}}
</MyPopover>
<MyModal
isOpen={isConfirmOpen}
onClose={onCloseConfirm}
title={t('app:Optimizer_CloseConfirm')}
iconSrc={'common/confirm/deleteTip'}
size="md"
zIndex={2000}
>
<Box p={4}>
<Box fontSize={'sm'} color={'myGray.700'} mb={4}>
{t('app:Optimizer_CloseConfirmText')}
</Box>
<Flex justifyContent={'flex-end'} gap={3}>
<Button variant={'whiteBase'} onClick={onCloseConfirm}>
{t('common:Cancel')}
</Button>
<Button
variant={'dangerFill'}
onClick={() => {
setOptimizedResult('');
setOptimizerInput('');
if (abortController) {
abortController.abort();
setAbortController(null);
}
onCloseConfirm();
closePopoverRef.current?.();
}}
>
{t('app:Optimizer_CloseConfirm')}
</Button>
</Flex>
</Box>
</MyModal>
</>
);
};
export default OptimizerPopover;
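
For orientation, a minimal consumption sketch of the component above. The host component and state names here are hypothetical; the real wiring (through the new ExtensionPopover prop) appears in the EditForm diff below.

// Sketch only — hypothetical host component; state names are placeholders.
import { useState } from 'react';
import OptimizerPopover from '@/components/common/PromptEditor/OptimizerPopover';

const PromptFieldWithOptimizer = () => {
  const [systemPrompt, setSystemPrompt] = useState('');
  return (
    <OptimizerPopover
      defaultPrompt={systemPrompt}
      onChangeText={(text) => setSystemPrompt(text)}
      iconButtonStyle={{ cursor: 'pointer', p: 1 }}
    />
  );
};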

View File

@@ -73,6 +73,7 @@ const InputRender = (props: InputRenderProps) => {
maxLength={props.maxLength}
minH={100}
maxH={300}
ExtensionPopover={props.ExtensionPopover}
/>
);
}

View File

@@ -5,6 +5,7 @@ import type {
import type { InputTypeEnum } from './constant';
import type { UseFormReturn } from 'react-hook-form';
import type { BoxProps } from '@chakra-ui/react';
import type { EditorProps } from '@fastgpt/web/components/common/Textarea/PromptEditor/Editor';
type CommonRenderProps = {
placeholder?: string;
@@ -18,14 +19,16 @@ type CommonRenderProps = {
} & Omit<BoxProps, 'onChange' | 'list' | 'value'>;
type SpecificProps =
- | {
+ | ({
// input & textarea
inputType: InputTypeEnum.input | InputTypeEnum.textarea;
variables?: EditorVariablePickerType[];
variableLabels?: EditorVariableLabelPickerType[];
title?: string;
maxLength?: number;
- }
+ } & {
+ ExtensionPopover?: EditorProps['ExtensionPopover'];
+ })
| {
// numberInput
inputType: InputTypeEnum.numberInput;

View File

@@ -1,4 +1,4 @@
- import React, { useEffect, useMemo, useTransition } from 'react';
+ import React, { useCallback, useEffect, useMemo, useTransition } from 'react';
import {
Box,
Flex,
@@ -31,6 +31,8 @@ import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import VariableTip from '@/components/common/Textarea/MyTextarea/VariableTip';
import { getWebLLMModel } from '@/web/common/system/utils';
import ToolSelect from './components/ToolSelect';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import OptimizerPopover from '@/components/common/PromptEditor/OptimizerPopover';
const DatasetSelectModal = dynamic(() => import('@/components/core/app/DatasetSelectModal'));
const DatasetParamsModal = dynamic(() => import('@/components/core/app/DatasetParamsModal'));
@@ -69,6 +71,7 @@ const EditForm = ({
const { appDetail } = useContextSelector(AppContext, (v) => v);
const selectDatasets = useMemo(() => appForm?.dataset?.datasets, [appForm]);
const [, startTst] = useTransition();
const { llmModelList, defaultModels } = useSystemStore();
const {
isOpen: isOpenDatasetSelect,
@@ -126,6 +129,27 @@ const EditForm = ({
}
}, [selectedModel, setAppForm]);
const OptimizerPopverComponent = useCallback(
({ iconButtonStyle }: { iconButtonStyle: Record<string, any> }) => {
return (
<OptimizerPopover
iconButtonStyle={iconButtonStyle}
defaultPrompt={appForm.aiSettings.systemPrompt}
onChangeText={(e) => {
setAppForm((state) => ({
...state,
aiSettings: {
...state.aiSettings,
systemPrompt: e
}
}));
}}
/>
);
},
[appForm.aiSettings.systemPrompt, setAppForm]
);
return (
<>
<Box>
@@ -196,6 +220,7 @@ const EditForm = ({
variables={formatVariables}
placeholder={t('common:core.app.tip.systemPromptTip')}
title={t('common:core.ai.Prompt')}
ExtensionPopover={[OptimizerPopverComponent]}
/>
</Box>
</Box>

View File

@@ -13,6 +13,8 @@ import { getEditorVariables } from '@/pageComponents/app/detail/WorkflowComponen
import { InputTypeEnum } from '@/components/core/app/formRender/constant';
import { llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { getWebDefaultLLMModel } from '@/web/common/system/utils';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import OptimizerPopover from '@/components/common/PromptEditor/OptimizerPopover';
const CommonInputForm = ({ item, nodeId }: RenderInputProps) => {
const { t } = useTranslation();
@@ -80,6 +82,22 @@ const CommonInputForm = ({ item, nodeId }: RenderInputProps) => {
return item.value;
}, [inputType, item.value, defaultModel, handleChange]);
const canOptimizePrompt = item.key === NodeInputKeyEnum.aiSystemPrompt;
const OptimizerPopverComponent = useCallback(
({ iconButtonStyle }: { iconButtonStyle: Record<string, any> }) => {
return (
<OptimizerPopover
iconButtonStyle={iconButtonStyle}
defaultPrompt={item.value}
onChangeText={(e) => {
handleChange(e);
}}
/>
);
},
[item.value, handleChange]
);
return (
<InputRender
inputType={inputType}
@@ -93,6 +111,7 @@ const CommonInputForm = ({ item, nodeId }: RenderInputProps) => {
max={item.max}
list={item.list}
modelList={modelList}
ExtensionPopover={canOptimizePrompt ? [OptimizerPopverComponent] : undefined}
/>
);
};

View File

@@ -0,0 +1,167 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoAppChatLog } from '@fastgpt/service/core/app/logs/chatLogsSchema';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { addLog } from '@fastgpt/service/common/system/log';
import type { ChatSchemaType } from '@fastgpt/global/core/chat/type';
export type SyncAppChatLogQuery = {};
export type SyncAppChatLogBody = {
batchSize?: number;
};
export type SyncAppChatLogResponse = {};
/*
Scan the entire chats collection to build the statistics.
*/
async function handler(
req: ApiRequestProps<SyncAppChatLogBody, SyncAppChatLogQuery>,
res: ApiResponseType<SyncAppChatLogResponse>
) {
await authCert({ req, authRoot: true });
const { batchSize = 10 } = req.body;
console.log('Start syncing AppChatLog data...');
console.log(`Batch size: ${batchSize}`);
let success = 0;
const total = await MongoChat.countDocuments({});
console.log(`Total chat records to process: ${total}`);
res.json({
data: 'Sync task started; progress is reported in the server logs'
});
while (true) {
console.log(`Chat sync progress: ${success}/${total}`);
try {
const chats = await MongoChat.find({
initStatistics: { $exists: false }
})
.sort({ _id: -1 })
.limit(batchSize)
.lean();
if (chats.length === 0) break;
const result = await Promise.allSettled(chats.map((chat) => processChatRecord(chat)));
success += result.filter((r) => r.status === 'fulfilled').length;
} catch (error) {
addLog.error('Failed to process chat record', error);
}
}
console.log('Chat sync finished');
}
async function processChatRecord(chat: ChatSchemaType) {
async function calculateChatItemStats(chatId: string) {
const chatItems = await MongoChatItem.find({ chatId }).lean();
let chatItemCount = chatItems.length;
let errorCount = 0;
let totalPoints = 0;
let goodFeedbackCount = 0;
let badFeedbackCount = 0;
let totalResponseTime = 0;
for (const item of chatItems) {
const itemData = item as any;
if (itemData.userGoodFeedback && itemData.userGoodFeedback.trim() !== '') {
goodFeedbackCount++;
}
if (itemData.userBadFeedback && itemData.userBadFeedback.trim() !== '') {
badFeedbackCount++;
}
if (itemData.durationSeconds) {
totalResponseTime += itemData.durationSeconds;
} else if (
itemData[DispatchNodeResponseKeyEnum.nodeResponse] &&
Array.isArray(itemData[DispatchNodeResponseKeyEnum.nodeResponse])
) {
for (const response of itemData[DispatchNodeResponseKeyEnum.nodeResponse]) {
if (response.runningTime) {
totalResponseTime += response.runningTime / 1000;
}
}
}
if (
itemData[DispatchNodeResponseKeyEnum.nodeResponse] &&
Array.isArray(itemData[DispatchNodeResponseKeyEnum.nodeResponse])
) {
for (const response of itemData[DispatchNodeResponseKeyEnum.nodeResponse]) {
if (response.errorText) {
errorCount++;
break;
}
if (response.totalPoints) {
totalPoints += response.totalPoints;
}
}
}
}
return {
chatItemCount,
errorCount,
totalPoints,
goodFeedbackCount,
badFeedbackCount,
totalResponseTime
};
}
async function checkIsFirstChat(chat: any): Promise<boolean> {
const earliestChat = await MongoChat.findOne(
{
userId: chat.userId,
appId: chat.appId
},
{},
{ sort: { createTime: 1 } }
).lean();
return earliestChat?._id.toString() === chat._id.toString();
}
const chatItemStats = await calculateChatItemStats(chat.chatId);
const isFirstChat = await checkIsFirstChat(chat);
const chatLogData = {
appId: chat.appId,
teamId: chat.teamId,
chatId: chat.chatId,
userId: String(chat.outLinkUid || chat.tmbId),
source: chat.source,
sourceName: chat.sourceName,
createTime: chat.createTime,
updateTime: chat.updateTime,
chatItemCount: chatItemStats.chatItemCount,
errorCount: chatItemStats.errorCount,
totalPoints: chatItemStats.totalPoints,
goodFeedbackCount: chatItemStats.goodFeedbackCount,
badFeedbackCount: chatItemStats.badFeedbackCount,
totalResponseTime: chatItemStats.totalResponseTime,
isFirstChat
};
await MongoAppChatLog.updateOne(
{ appId: chat.appId, chatId: chat.chatId },
{ $set: chatLogData },
{ upsert: true }
);
await MongoChat.updateOne({ _id: chat._id }, { $set: { initStatistics: true } });
}
export default NextAPI(handler);
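
A hedged invocation sketch for this backfill. The route path below is hypothetical (file names are not visible in this view), and the root-credential header name is an assumption — the handler is guarded by authCert({ authRoot: true }). It replies immediately and keeps scanning in the background.

// Hypothetical path and credential header — substitute the real ones.
async function runBackfill() {
  const res = await fetch('/api/admin/syncAppChatLog', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', rootkey: 'YOUR_ROOT_KEY' },
    body: JSON.stringify({ batchSize: 50 })
  });
  // The handler responds at once; progress goes to the server logs.
  console.log(await res.json());
}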

View File

@@ -0,0 +1,223 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { responseWrite } from '@fastgpt/service/common/response';
import { sseErrRes } from '@fastgpt/service/common/response';
import { createChatCompletion } from '@fastgpt/service/core/ai/config';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { loadRequestMessages } from '@fastgpt/service/core/chat/utils';
import { llmCompletionsBodyFormat, parseLLMStreamResponse } from '@fastgpt/service/core/ai/utils';
import { countGptMessagesTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { createUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { addLog } from '@fastgpt/service/common/system/log';
type OptimizePromptBody = {
originalPrompt: string;
optimizerInput: string;
model: string;
};
const getPromptOptimizerSystemPrompt = () => {
return `# Role
Prompt Engineer
## Skills
- Understands how LLMs work and where they fall short, including their training data and construction, in order to design better prompts
- Extensive natural language processing experience; designs high-quality prompts that are grammatically and semantically sound
- Strong at iterative optimization; continuously improves prompt quality by adjusting prompts and testing how they perform
- Designs prompts around concrete business needs so that LLM output meets business requirements
- Skilled at analyzing user needs and designing prompt frameworks with a clear structure and rigorous logic
## Goals
- Analyze the user's prompt and understand its core needs and intent
- Design a clearly structured, logically coherent prompt framework
- Generate a high-quality structured prompt
- Provide targeted optimization suggestions
## Constraints
- Ensure all content follows the best practices of the relevant discipline
- Never step out of the role under any circumstances
- Do not ramble or fabricate facts
- Stay professional and accurate
- The output must include an optimization-suggestions section
## Suggestions
- Analyze the core intent of the user's original prompt in depth; avoid surface-level readings
- Think in structures so that the sections are logically clear and mutually reinforcing
- Put practicality first: the generated prompt should be directly usable
- Refine the details: every section should contain specific, valuable content
- Hold a professional standard: the output prompt should follow industry best practices
- **Note**: the Suggestions section should focus on the role's internal working methods, not on strategies for interacting with the user`;
};
const getPromptOptimizerUserPrompt = (originalPrompt: string, optimizerInput: string) => {
return `Strictly follow the user's optimization requirements:
<OptimizerInput>
${optimizerInput}
</OptimizerInput>
Analyze and optimize the following prompt, turning it into a structured, high-quality prompt:
<OriginalPrompt>
${originalPrompt}
</OriginalPrompt>
## Notes:
- Output the optimized prompt directly; do not add explanatory text and do not wrap it in a code block
- Every section must contain concrete content; do not use placeholders
- **Quantity requirements**: Skills, Goals, Constraints, Workflow, and Suggestions each need 5 bullet points; OutputFormat needs 3
- **Suggestions is the role's internal working methodology**: focus on improving the role's own skills and ways of working; avoid advice about interacting with the user
- **The full structure is required**: the output must include all of the Role, Background, Attention, Profile, Skills, Goals, Constraints, Workflow, OutputFormat, Suggestions, and Initialization sections
- Keep the content logical and coherent, with all sections reinforcing one another`;
};
async function handler(req: ApiRequestProps<OptimizePromptBody>, res: ApiResponseType) {
try {
const { originalPrompt, optimizerInput, model } = req.body;
const { teamId, tmbId } = await authCert({
req,
authToken: true,
authApiKey: true
});
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
const messages: ChatCompletionMessageParam[] = [
{
role: 'system',
content: getPromptOptimizerSystemPrompt()
},
{
role: 'user',
content: getPromptOptimizerUserPrompt(originalPrompt, optimizerInput)
}
];
const requestMessages = await loadRequestMessages({
messages,
useVision: false
});
const { response, isStreamResponse } = await createChatCompletion({
body: llmCompletionsBodyFormat(
{
model,
messages: requestMessages,
temperature: 0.1,
max_tokens: 2000,
stream: true
},
model
)
});
const { inputTokens, outputTokens } = await (async () => {
if (isStreamResponse) {
const { parsePart, getResponseData } = parseLLMStreamResponse();
let optimizedText = '';
for await (const part of response) {
const { responseContent } = parsePart({
part,
parseThinkTag: true,
retainDatasetCite: false
});
if (responseContent) {
optimizedText += responseContent;
responseWrite({
res,
event: SseResponseEventEnum.answer,
data: JSON.stringify({
choices: [
{
delta: {
content: responseContent
}
}
]
})
});
}
}
const { content: answer, usage } = getResponseData();
return {
content: answer,
inputTokens: usage?.prompt_tokens || (await countGptMessagesTokens(requestMessages)),
outputTokens:
usage?.completion_tokens ||
(await countGptMessagesTokens([{ role: 'assistant', content: optimizedText }]))
};
} else {
const usage = response.usage;
const content = response.choices?.[0]?.message?.content || '';
responseWrite({
res,
event: SseResponseEventEnum.answer,
data: JSON.stringify({
choices: [
{
delta: {
content
}
}
]
})
});
return {
content,
inputTokens: usage?.prompt_tokens || (await countGptMessagesTokens(requestMessages)),
outputTokens:
usage?.completion_tokens ||
(await countGptMessagesTokens([{ role: 'assistant', content: content }]))
};
}
})();
responseWrite({
res,
event: SseResponseEventEnum.answer,
data: '[DONE]'
});
const { totalPoints, modelName } = formatModelChars2Points({
model,
inputTokens,
outputTokens,
modelType: ModelTypeEnum.llm
});
createUsage({
teamId,
tmbId,
appName: i18nT('common:support.wallet.usage.Optimize Prompt'),
totalPoints,
source: UsageSourceEnum.optimize_prompt,
list: [
{
moduleName: i18nT('common:support.wallet.usage.Optimize Prompt'),
amount: totalPoints,
model: modelName,
inputTokens,
outputTokens
}
]
});
} catch (error: any) {
addLog.error('Optimize prompt error', error);
sseErrRes(res, error);
}
res.end();
}
export default NextAPI(handler);
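
For reference, the SSE frames this handler emits look like the following, assuming SseResponseEventEnum.answer serializes to the string 'answer' (the delta shape is taken from the responseWrite calls above):

event: answer
data: {"choices":[{"delta":{"content":"optimized text chunk"}}]}

event: answer
data: [DONE]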

View File

@@ -1,4 +1,4 @@
- import { type ChatHistoryItemResType, type ChatSchema } from '@fastgpt/global/core/chat/type';
+ import { type ChatHistoryItemResType, type ChatSchemaType } from '@fastgpt/global/core/chat/type';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { type AuthModeType } from '@fastgpt/service/support/permission/type';
import { authOutLink } from './outLink';
@@ -51,7 +51,7 @@ export async function authChatCrud({
teamId: string;
tmbId: string;
uid: string;
- chat?: ChatSchema;
+ chat?: ChatSchemaType;
responseDetail: boolean;
showNodeStatus: boolean;
showRawSource: boolean;

View File

@@ -10,7 +10,6 @@ import {
} from '@fastgpt/global/core/workflow/type/io.d';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import type { FlowNodeTemplateType } from '@fastgpt/global/core/workflow/type/node';
- import type { ChatSchema } from '@fastgpt/global/core/chat/type';
import type { AppSchema } from '@fastgpt/global/core/app/type';
import { ChatModelType } from '@/constants/model';
import { TeamMemberStatusEnum } from '@fastgpt/global/support/user/team/constant';

View File

@@ -10,6 +10,7 @@ import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import { useSystemStore } from '../system/useSystemStore';
import { formatTime2YMDHMW } from '@fastgpt/global/common/string/time';
import { getWebReqUrl } from '@fastgpt/web/common/system/utils';
import type { OnOptimizePromptProps } from '@/components/common/PromptEditor/OptimizerPopover';
type StreamFetchProps = {
url?: string;
@@ -272,3 +273,27 @@ export const streamFetch = ({
failedFinish(err);
}
});
export const onOptimizePrompt = async ({
originalPrompt,
model,
input,
onResult,
abortController
}: OnOptimizePromptProps) => {
const controller = abortController || new AbortController();
await streamFetch({
url: '/api/core/ai/optimizePrompt',
data: {
originalPrompt,
optimizerInput: input,
model
},
onMessage: ({ event, text }) => {
if (event === SseResponseEventEnum.answer && text) {
onResult(text);
}
},
abortCtrl: controller
});
};
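
A minimal usage sketch for this helper; the prompts and model id are placeholders. Chunks arrive incrementally through onResult, and the optional AbortController stops the stream early.

// Sketch only — placeholder inputs; accumulate streamed chunks.
async function demoOptimize() {
  const controller = new AbortController();
  let optimized = '';
  await onOptimizePrompt({
    originalPrompt: 'You are a helpful assistant.',
    input: 'Make the tone more formal',
    model: 'gpt-4o-mini', // placeholder model id
    onResult: (chunk) => {
      optimized += chunk;
    },
    abortController: controller // call controller.abort() to cancel
  });
  console.log(optimized);
}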

View File

@@ -103,7 +103,7 @@ export const useSystemStore = create<State>()(
return null;
},
- gitStar: 20000,
+ gitStar: 25000,
async loadGitStar() {
if (!get().feConfigs?.show_git) return;
try {