This commit is contained in:
Archer
2023-10-07 18:02:20 +08:00
committed by GitHub
parent c65a36d3ab
commit 98ce5103a0
56 changed files with 868 additions and 282 deletions

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.0 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 1000
weight: 850
---
如果您是**从旧版本升级到 V4**,由于新版 MongoDB 表变更比较大,需要按照本文档的说明执行一些初始化脚本。

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.1 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 999
weight: 849
---
如果您是**从旧版本升级到 V4.1**,由于新版重新设置了对话存储结构,需要初始化原来的存储内容。

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.2 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 998
weight: 848
---
99.9%用户不影响,升级 4.2 主要是修改了配置文件中 QAModel 的格式。从原先的数组改成对象:

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.2.1 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 997
weight: 847
---
私有部署,如果添加了配置文件,需要在配置文件中修改 `VectorModels` 字段。增加 defaultToken 和 maxToken分别对应直接分段时的默认 token 数量和该模型支持的 token 上限(通常不建议超过 3000

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.3 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 996
weight: 846
---
## 执行初始化 API

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.4 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 995
weight: 845
---
## 执行初始化 API

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.4.1 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 994
weight: 844
---
## 执行初始化 API

View File

@@ -4,7 +4,7 @@ description: 'FastGPT 从旧版本升级到 V4.4.2 操作指南'
icon: 'upgrade'
draft: false
toc: true
weight: 993
weight: 843
---
## 执行初始化 API

View File

@@ -4,7 +4,7 @@ description: 'FastGPT V4.4.5 更新(需执行升级脚本)'
icon: 'upgrade'
draft: false
toc: true
weight: 992
weight: 842
---
## 执行初始化 API

View File

@@ -0,0 +1,14 @@
---
title: 'V4.4.6'
description: 'FastGPT V4.4.6 更新'
icon: 'upgrade'
draft: false
toc: true
weight: 841
---
## 功能介绍
1. 高级编排新增模块 - 应用调用,可调用其他应用。
2. 新增 - 必要连接校验
3. 修复 - 下一步指引在免登录中身份问题。

View File

@@ -0,0 +1,57 @@
import { ChatCompletionRequestMessage } from '../type';
import { getAIChatApi } from '../config';
export const Prompt_QuestionGuide = `我不太清楚问你什么问题,请帮我生成 3 个问题引导我继续提问。问题的长度应小于20个字符按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;

/**
 * Ask the chat model to generate 3 short follow-up questions based on the
 * recent conversation, for the "next question guide" feature.
 *
 * @param messages - conversation history to base the suggestions on
 * @param model    - chat model name to use for the completion
 * @returns `result`: the parsed question strings (empty on any parse failure),
 *          `tokens`: total tokens billed for the completion (0 if absent)
 */
export async function createQuestionGuide({
  messages,
  model
}: {
  messages: ChatCompletionRequestMessage[];
  model: string;
}) {
  const chatAPI = getAIChatApi();
  const { data } = await chatAPI.createChatCompletion({
    model: model,
    temperature: 0,
    max_tokens: 200,
    messages: [
      ...messages,
      {
        role: 'user',
        content: Prompt_QuestionGuide
      }
    ],
    stream: false
  });

  // Guard every level: `choices` may be missing OR an empty array, in which
  // case `choices?.[0].message` would throw without the extra `?.`.
  const answer = data.choices?.[0]?.message?.content || '';
  const totalTokens = data.usage?.total_tokens || 0;

  // The model is prompted to answer with a JSON array; extract the outermost
  // bracketed span so surrounding prose does not break JSON.parse.
  const start = answer.indexOf('[');
  const end = answer.lastIndexOf(']');

  if (start === -1 || end === -1) {
    return {
      result: [],
      tokens: totalTokens
    };
  }

  // Strip escaped newlines / stray backslashes and spaces the model sometimes
  // inserts inside the JSON payload.
  const jsonStr = answer
    .substring(start, end + 1)
    .replace(/(\\n|\\)/g, '')
    .replace(/ /g, '');

  try {
    const parsed: unknown = JSON.parse(jsonStr);
    // Enforce the declared contract (list of question strings): discard a
    // non-array payload entirely and filter out non-string elements.
    const result = Array.isArray(parsed)
      ? parsed.filter((item): item is string => typeof item === 'string')
      : [];
    return {
      result,
      tokens: totalTokens
    };
  } catch (error) {
    return {
      result: [],
      tokens: totalTokens
    };
  }
}

2
pnpm-lock.yaml generated
View File

@@ -1,4 +1,4 @@
lockfileVersion: '6.0'
lockfileVersion: '6.1'
settings:
autoInstallPeers: true

View File

@@ -1,6 +1,6 @@
{
"name": "app",
"version": "4.4.5",
"version": "4.4.6",
"private": false,
"scripts": {
"dev": "next dev",

View File

@@ -1,10 +1,9 @@
### Fast GPT V4.4.5
### Fast GPT V4.4.6
1. 新增 - 下一步指引选项,可以通过模型生成 3 个预测问题。
2. 新增 - 分享链接 hook 身份校验
3. 新增 - Api Key 使用。增加别名、额度限制和过期时间。自带 appId无需额外连接
4. 去除 - 限定词。目前旧应用仍生效9/25 后全面去除,请及时替换
5. 新增 - 引用模板/引用提示词设置,可以 DIY 引用内容的格式,从而更好的适配场景。[参考文档](https://doc.fastgpt.run/docs/use-cases/prompt/)
6. [使用文档](https://doc.fastgpt.run/docs/intro/)
7. [点击查看高级编排介绍文档](https://doc.fastgpt.run/docs/workflow)
8. [点击查看商业版](https://doc.fastgpt.run/docs/commercial/)
1. 高级编排新增模块 - 应用调用
2. 新增 - 必要连接校验
3. 新增 - 下一步指引选项,可以通过模型生成 3 个预测问题
4. 新增 - 分享链接 hook 身份校验
5. [使用文档](https://doc.fastgpt.run/docs/intro/)
6. [点击查看高级编排介绍文档](https://doc.fastgpt.run/docs/workflow)
7. [点击查看商业版](https://doc.fastgpt.run/docs/commercial/)

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 KiB

View File

@@ -96,6 +96,7 @@
"module question": "Question",
"module quoteList": "Quotes",
"module runningTime": "Time",
"module search response": "Search Result",
"module similarity": "Similarity",
"module temperature": "Temperature",
"module time": "Running Time",

View File

@@ -96,6 +96,7 @@
"module question": "问题",
"module quoteList": "引用内容",
"module runningTime": "运行时长",
"module search response": "搜索结果",
"module similarity": "相似度",
"module temperature": "温度",
"module time": "运行时长",

View File

@@ -1,5 +1,6 @@
import { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';
import { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
export type CreateQuestionGuideProps = {
messages: ChatCompletionRequestMessage[];
shareId?: string;
};

View File

@@ -12,15 +12,7 @@ const QuoteModal = dynamic(() => import('./QuoteModal'), { ssr: false });
const ContextModal = dynamic(() => import('./ContextModal'), { ssr: false });
const WholeResponseModal = dynamic(() => import('./WholeResponseModal'), { ssr: false });
const ResponseTags = ({
chatId,
contentId,
responseData = []
}: {
chatId?: string;
contentId?: string;
responseData?: ChatHistoryItemResType[];
}) => {
const ResponseTags = ({ responseData = [] }: { responseData?: ChatHistoryItemResType[] }) => {
const { isPc } = useGlobalStore();
const { t } = useTranslation();
const [quoteModalData, setQuoteModalData] = useState<QuoteItemType[]>();
@@ -41,9 +33,12 @@ const ResponseTags = ({
return {
chatAccount: responseData.filter((item) => item.moduleType === FlowModuleTypeEnum.chatNode)
.length,
quoteList: chatData?.quoteList,
quoteList: responseData
.filter((item) => item.moduleType === FlowModuleTypeEnum.chatNode)
.map((item) => item.quoteList)
.flat(),
historyPreview: chatData?.historyPreview,
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
runningTime: +responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
};
}, [responseData]);
@@ -56,20 +51,20 @@ const ResponseTags = ({
return responseData.length === 0 ? null : (
<Flex alignItems={'center'} mt={2} flexWrap={'wrap'}>
{quoteList.length > 0 && (
<MyTooltip label="查看引用">
<Tag
colorSchema="blue"
cursor={'pointer'}
{...TagStyles}
onClick={() => setQuoteModalData(quoteList)}
>
{quoteList.length}
</Tag>
</MyTooltip>
)}
{chatAccount === 1 && (
<>
{quoteList.length > 0 && (
<MyTooltip label="查看引用">
<Tag
colorSchema="blue"
cursor={'pointer'}
{...TagStyles}
onClick={() => setQuoteModalData(quoteList)}
>
{quoteList.length}
</Tag>
</MyTooltip>
)}
{historyPreview.length > 0 && (
<MyTooltip label={'点击查看完整对话记录'}>
<Tag
@@ -120,4 +115,4 @@ const ResponseTags = ({
);
};
export default ResponseTags;
export default React.memo(ResponseTags);

View File

@@ -34,7 +34,7 @@ import { feConfigs } from '@/store/static';
import { event } from '@/utils/plugin/eventbus';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { useMarkdown } from '@/hooks/useMarkdown';
import { AppModuleItemType, VariableItemType } from '@/types/app';
import { AppModuleItemType } from '@/types/app';
import { VariableInputEnum } from '@/constants/app';
import { useForm } from 'react-hook-form';
import type { MessageItemType } from '@/types/core/chat/type';
@@ -81,7 +81,7 @@ export type StartChatFnProps = {
export type ComponentRef = {
getChatHistory: () => ChatSiteItemType[];
resetVariables: (data?: Record<string, any>) => void;
resetHistory: (chatId: ChatSiteItemType[]) => void;
resetHistory: (history: ChatSiteItemType[]) => void;
scrollToBottom: (behavior?: 'smooth' | 'auto') => void;
};
@@ -96,7 +96,6 @@ type Props = {
showMarkIcon?: boolean; // admin mark dataset
showVoiceIcon?: boolean;
showEmptyIntro?: boolean;
chatId?: string;
appAvatar?: string;
userAvatar?: string;
userGuideModule?: AppModuleItemType;
@@ -116,7 +115,6 @@ const ChatBox = (
showMarkIcon = false,
showVoiceIcon = true,
showEmptyIntro = false,
chatId,
appAvatar,
userAvatar,
userGuideModule,
@@ -265,7 +263,8 @@ const ChatBox = (
const result = await postQuestionGuide(
{
messages: adaptChat2GptMessages({ messages: history, reserveId: false }).slice(-6)
messages: adaptChat2GptMessages({ messages: history, reserveId: false }).slice(-6),
shareId: router.query.shareId as string
},
abortSignal
);
@@ -277,7 +276,7 @@ const ChatBox = (
}
} catch (error) {}
},
[questionGuide, scrollToBottom]
[questionGuide, scrollToBottom, router.query.shareId]
);
/**
@@ -743,11 +742,7 @@ const ChatBox = (
source={item.value}
isChatting={index === chatHistory.length - 1 && isChatting}
/>
<ResponseTags
chatId={chatId}
contentId={item.dataId}
responseData={item.responseData}
/>
<ResponseTags responseData={item.responseData} />
{/* question guide */}
{index === chatHistory.length - 1 &&
!isChatting &&

View File

@@ -20,19 +20,17 @@ function MyLink(e: any) {
{text}
</Link>
) : (
<Box as={'ul'} mt={'0 !important'}>
<Box as={'li'} mb={1}>
<Box
as={'span'}
color={'blue.600'}
textDecoration={'underline'}
cursor={'pointer'}
onClick={() => {
event.emit('guideClick', { text });
}}
>
{text}
</Box>
<Box as={'li'} mb={1}>
<Box
as={'span'}
color={'blue.600'}
textDecoration={'underline'}
cursor={'pointer'}
onClick={() => {
event.emit('guideClick', { text });
}}
>
{text}
</Box>
</Box>
);
@@ -40,9 +38,10 @@ function MyLink(e: any) {
const Guide = ({ text }: { text: string }) => {
const formatText = useMemo(
() => text.replace(/\[(.*?)\]($|\n)/g, '[$1]()\n').replace(/\\n/g, '\n&nbsp;'),
() => text.replace(/\[(.*?)\]($|\n)/g, '[$1]()').replace(/\\n/g, '\n&nbsp;'),
[text]
);
return (
<ReactMarkdown
className={`markdown ${styles.markdown}`}

View File

@@ -15,7 +15,8 @@ export enum ChatRoleEnum {
export enum TaskResponseKeyEnum {
'answerText' = 'answerText', // answer module text key
'responseData' = 'responseData'
'responseData' = 'responseData',
'history' = 'history'
}
export const ChatRoleMap = {

View File

@@ -21,7 +21,7 @@ export const ChatModelSystemTip =
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}';
export const ChatModelLimitTip =
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。不建议内容太长,会影响上下文,可使用变量,例如 {{language}}。可在文档中找到对应的限定例子';
export const userGuideTip = '可以添加特殊的对话前后引导模块,更好的让用户进行对话';
export const userGuideTip = '可以在对话前设置引导语,设置全局变量,设置下一步指引';
export const welcomeTextTip =
'每次对话开始前,发送一个初始内容。支持标准 Markdown 语法,可使用的额外标记:\n[快捷按键]: 用户点击后可以直接发送该问题';
export const variableTip =
@@ -203,6 +203,14 @@ export const ChatModule: FlowModuleTemplateType = {
Input_Template_UserChatInput
],
outputs: [
{
key: TaskResponseKeyEnum.history,
label: '新的上下文',
description: '将本次回复内容拼接上历史记录,作为新的上下文返回',
valueType: FlowValueTypeEnum.chatHistory,
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
@@ -483,6 +491,51 @@ export const EmptyModule: FlowModuleTemplateType = {
inputs: [],
outputs: []
};
export const AppModule: FlowModuleTemplateType = {
flowType: FlowModuleTypeEnum.app,
logo: '/imgs/module/app.png',
name: '应用调用(测试版)',
intro: '可以选择一个其他应用进行调用',
description: '可以选择一个其他应用进行调用',
showStatus: true,
inputs: [
Input_Template_TFSwitch,
{
key: 'app',
type: FlowInputItemTypeEnum.selectApp,
label: '选择一个应用',
description: '选择一个其他应用进行调用',
required: true
},
Input_Template_History,
Input_Template_UserChatInput
],
outputs: [
{
key: TaskResponseKeyEnum.history,
label: '新的上下文',
description: '将该应用回复内容拼接到历史记录中,作为新的上下文返回',
valueType: FlowValueTypeEnum.chatHistory,
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
description: '将在应用完全结束后触发',
valueType: FlowValueTypeEnum.string,
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
key: 'finish',
label: '请求结束',
valueType: FlowValueTypeEnum.boolean,
type: FlowOutputItemTypeEnum.source,
targets: []
}
]
};
export const ModuleTemplates = [
{
@@ -498,11 +551,11 @@ export const ModuleTemplates = [
list: [ChatModule, AnswerModule]
},
{
label: '知识库模块',
list: [KBSearchModule]
label: '核心调用',
list: [KBSearchModule, AppModule]
},
{
label: 'Agent',
label: '函数模块',
list: [ClassifyQuestionModule, ContextExtractModule, HttpModule]
}
];
@@ -517,7 +570,8 @@ export const ModuleTemplatesFlat = [
ClassifyQuestionModule,
ContextExtractModule,
HttpModule,
EmptyModule
EmptyModule,
AppModule
];
// template
@@ -528,6 +582,25 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
name: '简单的对话',
intro: '一个极其简单的 AI 对话应用',
modules: [
{
moduleId: 'userGuide',
name: '用户引导',
flowType: 'userGuide',
position: {
x: 454.98510354678695,
y: 721.4016845336229
},
inputs: [
{
key: 'welcomeText',
type: 'input',
label: '开场白',
value: '',
connected: true
}
],
outputs: []
},
{
moduleId: 'userChatInput',
name: '用户问题(对话入口)',
@@ -1382,10 +1455,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
moduleId: 'remuj3',
key: 'userChatInput'
},
{
moduleId: 'nlfwkc',
key: 'userChatInput'
},
{
moduleId: 'fljhzy',
key: 'userChatInput'
@@ -1399,8 +1468,8 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
name: '聊天记录',
flowType: 'historyNode',
position: {
x: 194.99102398958047,
y: 1801.3545999721096
x: 1770.497690708367,
y: 1820.2355054321215
},
inputs: [
{
@@ -1444,16 +1513,23 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
y: 1077.557793775116
},
inputs: [
{
key: 'switch',
type: 'target',
label: '触发器',
valueType: 'any',
connected: false
},
{
key: 'systemPrompt',
type: 'textarea',
valueType: 'string',
value:
'laf 是云开发平台,可以快速的开发应用\nlaf 是一个开源的 BaaS 开发平台Backend as a Service)\nlaf 是一个开箱即用的 serverless 开发平台\nlaf 是一个集「函数计算」、「数据库」、「对象存储」等于一身的一站式开发平台\nlaf 可以是开源版的腾讯云开发、开源版的 Google Firebase、开源版的 UniCloud',
label: '系统提示词',
description:
'你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统',
value:
'laf 是云开发平台,可以快速的开发应用\nlaf 是一个开源的 BaaS 开发平台Backend as a Service)\nlaf 是一个开箱即用的 serverless 开发平台\nlaf 是一个集「函数计算」、「数据库」、「对象存储」等于一身的一站式开发平台\nlaf 可以是开源版的腾讯云开发、开源版的 Google Firebase、开源版的 UniCloud',
connected: true
},
{
@@ -1561,16 +1637,25 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'text',
value: '你好,我是 laf 助手,有什么可以帮助你的?',
type: 'textarea',
valueType: 'string',
value: '你好,我是 laf 助手,有什么可以帮助你的?',
label: '回复的内容',
description:
'可以使用 \\n 来实现换行。可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
'可以使用 \\n 来实现连续换行。\n\n可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
connected: true
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: 'boolean',
type: 'source',
targets: []
}
]
},
{
moduleId: 'iejcou',
@@ -1590,16 +1675,25 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'text',
value: '你好,我仅能回答 laf 相关问题,请问你有什么问题么?',
type: 'textarea',
valueType: 'string',
value: '你好,我仅能回答 laf 相关问题,请问你有什么问题么?',
label: '回复的内容',
description:
'可以使用 \\n 来实现换行。可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
'可以使用 \\n 来实现连续换行。\n\n可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
connected: true
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: 'boolean',
type: 'source',
targets: []
}
]
},
{
moduleId: 'nlfwkc',
@@ -1607,7 +1701,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
flowType: 'chatNode',
showStatus: true,
position: {
x: 1821.979893659983,
x: 2260.436476009152,
y: 1104.6583548423682
},
inputs: [
@@ -1616,7 +1710,48 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
type: 'custom',
label: '对话模型',
value: 'gpt-3.5-turbo-16k',
list: [],
list: [
{
label: 'FastAI-4k',
value: 'gpt-3.5-turbo'
},
{
label: 'FastAI-instruct',
value: 'gpt-3.5-turbo-instruct'
},
{
label: 'FastAI-16k',
value: 'gpt-3.5-turbo-16k'
},
{
label: 'FastAI-Plus-8k',
value: 'gpt-4'
},
{
label: 'FastAI-Plus-32k',
value: 'gpt-4-32k'
},
{
label: '百川2-13B(测试)',
value: 'baichuan2-13b'
},
{
label: '文心一言(QPS 5)',
value: 'ERNIE-Bot'
},
{
label: '星火2.0(QPS 2)',
value: 'SparkDesk'
},
{
label: 'chatglm_pro(QPS 5)',
value: 'chatglm_pro'
},
{
label: '通义千问(QPS 5)',
value: 'qwen-v1'
}
],
connected: true
},
{
@@ -1663,6 +1798,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
key: 'systemPrompt',
type: 'textarea',
label: '系统提示词',
max: 300,
valueType: 'string',
description:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
@@ -1671,6 +1807,22 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: '知识库是关于 laf 的内容。',
connected: true
},
{
key: 'quoteTemplate',
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
key: 'quotePrompt',
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
{
key: 'switch',
type: 'target',
@@ -1680,8 +1832,9 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'quoteQA',
type: 'target',
type: 'custom',
label: '引用内容',
description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
valueType: 'kb_quote',
connected: true
},
@@ -1705,8 +1858,9 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
{
key: 'answerText',
label: '模型回复',
description: '直接响应,无需配置',
type: 'hidden',
description: '将在 stream 回复完毕后触发',
valueType: 'string',
type: 'source',
targets: []
},
{
@@ -1716,6 +1870,14 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
valueType: 'boolean',
type: 'source',
targets: []
},
{
key: 'history',
label: '新的上下文',
description: '将本次回复内容拼接上历史记录,作为新的上下文返回',
valueType: 'chat_history',
type: 'source',
targets: []
}
]
},
@@ -1725,7 +1887,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
flowType: 'historyNode',
position: {
x: 193.3803955457983,
y: 1116.251200765746
y: 1316.251200765746
},
inputs: [
{
@@ -1770,11 +1932,11 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
inputs: [
{
key: 'kbList',
type: 'custom',
label: '关联的知识库',
list: [],
key: 'kbList',
value: [],
list: [],
connected: true
},
{
@@ -1886,11 +2048,24 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
inputs: [
{
key: 'welcomeText',
type: 'input',
type: 'hidden',
label: '开场白',
value:
'你好,我是 laf 助手,有什么可以帮助你的?\n[laf 是什么?有什么用?]\n[laf 在线体验地址]\n[官网地址是多少]',
connected: true
},
{
key: 'variables',
type: 'hidden',
label: '对话框变量',
value: [],
connected: true
},
{
key: 'questionGuide',
type: 'switch',
label: '问题引导',
connected: true
}
],
outputs: []
@@ -1900,8 +2075,8 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
name: '指定回复',
flowType: 'answerNode',
position: {
x: 1828.4596416688908,
y: 765.3628156185887
x: 2262.720467249169,
y: 750.6776669274682
},
inputs: [
{
@@ -1913,16 +2088,25 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'text',
value: '对不起,我找不到你的问题,请更加详细的描述你的问题。',
type: 'textarea',
valueType: 'string',
value: '对不起,我找不到你的问题,请更加详细的描述你的问题。',
label: '回复的内容',
description:
'可以使用 \\n 来实现换行。可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
'可以使用 \\n 来实现连续换行。\n\n可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
connected: true
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: 'boolean',
type: 'source',
targets: []
}
]
},
{
moduleId: '5v78ap',
@@ -1942,16 +2126,56 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'text',
value: '这是一个商务问题',
type: 'textarea',
valueType: 'string',
value: '这是一个商务问题',
label: '回复的内容',
description:
'可以使用 \\n 来实现换行。可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
'可以使用 \\n 来实现连续换行。\n\n可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容',
connected: true
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: 'boolean',
type: 'source',
targets: []
}
]
},
{
moduleId: '9act94',
name: '用户问题(对话入口)',
flowType: 'questionInput',
position: {
x: 1827.2213090948171,
y: 2132.138812501788
},
inputs: [
{
key: 'userChatInput',
type: 'systemInput',
label: '用户问题',
connected: true
}
],
outputs: [
{
key: 'userChatInput',
label: '用户问题',
type: 'source',
valueType: 'string',
targets: [
{
moduleId: 'nlfwkc',
key: 'userChatInput'
}
]
}
]
}
]
}

View File

@@ -11,7 +11,8 @@ export enum FlowInputItemTypeEnum {
target = 'target',
none = 'none',
switch = 'switch',
hidden = 'hidden'
hidden = 'hidden',
selectApp = 'selectApp'
}
export enum FlowOutputItemTypeEnum {
@@ -33,7 +34,8 @@ export enum FlowModuleTypeEnum {
answerNode = 'answerNode',
classifyQuestion = 'classifyQuestion',
contentExtract = 'contentExtract',
httpRequest = 'httpRequest'
httpRequest = 'httpRequest',
app = 'app'
}
export enum SpecialInputKeyEnum {

View File

@@ -3,71 +3,39 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { CreateQuestionGuideProps } from '@/api/core/ai/agent/type';
import { getAIChatApi } from '@fastgpt/core/aiApi/config';
import { Prompt_QuestionGuide } from '@/prompts/core/agent';
import { pushQuestionGuideBill } from '@/service/common/bill/push';
import { defaultQGModel } from '@/pages/api/system/getInitData';
import { createQuestionGuide } from '@fastgpt/core/ai/functions/createQuestionGuide';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { messages } = req.body as CreateQuestionGuideProps;
const { user } = await authUser({ req, authToken: true, authApiKey: true, authBalance: true });
const { user } = await authUser({
req,
authOutLink: true,
authToken: true,
authApiKey: true,
authBalance: true
});
if (!user) {
throw new Error('user not found');
}
const qgModel = global.qgModel || defaultQGModel;
const chatAPI = getAIChatApi(user.openaiAccount);
const { data } = await chatAPI.createChatCompletion({
model: qgModel.model,
temperature: 0,
max_tokens: 200,
messages: [
...messages,
{
role: 'user',
content: Prompt_QuestionGuide
}
],
stream: false
const { result, tokens } = await createQuestionGuide({
messages,
model: (global.qgModel || defaultQGModel).model
});
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;
jsonRes(res, {
data: result
});
const start = answer.indexOf('[');
const end = answer.lastIndexOf(']');
if (start === -1 || end === -1) {
return jsonRes(res, {
data: []
});
}
const jsonStr = answer
.substring(start, end + 1)
.replace(/(\\n|\\)/g, '')
.replace(/ /g, '');
try {
jsonRes(res, {
data: JSON.parse(jsonStr)
});
pushQuestionGuideBill({
tokens: totalTokens,
userId: user._id
});
return;
} catch (error) {
return jsonRes(res, {
data: []
});
}
pushQuestionGuideBill({
tokens: tokens,
userId: user._id
});
} catch (err) {
jsonRes(res, {
code: 500,

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authBalanceByUid, authUser } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { pushGenerateVectorBill } from '@/service/common/bill/push';
type Props = {

View File

@@ -5,7 +5,7 @@ import { User } from '@/service/models/user';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { UserUpdateParams } from '@/types/user';
import { axiosConfig, getAIChatApi, openaiBaseUrl } from '@fastgpt/core/aiApi/config';
import { axiosConfig, getAIChatApi, openaiBaseUrl } from '@fastgpt/core/ai/config';
/* update user info */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {

View File

@@ -12,9 +12,10 @@ import {
dispatchAnswer,
dispatchClassifyQuestion,
dispatchContentExtract,
dispatchHttpRequest
dispatchHttpRequest,
dispatchAppRequest
} from '@/service/moduleDispatch';
import type { CreateChatCompletionRequest } from '@fastgpt/core/aiApi/type';
import type { CreateChatCompletionRequest } from '@fastgpt/core/ai/type';
import type { MessageItemType } from '@/types/core/chat/type';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
@@ -325,14 +326,19 @@ export async function dispatchModules({
responseData
}: {
answerText?: string;
responseData?: ChatHistoryItemResType;
responseData?: ChatHistoryItemResType | ChatHistoryItemResType[];
}) {
const time = Date.now();
responseData &&
chatResponse.push({
...responseData,
runningTime: +((time - runningTime) / 1000).toFixed(2)
});
if (responseData) {
if (Array.isArray(responseData)) {
chatResponse = chatResponse.concat(responseData);
} else {
chatResponse.push({
...responseData,
runningTime: +((time - runningTime) / 1000).toFixed(2)
});
}
}
runningTime = time;
chatAnswerText += answerText;
}
@@ -411,7 +417,7 @@ export async function dispatchModules({
variables,
moduleName: module.name,
outputs: module.outputs,
userOpenaiAccount: user?.openaiAccount,
user,
inputs: params
};
@@ -424,7 +430,8 @@ export async function dispatchModules({
[FlowModuleTypeEnum.kbSearchNode]: dispatchKBSearch,
[FlowModuleTypeEnum.classifyQuestion]: dispatchClassifyQuestion,
[FlowModuleTypeEnum.contentExtract]: dispatchContentExtract,
[FlowModuleTypeEnum.httpRequest]: dispatchHttpRequest
[FlowModuleTypeEnum.httpRequest]: dispatchHttpRequest,
[FlowModuleTypeEnum.app]: dispatchAppRequest
};
if (callbackMap[module.flowType]) {
return callbackMap[module.flowType](props);

View File

@@ -0,0 +1,25 @@
import React from 'react';
import { NodeProps } from 'reactflow';
import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/core/app/flow';
import Divider from '../modules/Divider';
import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';
const NodeAPP = ({ data }: NodeProps<FlowModuleItemType>) => {
const { moduleId, inputs, outputs } = data;
return (
<NodeCard minW={'350px'} {...data}>
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
<RenderInput moduleId={moduleId} flowInputList={inputs} />
</Container>
<Divider text="Output" />
<Container>
<RenderOutput moduleId={moduleId} flowOutputList={outputs} />
</Container>
</NodeCard>
);
};
export default React.memo(NodeAPP);

View File

@@ -35,6 +35,7 @@ const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
type OnChange<ChangesType> = (changes: ChangesType[]) => void;
export type useFlowStoreType = {
appId: string;
reactFlowWrapper: null | React.RefObject<HTMLDivElement>;
nodes: Node<FlowModuleItemType, string | undefined>[];
setNodes: Dispatch<SetStateAction<Node<FlowModuleItemType, string | undefined>[]>>;
@@ -58,6 +59,7 @@ export type useFlowStoreType = {
};
const StateContext = createContext<useFlowStoreType>({
appId: '',
reactFlowWrapper: null,
nodes: [],
setNodes: function (
@@ -109,7 +111,7 @@ const StateContext = createContext<useFlowStoreType>({
});
export const useFlowStore = () => useContext(StateContext);
export const FlowProvider = ({ children }: { children: React.ReactNode }) => {
export const FlowProvider = ({ appId, children }: { appId: string; children: React.ReactNode }) => {
const reactFlowWrapper = useRef<HTMLDivElement>(null);
const { t } = useTranslation();
const { toast } = useToast();
@@ -209,7 +211,6 @@ export const FlowProvider = ({ children }: { children: React.ReactNode }) => {
const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
const mouseY = (position.y - reactFlowBounds.top - y) / zoom;
console.log(template);
setNodes((state) =>
state.concat(
appModule2FlowNode({
@@ -328,6 +329,7 @@ export const FlowProvider = ({ children }: { children: React.ReactNode }) => {
);
const value = {
appId,
reactFlowWrapper,
nodes,
setNodes,

View File

@@ -1,5 +1,5 @@
import React, { useState } from 'react';
import type { FlowInputItemType } from '@/types/core/app/flow';
import type { FlowInputItemType, SelectAppItemType } from '@/types/core/app/flow';
import {
Box,
Textarea,
@@ -9,7 +9,10 @@ import {
NumberInputStepper,
NumberIncrementStepper,
NumberDecrementStepper,
Flex
Flex,
useDisclosure,
Button,
useTheme
} from '@chakra-ui/react';
import { FlowInputItemTypeEnum } from '@/constants/flow';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
@@ -20,7 +23,9 @@ import MyTooltip from '@/components/MyTooltip';
import TargetHandle from './TargetHandle';
import MyIcon from '@/components/Icon';
const SetInputFieldModal = dynamic(() => import('../modules/SetInputFieldModal'));
const SelectAppModal = dynamic(() => import('../../../SelectAppModal'));
import { useFlowStore } from '../Provider';
import Avatar from '@/components/Avatar';
export const Label = ({
moduleId,
@@ -141,6 +146,7 @@ const RenderInput = ({
CustomComponent?: Record<string, (e: FlowInputItemType) => React.ReactNode>;
}) => {
const { onChangeNode } = useFlowStore();
return (
<>
{flowInputList.map(
@@ -253,6 +259,9 @@ const RenderInput = ({
{item.type === FlowInputItemTypeEnum.custom && CustomComponent[item.key] && (
<>{CustomComponent[item.key]({ ...item })}</>
)}
{item.type === FlowInputItemTypeEnum.selectApp && (
<RenderSelectApp app={item} moduleId={moduleId} />
)}
</Box>
</Box>
)
@@ -262,3 +271,54 @@ const RenderInput = ({
};
export default React.memo(RenderInput);
function RenderSelectApp({ app, moduleId }: { app: FlowInputItemType; moduleId: string }) {
const { onChangeNode, appId } = useFlowStore();
const theme = useTheme();
const {
isOpen: isOpenSelectApp,
onOpen: onOpenSelectApp,
onClose: onCloseSelectApp
} = useDisclosure();
const value = app.value as SelectAppItemType | undefined;
return (
<>
<Box onClick={onOpenSelectApp}>
{!value ? (
<Button variant={'base'} w={'100%'}>
</Button>
) : (
<Flex alignItems={'center'} border={theme.borders.base} borderRadius={'md'} px={3} py={2}>
<Avatar src={value?.logo} />
<Box fontWeight={'bold'} ml={1}>
{value?.name}
</Box>
</Flex>
)}
</Box>
{isOpenSelectApp && (
<SelectAppModal
defaultApps={app.value?.id ? [app.value.id] : []}
filterApps={[appId]}
onClose={onCloseSelectApp}
onSuccess={(e) => {
onChangeNode({
moduleId,
type: 'inputs',
key: 'app',
value: {
...app,
value: e[0]
}
});
}}
/>
)}
</>
);
}

View File

@@ -38,6 +38,7 @@ const NodeVariable = dynamic(() => import('./components/Nodes/NodeVariable'));
const NodeUserGuide = dynamic(() => import('./components/Nodes/NodeUserGuide'));
const NodeExtract = dynamic(() => import('./components/Nodes/NodeExtract'));
const NodeHttp = dynamic(() => import('./components/Nodes/NodeHttp'));
const NodeAPP = dynamic(() => import('./components/Nodes/NodeAPP'));
import 'reactflow/dist/style.css';
@@ -52,7 +53,8 @@ const nodeTypes = {
[FlowModuleTypeEnum.answerNode]: NodeAnswer,
[FlowModuleTypeEnum.classifyQuestion]: NodeCQNode,
[FlowModuleTypeEnum.contentExtract]: NodeExtract,
[FlowModuleTypeEnum.httpRequest]: NodeHttp
[FlowModuleTypeEnum.httpRequest]: NodeHttp,
[FlowModuleTypeEnum.app]: NodeAPP
// [FlowModuleTypeEnum.empty]: EmptyModule
};
const edgeTypes = {
@@ -116,8 +118,17 @@ function FlowHeader({ app, onCloseSettings }: Props & {}) {
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
const modules = flow2AppModules();
// check required connect
for (let i = 0; i < modules.length; i++) {
const item = modules[i];
if (item.inputs.find((input) => input.required && !input.connected)) {
return Promise.reject(`${item.name}】存在未连接的必填输入`);
}
}
return updateAppDetail(app._id, {
modules: flow2AppModules(),
modules: modules,
type: AppTypeEnum.advanced
});
},
@@ -314,7 +325,7 @@ const Flow = (data: Props) => {
return (
<Box h={'100%'} position={'fixed'} zIndex={999} top={0} left={0} right={0} bottom={0}>
<ReactFlowProvider>
<FlowProvider>
<FlowProvider appId={data?.app?._id}>
<Flex h={'100%'} flexDirection={'column'} bg={'#fff'}>
{!!data.app._id && <AppEdit {...data} />}
</Flex>

View File

@@ -69,7 +69,12 @@ export const DatasetSelectModal = ({
tips={'仅能选择同一个索引模型的知识库'}
onClose={onClose}
>
<ModalBody flex={['1 0 0', '0 0 auto']} maxH={'80vh'} overflowY={'auto'} userSelect={'none'}>
<ModalBody
flex={['1 0 0', '1 0 auto']}
maxH={'80vh'}
overflowY={['auto', 'unset']}
userSelect={'none'}
>
<Grid gridTemplateColumns={['repeat(1,1fr)', 'repeat(2,1fr)', 'repeat(3,1fr)']} gridGap={3}>
{filterKbList.selected.map((item) =>
(() => {

View File

@@ -287,7 +287,6 @@ function DetailLogsModal({
<Box pt={2} flex={'1 0 0'}>
<ChatBox
ref={ChatBoxRef}
chatId={chatId}
appAvatar={chat?.app.avatar}
userAvatar={HUMAN_ICON}
feedbackType={'admin'}

View File

@@ -0,0 +1,110 @@
import React, { useMemo } from 'react';
import { ModalBody, Flex, Box, useTheme, ModalFooter, Button } from '@chakra-ui/react';
import MyModal from '@/components/MyModal';
import { getMyModels } from '@/api/app';
import { useQuery } from '@tanstack/react-query';
import type { SelectAppItemType } from '@/types/core/app/flow';
import Avatar from '@/components/Avatar';
import { useTranslation } from 'react-i18next';
import { useLoading } from '@/hooks/useLoading';
/**
 * Modal for picking one or more of the current user's apps.
 *
 * Shows all apps returned by getMyModels (minus `filterApps`), lets the user
 * toggle up to `max` selections, and reports the chosen apps (id/name/logo)
 * through `onSuccess` on confirm.
 */
const SelectAppModal = ({
  defaultApps = [],
  filterApps = [],
  max = 1,
  onClose,
  onSuccess
}: {
  // app ids pre-selected when the modal opens
  defaultApps: string[];
  // app ids hidden from the list (e.g. the current app, to avoid self-reference)
  filterApps?: string[];
  // maximum number of apps the user may select (1 = single-select, click replaces)
  max?: number;
  onClose: () => void;
  // receives the selected apps mapped to SelectAppItemType on confirm
  onSuccess: (e: SelectAppItemType[]) => void;
}) => {
  const { t } = useTranslation();
  const { Loading } = useLoading();
  const theme = useTheme();
  const [selectedApps, setSelectedApps] = React.useState<string[]>(defaultApps);

  /* load the user's apps (cache key fixed: was misspelled 'loadMyApos') */
  const { data = [], isLoading } = useQuery(['loadMyApps'], () => getMyModels());

  // hide any apps the caller explicitly filtered out
  const apps = useMemo(
    () => data.filter((app) => !filterApps.includes(app._id)),
    [data, filterApps]
  );

  return (
    <MyModal
      isOpen
      title={`选择应用${max > 1 ? `(${selectedApps.length}/${max})` : ''}`}
      onClose={onClose}
      w={'700px'}
      position={'relative'}
    >
      <ModalBody
        minH={'300px'}
        display={'grid'}
        gridTemplateColumns={['1fr', 'repeat(3,1fr)']}
        gridGap={4}
      >
        {apps.map((app) => (
          <Flex
            key={app._id}
            alignItems={'center'}
            border={theme.borders.base}
            borderRadius={'md'}
            px={1}
            py={2}
            cursor={'pointer'}
            {...(selectedApps.includes(app._id)
              ? {
                  // already selected: highlight and toggle off on click
                  bg: 'myBlue.200',
                  onClick: () => {
                    setSelectedApps(selectedApps.filter((e) => e !== app._id));
                  }
                }
              : {
                  onClick: () => {
                    if (max === 1) {
                      // single-select: clicking a new app replaces the selection
                      setSelectedApps([app._id]);
                    } else if (selectedApps.length < max) {
                      setSelectedApps([...selectedApps, app._id]);
                    }
                  }
                })}
          >
            <Avatar src={app.avatar} w={['16px', '22px']} />
            <Box fontWeight={'bold'} ml={1}>
              {app.name}
            </Box>
          </Flex>
        ))}
      </ModalBody>
      <ModalFooter>
        <Button variant={'base'} onClick={onClose}>
          {t('Cancel')}
        </Button>
        <Button
          ml={2}
          onClick={() => {
            // map selected ids back to full app records before reporting
            onSuccess(
              apps
                .filter((app) => selectedApps.includes(app._id))
                .map((app) => ({
                  id: app._id,
                  name: app.name,
                  logo: app.avatar
                }))
            );
            onClose();
          }}
        >
          {t('Confirm')}
        </Button>
      </ModalFooter>
      <Loading loading={isLoading} fixed={false} />
    </MyModal>
  );
};
export default React.memo(SelectAppModal);

View File

@@ -64,7 +64,7 @@ const MyApps = () => {
);
/* 加载模型 */
useQuery(['loadModels'], () => loadMyApps(true), {
useQuery(['loadApps'], () => loadMyApps(true), {
refetchOnMount: true
});

View File

@@ -8,7 +8,6 @@ import { useRouter } from 'next/router';
const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
const { onExportChat } = useChatBox();
const router = useRouter();
const { appId, shareId } = router.query;
const menuList = useMemo(
() => [
@@ -18,8 +17,8 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
onClick: () => {
router.replace({
query: {
appId,
shareId
...router.query,
chatId: ''
}
});
}
@@ -36,7 +35,7 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
},
{ icon: 'pdf', label: 'PDF导出', onClick: () => onExportChat({ type: 'pdf', history }) }
],
[appId, history, onExportChat, router, shareId]
[history, onExportChat, router]
);
return history.length > 0 ? (

View File

@@ -358,7 +358,6 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
<ChatBox
ref={ChatBoxRef}
showEmptyIntro
chatId={chatId}
appAvatar={chatData.app.avatar}
userAvatar={userInfo?.avatar}
userGuideModule={chatData.app?.userGuideModule}

View File

@@ -24,10 +24,12 @@ import { serviceSideProps } from '@/utils/web/i18n';
const OutLink = ({
shareId,
chatId,
showHistory,
authToken
}: {
shareId: string;
chatId: string;
showHistory: '0' | '1';
authToken?: string;
}) => {
const router = useRouter();
@@ -89,9 +91,8 @@ const OutLink = ({
forbidRefresh.current = true;
router.replace({
query: {
shareId,
chatId: completionChatId,
authToken
...router.query,
chatId: completionChatId
}
});
}
@@ -174,59 +175,58 @@ const OutLink = ({
<title>{shareChatData.app.name}</title>
</Head>
<Flex h={'100%'} flexDirection={['column', 'row']}>
{((children: React.ReactNode) => {
return isPc ? (
<SideBar>{children}</SideBar>
) : (
<Drawer
isOpen={isOpenSlider}
placement="left"
autoFocus={false}
size={'xs'}
onClose={onCloseSlider}
>
<DrawerOverlay backgroundColor={'rgba(255,255,255,0.5)'} />
<DrawerContent maxWidth={'250px'} boxShadow={'2px 0 10px rgba(0,0,0,0.15)'}>
{children}
</DrawerContent>
</Drawer>
);
})(
<ChatHistorySlider
appName={shareChatData.app.name}
appAvatar={shareChatData.app.avatar}
activeChatId={chatId}
history={history.map((item) => ({
id: item.chatId,
title: item.title
}))}
onClose={onCloseSlider}
onChangeChat={(chatId) => {
console.log(chatId);
router.replace({
query: {
chatId: chatId || '',
shareId,
authToken
}
});
if (!isPc) {
onCloseSlider();
}
}}
onDelHistory={delOneShareHistoryByChatId}
onClearHistory={() => {
delManyShareChatHistoryByShareId(shareId);
router.replace({
query: {
shareId,
authToken
}
});
}}
/>
)}
{showHistory === '1'
? ((children: React.ReactNode) => {
return isPc ? (
<SideBar>{children}</SideBar>
) : (
<Drawer
isOpen={isOpenSlider}
placement="left"
autoFocus={false}
size={'xs'}
onClose={onCloseSlider}
>
<DrawerOverlay backgroundColor={'rgba(255,255,255,0.5)'} />
<DrawerContent maxWidth={'250px'} boxShadow={'2px 0 10px rgba(0,0,0,0.15)'}>
{children}
</DrawerContent>
</Drawer>
);
})(
<ChatHistorySlider
appName={shareChatData.app.name}
appAvatar={shareChatData.app.avatar}
activeChatId={chatId}
history={history.map((item) => ({
id: item.chatId,
title: item.title
}))}
onClose={onCloseSlider}
onChangeChat={(chatId) => {
router.replace({
query: {
...router.query,
chatId: chatId || ''
}
});
if (!isPc) {
onCloseSlider();
}
}}
onDelHistory={delOneShareHistoryByChatId}
onClearHistory={() => {
delManyShareChatHistoryByShareId(shareId);
router.replace({
query: {
...router.query,
chatId: ''
}
});
}}
/>
)
: null}
{/* chat container */}
<Flex
@@ -276,10 +276,11 @@ const OutLink = ({
export async function getServerSideProps(context: any) {
const shareId = context?.query?.shareId || '';
const chatId = context?.query?.chatId || '';
const showHistory = context?.query?.showHistory || '1';
const authToken = context?.query?.authToken || '';
return {
props: { shareId, chatId, authToken, ...(await serviceSideProps(context)) }
props: { shareId, chatId, showHistory, authToken, ...(await serviceSideProps(context)) }
};
}

View File

@@ -4,8 +4,8 @@ import { TrainingModeEnum } from '@/constants/plugin';
import { ERROR_ENUM } from '../errorCode';
import { sendInform } from '@/pages/api/user/inform/send';
import { authBalanceByUid } from '../utils/auth';
import { axiosConfig, getAIChatApi } from '@fastgpt/core/aiApi/config';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';
import { axiosConfig, getAIChatApi } from '@fastgpt/core/ai/config';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
import { addLog } from '../utils/tools';
import { splitText2Chunks } from '@/utils/file';
import { replaceVariable } from '@/utils/common/tools/text';

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_user_question';
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
const {
moduleName,
userOpenaiAccount,
user,
inputs: { agents, userChatInput }
} = props as Props;
@@ -53,7 +53,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.classifyQuestion,
moduleName,
price: userOpenaiAccount?.key ? 0 : cqModel.price * tokens,
price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
model: cqModel.name || '',
tokens,
cqList: agents,
@@ -63,7 +63,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
};
async function functionCall({
userOpenaiAccount,
user,
inputs: { agents, systemPrompt, history = [], userChatInput }
}: Props) {
const cqModel = global.cqModel;
@@ -105,7 +105,7 @@ async function functionCall({
required: ['type']
}
};
const chatAPI = getAIChatApi(userOpenaiAccount);
const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -116,7 +116,7 @@ async function functionCall({
functions: [agentFunction]
},
{
...axiosConfig(userOpenaiAccount)
...axiosConfig(user.openaiAccount)
}
);
@@ -138,7 +138,7 @@ async function functionCall({
}
async function completions({
userOpenaiAccount,
user,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
}: Props) {
const extractModel = global.extractModel;
@@ -155,7 +155,7 @@ Human:${userChatInput}`
}
];
const chatAPI = getAIChatApi(userOpenaiAccount);
const chatAPI = getAIChatApi(user.openaiAccount);
const { data } = await chatAPI.createChatCompletion(
{
@@ -166,7 +166,7 @@ Human:${userChatInput}`
},
{
timeout: 480000,
...axiosConfig(userOpenaiAccount)
...axiosConfig(user.openaiAccount)
}
);
const answer = data.choices?.[0].message?.content || '';

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { FlowModuleTypeEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_extract_data';
export async function dispatchContentExtract(props: Props): Promise<Response> {
const {
moduleName,
userOpenaiAccount,
user,
inputs: { content, description, extractKeys }
} = props;
@@ -73,7 +73,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.contentExtract,
moduleName,
price: userOpenaiAccount?.key ? 0 : extractModel.price * tokens,
price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
model: extractModel.name || '',
tokens,
extractDescription: description,
@@ -83,7 +83,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
async function functionCall({
userOpenaiAccount,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
@@ -126,7 +126,7 @@ async function functionCall({
}
};
const chatAPI = getAIChatApi(userOpenaiAccount);
const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -137,7 +137,7 @@ async function functionCall({
functions: [agentFunction]
},
{
...axiosConfig(userOpenaiAccount)
...axiosConfig(user.openaiAccount)
}
);
@@ -157,7 +157,7 @@ async function functionCall({
}
async function completions({
userOpenaiAccount,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
@@ -181,7 +181,7 @@ Human: ${content}`
}
];
const chatAPI = getAIChatApi(userOpenaiAccount);
const chatAPI = getAIChatApi(user.openaiAccount);
const { data } = await chatAPI.createChatCompletion(
{
@@ -192,7 +192,7 @@ Human: ${content}`
},
{
timeout: 480000,
...axiosConfig(userOpenaiAccount)
...axiosConfig(user.openaiAccount)
}
);
const answer = data.choices?.[0].message?.content || '';

View File

@@ -5,13 +5,13 @@ import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { SSEParseData, parseStreamChunk } from '@/utils/sse';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
import { ChatModelItemType } from '@/types/model';
import { textCensor } from '@/api/service/plugins';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
@@ -35,6 +35,7 @@ export type ChatProps = ModuleDispatchProps<
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
[TaskResponseKeyEnum.history]: ChatItemType[];
finish: boolean;
};
@@ -45,7 +46,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
moduleName,
stream = false,
detail = false,
userOpenaiAccount,
user,
outputs,
inputs: {
model = global.chatModels[0]?.model,
@@ -105,7 +106,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
// FastGPT temperature range: 1~10
temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
temperature = Math.max(temperature, 0.01);
const chatAPI = getAIChatApi(userOpenaiAccount);
const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -128,7 +129,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
{
timeout: 480000,
responseType: stream ? 'stream' : 'json',
...axiosConfig(userOpenaiAccount)
...axiosConfig(user.openaiAccount)
}
);
@@ -179,7 +180,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.chatNode,
moduleName,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
@@ -187,6 +188,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
quoteList: filterQuoteQA,
historyPreview: getHistoryPreview(completeMessages)
},
[TaskResponseKeyEnum.history]: completeMessages,
finish: true
};
};

View File

@@ -4,5 +4,6 @@ export * from './chat/oneapi';
export * from './kb/search';
export * from './tools/answer';
export * from './tools/http';
export * from './tools/runApp';
export * from './agent/classifyQuestion';
export * from './agent/extract';

View File

@@ -6,7 +6,6 @@ export type AnswerProps = ModuleDispatchProps<{
text: string;
}>;
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
finish: boolean;
};
@@ -29,7 +28,6 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
}
return {
[TaskResponseKeyEnum.answerText]: text,
finish: true
};
};

View File

@@ -0,0 +1,80 @@
import { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { SelectAppItemType } from '@/types/core/app/flow';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { App } from '@/service/mongo';
import { responseWrite } from '@/service/common/stream';
import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
import { textAdaptGptResponse } from '@/utils/adapt';
type Props = ModuleDispatchProps<{
userChatInput: string;
history?: ChatItemType[];
app: SelectAppItemType;
}>;
type Response = {
finish: boolean;
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType[];
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.history]: ChatItemType[];
};
/**
 * Flow module dispatcher: run another FastGPT app as a sub-flow.
 *
 * Loads the target app's module graph by id, dispatches it with the current
 * user's input/history/variables, and returns the sub-app's answer together
 * with the updated chat history (input + answer appended).
 *
 * Rejects when the user input is empty or the referenced app does not exist.
 */
export const dispatchAppRequest = async (props: Record<string, any>): Promise<Response> => {
  const {
    res,
    variables,
    user,
    stream,
    detail,
    inputs: { userChatInput, history = [], app }
  } = props as Props;

  if (!userChatInput) {
    return Promise.reject('Input is empty');
  }

  // NOTE(review): no ownership check — any app id reachable here is executed.
  // Confirm upstream access control guarantees `app.id` belongs to this user.
  const appData = await App.findById(app.id);

  if (!appData) {
    return Promise.reject('App not found');
  }

  // Emit a newline so the sub-app's streamed answer is visually separated
  // from any text the parent flow already produced.
  responseWrite({
    res,
    event: detail ? sseResponseEventEnum.answer : undefined,
    data: textAdaptGptResponse({
      text: '\n'
    })
  });

  const { responseData, answerText } = await dispatchModules({
    res,
    modules: appData.modules,
    user,
    variables,
    params: {
      history,
      userChatInput
    },
    stream,
    detail
  });

  // Append this turn (user question + sub-app answer) to the running history
  // so downstream modules see the sub-app's reply as part of the conversation.
  const completeMessages = history.concat([
    {
      obj: ChatRoleEnum.Human,
      value: userChatInput
    },
    {
      obj: ChatRoleEnum.AI,
      value: answerText
    }
  ]);

  return {
    finish: true,
    // use the enum key to match the declared Response type (was bare `responseData`)
    [TaskResponseKeyEnum.responseData]: responseData,
    [TaskResponseKeyEnum.answerText]: answerText,
    [TaskResponseKeyEnum.history]: completeMessages
  };
};

View File

@@ -102,8 +102,23 @@ export async function authOutLinkLimit({
await authShareStart({ authToken, tokenUrl: outLink.limit.hookUrl, question });
}
export async function authOutLinkId({ id }: { id: string }) {
const outLink = await OutLink.findOne({
shareId: id
});
if (!outLink) {
return Promise.reject('分享链接无效');
}
return {
userId: String(outLink.userId)
};
}
type TokenAuthResponseType = {
success: boolean;
msg?: string;
message?: string;
};
@@ -119,7 +134,7 @@ export const authShareChatInit = async (authToken?: string, tokenUrl?: string) =
}
});
if (data?.success !== true) {
return Promise.reject(data?.message || '身份校验失败');
return Promise.reject(data?.message || data?.msg || '身份校验失败');
}
} catch (error) {
return Promise.reject('身份校验失败');
@@ -148,7 +163,7 @@ export const authShareStart = async ({
});
if (data?.success !== true) {
return Promise.reject(data?.message || '身份校验失败');
return Promise.reject(data?.message || data?.msg || '身份校验失败');
}
} catch (error) {
return Promise.reject('身份校验失败');

View File

@@ -1,15 +1,17 @@
import type { NextApiRequest } from 'next';
import Cookie from 'cookie';
import { App, OpenApi, User, KB } from '../mongo';
import { App, User, KB } from '../mongo';
import type { AppSchema, UserModelSchema } from '@/types/mongoSchema';
import { ERROR_ENUM } from '../errorCode';
import { authJWT } from './tools';
import { authOpenApiKey } from '../support/openapi/auth';
import { authOutLinkId } from '../support/outLink/auth';
export enum AuthUserTypeEnum {
token = 'token',
root = 'root',
apikey = 'apikey'
apikey = 'apikey',
outLink = 'outLink'
}
/* auth balance */
@@ -34,13 +36,15 @@ export const authUser = async ({
authToken = false,
authRoot = false,
authApiKey = false,
authBalance = false
authBalance = false,
authOutLink
}: {
req: NextApiRequest;
authToken?: boolean;
authRoot?: boolean;
authApiKey?: boolean;
authBalance?: boolean;
authOutLink?: boolean;
}) => {
const authCookieToken = async (cookie?: string, token?: string): Promise<string> => {
// 获取 cookie
@@ -107,13 +111,18 @@ export const authUser = async ({
userid?: string;
authorization?: string;
};
const { shareId } = (req?.body || {}) as { shareId?: string };
let uid = '';
let appId = '';
let openApiKey = apikey;
let authType: `${AuthUserTypeEnum}` = AuthUserTypeEnum.token;
if (authToken && (cookie || token)) {
if (authOutLink && shareId) {
const res = await authOutLinkId({ id: shareId });
uid = res.userId;
authType = AuthUserTypeEnum.outLink;
} else if (authToken && (cookie || token)) {
// user token(from fastgpt web)
uid = await authCookieToken(cookie, token);
authType = AuthUserTypeEnum.token;

View File

@@ -60,3 +60,9 @@ export type FlowModuleTemplateType = {
export type FlowModuleItemType = FlowModuleTemplateType & {
moduleId: string;
};
/** Minimal app reference stored on the "app call" flow-module input. */
export type SelectAppItemType = {
  id: string; // referenced app's _id
  name: string; // display name shown on the node
  logo: string; // avatar/logo url shown on the node
};

View File

@@ -1,4 +1,4 @@
import type { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
import type { NextApiResponse } from 'next';
import { RunningModuleItemType } from '@/types/app';
import { UserModelSchema } from '@/types/mongoSchema';
@@ -13,6 +13,6 @@ export type ModuleDispatchProps<T> = {
detail: boolean;
variables: Record<string, any>;
outputs: RunningModuleItemType['outputs'];
userOpenaiAccount?: UserModelSchema['openaiAccount'];
user: UserModelSchema;
inputs: T;
};

View File

@@ -2,7 +2,7 @@ import { formatPrice } from '@fastgpt/common/bill/index';
import type { BillSchema } from '@/types/common/bill';
import type { UserBillType } from '@/types/user';
import { ChatItemType } from '@/types/chat';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/types/core/chat/type';
import type { AppModuleItemType } from '@/types/app';

View File

@@ -1,6 +1,6 @@
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
import type { MessageItemType } from '@/types/core/chat/type';
const chat2Message = {

View File

@@ -2,7 +2,7 @@
import { ChatItemType } from '@/types/chat';
import { Tiktoken } from 'js-tiktoken/lite';
import { adaptChat2GptMessages } from '../adapt/message';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
import encodingJson from './cl100k_base.json';
/* init tikToken obj */