Mirror of https://github.com/labring/FastGPT.git (synced 2025-10-15 15:41:05 +00:00)
4.7 doc update (#1068)
* fix: plugin update
* feat: get current time plugin
* fix: ts
* perf: select app ux
* fix: ts
* perf: max w
* move code
* perf: inform tip
* fix: inform
* doc
* fix: tool handle
* perf: tmp file store
* doc
* fix: message file selector
* feat: doc
* perf: switch trigger
* doc
* fix: openapi import
* round the number
* parse openapi schema
* fix empty line after variables (#64)
* doc image
* image size
* doc
* doc
* catch error

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
@@ -3,6 +3,7 @@ import multer from 'multer';
import path from 'path';
import { BucketNameEnum, bucketNameMap } from '@fastgpt/global/common/file/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { tmpFileDirPath } from './constants';

type FileType = {
  fieldname: string;
@@ -23,9 +24,9 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
    },
    preservePath: true,
    storage: multer.diskStorage({
      // destination: (_req, _file, cb) => {
      //   cb(null, tmpFileDirPath);
      // },
      destination: (_req, _file, cb) => {
        cb(null, tmpFileDirPath);
      },
      filename: async (req, file, cb) => {
        const { ext } = path.parse(decodeURIComponent(file.originalname));
        cb(null, `${getNanoid()}${ext}`);
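The hunk above switches the upload model from multer's default temp handling to an explicit destination under tmpFileDirPath, so uploaded files land in a directory the service controls (and can clean up, see the clearDirFiles helper added below). A minimal sketch of the same pattern, assuming an Express app; the route path, form field name, and directory value are illustrative stand-ins, not FastGPT's actual wiring:

import express from 'express';
import fs from 'fs';
import multer from 'multer';
import path from 'path';

// Assumed stand-in for the tmpFileDirPath constant imported in the diff
const tmpFileDirPath = '/tmp/fastgpt-uploads';
fs.mkdirSync(tmpFileDirPath, { recursive: true });

const upload = multer({
  limits: { fileSize: 500 * 1024 * 1024 },
  storage: multer.diskStorage({
    // Always write uploads into the controlled temp directory
    destination: (_req, _file, cb) => cb(null, tmpFileDirPath),
    // Random basename, original (decoded) extension
    filename: (_req, file, cb) => {
      const { ext } = path.parse(decodeURIComponent(file.originalname));
      cb(null, `${Date.now()}-${Math.round(Math.random() * 1e9)}${ext}`);
    }
  })
});

const app = express();
// Hypothetical route: a single file under the 'file' form field
app.post('/upload', upload.single('file'), (req, res) => {
  res.json({ tmpPath: req.file?.path });
});
app.listen(3000);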
@@ -27,3 +27,18 @@ export const guessBase64ImageType = (str: string) => {
  const firstChar = str.charAt(0);
  return imageTypeMap[firstChar] || defaultType;
};

export const clearDirFiles = (dirPath: string) => {
  if (!fs.existsSync(dirPath)) {
    return;
  }

  fs.readdirSync(dirPath).forEach((file) => {
    const curPath = `${dirPath}/${file}`;
    if (fs.lstatSync(curPath).isDirectory()) {
      clearDirFiles(curPath);
    } else {
      fs.unlinkSync(curPath);
    }
  });
};
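clearDirFiles walks a directory recursively and unlinks every file it finds; note that nested directories are emptied but the directory entries themselves are kept. A plausible call site, assuming it is pointed at the temp upload directory from the previous hunk and that clearDirFiles is in scope (the startup hook here is illustrative, not the actual place FastGPT calls it):

import fs from 'fs';

// Assumed value; in the service this comes from './constants'
const tmpFileDirPath = '/tmp/fastgpt-uploads';

// Make sure the directory exists, then drop any leftovers from a previous run
fs.mkdirSync(tmpFileDirPath, { recursive: true });
clearDirFiles(tmpFileDirPath);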
@@ -19,20 +19,28 @@ const defaultPrompt = `作为一个向量检索助手,你的任务是结合历
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
A: 当前对话是关于 Nginx 的介绍和使用等。
"""
原问题: 怎么下载
检索词: ["FastGPT 如何下载?","下载 FastGPT 需要什么条件?","有哪些渠道可以下载 FastGPT?"]
检索词: ["Nginx 如何下载?","下载 Nginx 需要什么条件?","有哪些渠道可以下载 Nginx?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
A: 当前对话是关于 Nginx 的介绍和使用等。
Q: 报错 "no connection"
A: 报错"no connection"可能是因为……
"""
原问题: 怎么解决
检索词: ["FastGPT 报错"no connection"如何解决?", "造成 'no connection' 报错的原因。", "FastGPT提示'no connection',要怎么办?"]
检索词: ["Nginx报错"no connection"如何解决?","造成'no connection'报错的原因。","Nginx提示'no connection',要怎么办?"]
----------------
历史记录:
"""
Q: 护产假多少天?
A: 护产假的天数根据员工所在的城市而定。请提供您所在的城市,以便我回答您的问题。
"""
原问题: 沈阳
检索词: ["沈阳的护产假多少天?"]
----------------
历史记录:
"""
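This hunk rewrites the few-shot examples of the query-extension prompt from FastGPT-specific questions to neutral Nginx ones, and adds an example (the 沈阳 maternity-leave turn) where the latest question only makes sense together with the history. The prompt asks the model to merge the history with the new question and emit a JSON array of standalone retrieval queries; a rough sketch of how a prompt like this is typically consumed (the placeholder names and helper functions below are assumptions, not FastGPT's actual implementation):

type ChatTurn = { obj: 'Human' | 'AI'; value: string };

// Fill the template; the {{histories}} / {{query}} placeholder names are assumed.
function buildQueryExtensionPrompt(template: string, histories: ChatTurn[], query: string) {
  const historyText = histories
    .map((h) => `${h.obj === 'Human' ? 'Q' : 'A'}: ${h.value}`)
    .join('\n');
  return template.replace('{{histories}}', historyText).replace('{{query}}', query);
}

// The model is expected to reply with a JSON array such as ["Nginx 如何下载?", ...].
function parseRetrievalQueries(answer: string): string[] {
  try {
    const arr = JSON.parse(answer.trim());
    return Array.isArray(arr) ? arr.map(String) : [];
  } catch {
    return []; // caller can fall back to the original question when parsing fails
  }
}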
@@ -42,14 +50,6 @@ A: FastGPT 的作者是 labring。
原问题: Tell me about him
检索词: ["Introduce labring, the author of FastGPT." ," Background information on author labring." "," Why does labring do FastGPT?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
"""
原问题: 高级编排怎么用
检索词: ["FastGPT的高级编排是什么?","FastGPT高级编排的使用教程。","FastGPT高级编排有什么用?"]
----------------
历史记录:
"""
Q: 对话背景。
@@ -13,13 +13,13 @@ export const Prompt_Tool_Call = `<Instruction>
USER: 你好呀
ANSWER: 0: 你好,有什么可以帮助你的么?
USER: 今天杭州的天气如何
ANSWER: 1: {"toolId":"w2121",arguments:{"city": "杭州"}}
ANSWER: 1: {"toolId":"testToolId",arguments:{"city": "杭州"}}
TOOL_RESPONSE: """
晴天......
"""
ANSWER: 0: 今天杭州是晴天。
USER: 今天杭州的天气适合去哪里玩?
ANSWER: 1: {"toolId":"as21da",arguments:{"query": "杭州 天气 去哪里玩"}}
ANSWER: 1: {"toolId":"testToolId2",arguments:{"query": "杭州 天气 去哪里玩"}}
TOOL_RESPONSE: """
晴天. 西湖、灵隐寺、千岛湖……
"""
@@ -131,7 +131,7 @@ export const runToolWithPromptCall = async (
  })();

  const parseAnswerResult = parseAnswer(answer);
  // console.log(answer, '==11==');
  // console.log(parseAnswer, '==11==');
  // No tools
  if (typeof parseAnswerResult === 'string') {
    // No tool is invoked, indicating that the process is over
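The previous two hunks touch the prompt-based tool-calling path: the few-shot examples now use neutral ids (testToolId, testToolId2), and the result of parseAnswer is kept in parseAnswerResult before the string/tool-call branch. The real parseAnswer lives elsewhere in this module; a minimal sketch of a parser for the `ANSWER: 0: <text>` / `ANSWER: 1: <tool call>` convention shown in the prompt, with an invented return type, might look like:

type ToolCallSketch = { toolId: string; arguments: Record<string, unknown> };

// Sketch only: distinguish a plain answer ("0: ...") from a tool call ("1: {...}").
function parseAnswerSketch(answer: string): string | ToolCallSketch {
  const text = answer.trim().replace(/^ANSWER:\s*/, '');
  if (text.startsWith('1:')) {
    // The few-shot examples are not strict JSON (unquoted `arguments` key),
    // so a production parser would need to be more forgiving than JSON.parse.
    try {
      return JSON.parse(text.slice(2).trim()) as ToolCallSketch;
    } catch {
      return text; // fall back to treating the reply as plain text
    }
  }
  return text.replace(/^0:\s*/, '');
}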
@@ -270,7 +270,9 @@ export const runToolWithPromptCall = async (

  // get the next user prompt
  lastMessage.content += `${answer}
TOOL_RESPONSE: ${toolsRunResponse.toolResponsePrompt}
TOOL_RESPONSE: """
${toolsRunResponse.toolResponsePrompt}
"""
ANSWER: `;

  /* check stop signal */
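After a tool runs, this hunk appends its output to the running transcript inside a triple-quoted TOOL_RESPONSE block instead of on a single line, matching the format the few-shot prompt demonstrates. Roughly, with invented values for the two variables taken from the diff:

// Invented values; in the real flow `answer` comes from the model and
// `toolResponsePrompt` from the executed tool.
const answer = 'ANSWER: 1: {"toolId":"testToolId","arguments":{"city":"杭州"}}';
const toolsRunResponse = { toolResponsePrompt: '晴天,26°C' };

let content = '';
content += `${answer}
TOOL_RESPONSE: """
${toolsRunResponse.toolResponsePrompt}
"""
ANSWER: `;
// The next completion request continues from the trailing "ANSWER: " so the model
// can either reply to the user (0:) or call another tool (1:).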
@@ -73,7 +73,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
    temperature = 0,
    maxToken = 4000,
    history = 6,
    quoteQA = [],
    quoteQA,
    userChatInput,
    isResponseAnswerText = true,
    systemPrompt = '',
@@ -114,6 +114,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
  const { filterMessages } = getChatMessages({
    model: modelConstantsData,
    histories: chatHistories,
    quoteQA,
    quoteText,
    quotePrompt,
    userChatInput,
@@ -269,13 +270,13 @@ function filterQuote({
    : '';

  return {
    filterQuoteQA: filterQuoteQA,
    quoteText
  };
}
function getChatMessages({
  quotePrompt,
  quoteText,
  quoteQA,
  histories = [],
  systemPrompt,
  userChatInput,
@@ -284,18 +285,20 @@ function getChatMessages({
}: {
  quotePrompt?: string;
  quoteText: string;
  quoteQA: ChatProps['params']['quoteQA'];
  histories: ChatItemType[];
  systemPrompt: string;
  userChatInput: string;
  inputFiles: UserChatItemValueItemType['file'][];
  model: LLMModelItemType;
}) {
  const replaceInputValue = quoteText
    ? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
        quote: quoteText,
        question: userChatInput
      })
    : userChatInput;
  const replaceInputValue =
    quoteQA !== undefined
      ? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
          quote: quoteText,
          question: userChatInput
        })
      : userChatInput;

  const messages: ChatItemType[] = [
    ...getSystemPrompt(systemPrompt),
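The last three hunks change how dispatchChatCompletion decides whether to wrap the user question in the quote template: quoteQA no longer defaults to [] and is passed through to getChatMessages, and the ternary now keys on `quoteQA !== undefined` rather than on a non-empty quoteText. As far as the diff shows, this distinguishes "a dataset search ran but returned nothing" (quoteQA is an empty array, the quote template is still applied) from "no dataset search is wired in at all" (quoteQA is undefined, the raw input is used). A simplified sketch of that branching, with a stand-in template instead of Prompt_QuotePromptList[0].value and assumed placeholder syntax:

// Stand-in quote prompt template; the real one comes from Prompt_QuotePromptList.
const quotePromptTemplate = `Use the <quote> below to answer the question.
<quote>
{{quote}}
</quote>
Question: {{question}}`;

function buildUserInput(userChatInput: string, quoteText: string, quoteQA?: unknown[]) {
  if (quoteQA === undefined) {
    // No dataset search connected to this node: send the question untouched.
    return userChatInput;
  }
  // A search ran (possibly with zero hits): keep the quote template so the model
  // knows it is expected to answer from the (possibly empty) knowledge-base context.
  return quotePromptTemplate
    .replace('{{quote}}', quoteText)
    .replace('{{question}}', userChatInput);
}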