Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 21:13:50 +00:00)
File input (#2270)
* doc
* feat: file upload config
* perf: chat box file params
* feat: markdown show file
* feat: chat file store and clear
* perf: read file contentType
* feat: llm vision config
* feat: file url output
* perf: plugin error text
* perf: image load
* feat: ai chat document
* perf: file block ui
* feat: read file node
* feat: file read response field
* feat: simple mode support read files
* feat: tool call
* feat: read file histories
* perf: select file
* perf: select file config
* i18n
* i18n
* fix: ts; feat: tool response preview result
@@ -1,6 +1,7 @@
 import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
 import { getAIApi } from '../config';
 import { countGptMessagesTokens } from '../../../common/string/tiktoken/index';
+import { loadRequestMessages } from '../../chat/utils';
 
 export const Prompt_QuestionGuide = `你是一个AI智能助手,可以回答和解决我的问题。请结合前面的对话记录,帮我生成 3 个问题,引导我继续提问。问题的长度应小于20个字符,按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
 
@@ -25,7 +26,10 @@ export async function createQuestionGuide({
     model: model,
     temperature: 0.1,
     max_tokens: 200,
-    messages: concatMessages,
+    messages: await loadRequestMessages({
+      messages: concatMessages,
+      useVision: false
+    }),
     stream: false
   });
 
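The change routes the question-guide messages through loadRequestMessages with useVision: false before they reach the completion call. For context, Prompt_QuestionGuide asks the model to generate three short follow-up questions (under 20 characters each), returned as a JSON array, based on the preceding conversation. The sketch below is a minimal, self-contained illustration of what a call shaped like loadRequestMessages({ messages, useVision }) might do when vision is disabled; the helper name, the local types, and the flattening behavior are assumptions for illustration only, not the actual implementation in '../../chat/utils'.

type TextPart = { type: 'text'; text: string };
type ImagePart = { type: 'image_url'; image_url: { url: string } };
type ChatMessage = {
  role: 'system' | 'user' | 'assistant';
  content: string | (TextPart | ImagePart)[];
};

// Hypothetical stand-in for loadRequestMessages from '../../chat/utils'.
// Assumption: when useVision is false, multimodal content is flattened to plain
// text so image parts never reach a text-only model such as the question-guide model.
async function loadRequestMessagesSketch({
  messages,
  useVision
}: {
  messages: ChatMessage[];
  useVision: boolean;
}): Promise<ChatMessage[]> {
  return messages.map((msg) => {
    // String content and vision-enabled calls pass through unchanged.
    if (typeof msg.content === 'string' || useVision) return msg;

    // useVision === false: keep only the text parts and join them into one string.
    const text = msg.content
      .filter((part): part is TextPart => part.type === 'text')
      .map((part) => part.text)
      .join('\n');
    return { ...msg, content: text };
  });
}

// Usage mirroring the diff above:
// messages: await loadRequestMessagesSketch({ messages: concatMessages, useVision: false })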