Mirror of https://github.com/labring/FastGPT.git (synced 2025-10-18 17:44:23 +00:00)

* feat: favorite apps & quick apps with their own configuration (#5515)
* chore: extract chat history and drawer; fix model selector
* feat: display favourite apps and make it configurable
* feat: favorite apps & quick apps with their own configuration
* fix: fix tab title and add loading state for searching
* fix: cascade delete favorite app and quick app while deleting relative app
* chore: make improvements
* fix: favourite apps ui
* fix: add permission for quick apps
* chore: fix permission & clear redundant code
* perf: chat home page code
* chatbox ui
* fix: 4.12.2-dev (#5520)
* fix: add empty placeholder; fix app quick status; fix tag and layout
* chore: add tab query for the setting tabs
* chore: use `useConfirm` hook instead of `MyModal`
* remove log
* fix: fix modal padding (#5521)
* perf: manage app
* feat: enhance model provider handling and update icon references (#5493)
* perf: model provider
* sdk package
* refactor: create llm response (#5499)
* feat: add LLM response processing functions, including the creation of stream-based and complete responses
* feat: add volta configuration for node and pnpm versions
* refactor: update LLM response handling and event structure in tool choice logic
* feat: update LLM response structure and integrate with tool choice logic
* refactor: clean up imports and remove unused streamResponse function in chat and toolChoice modules
* refactor: rename answer variable to answerBuffer for clarity in LLM response handling
* feat: enhance LLM response handling with tool options and integrate tools into chat and tool choice logic
* refactor: remove volta configuration from package.json
* refactor: reorganize LLM response types and ensure default values for token counts
* refactor: streamline LLM response handling by consolidating response structure and removing redundant checks
* refactor: enhance LLM response handling by consolidating tool options and streamlining event callbacks
* fix: build error
* refactor: update tool type definitions for consistency in tool handling
* feat: llm request function
* fix: ts
* fix: ts
* fix: ahook ts
* fix: variable name
* update lock
* ts version
* doc
* remove log
* fix: translation type
* perf: workflow status check
* fix: ts
* fix: prompt tool call
* fix: fix missing plugin interact window & make tag draggable (#5527)
* fix: incorrect select quick apps state; filter apps type (#5528)
* fix: usesafe translation
* perf: add quickapp modal

Co-authored-by: 伍闲犬 <whoeverimf5@gmail.com>
Co-authored-by: Ctrlz <143257420+ctrlz526@users.noreply.github.com>
Co-authored-by: francis <zhichengfan18@gmail.com>
77 lines · 1.6 KiB · TypeScript
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import {
  QuestionGuidePrompt,
  QuestionGuideFooterPrompt
} from '@fastgpt/global/core/ai/prompt/agent';
import { addLog } from '../../../common/system/log';
import json5 from 'json5';
import { createLLMResponse } from '../llm/request';

export async function createQuestionGuide({
  messages,
  model,
  customPrompt
}: {
  messages: ChatCompletionMessageParam[];
  model: string;
  customPrompt?: string;
}): Promise<{
  result: string[];
  inputTokens: number;
  outputTokens: number;
}> {
  // Append the question-guide instruction to the end of the conversation.
  const concatMessages: ChatCompletionMessageParam[] = [
    ...messages,
    {
      role: 'user',
      content: `${customPrompt || QuestionGuidePrompt}\n${QuestionGuideFooterPrompt}`
    }
  ];

  const {
    answerText: answer,
    usage: { inputTokens, outputTokens }
  } = await createLLMResponse({
    body: {
      model,
      temperature: 0.1,
      max_tokens: 200,
      messages: concatMessages,
      stream: true
    }
  });

  // The model is expected to answer with a JSON array of suggested questions;
  // locate the outermost brackets before parsing.
  const start = answer.indexOf('[');
  const end = answer.lastIndexOf(']');

  if (start === -1 || end === -1) {
    addLog.warn('Create question guide error', { answer });
    return {
      result: [],
      inputTokens,
      outputTokens
    };
  }

  // Strip escaped newlines, stray backslashes and spaces so json5 can parse the array.
  const jsonStr = answer
    .substring(start, end + 1)
    .replace(/(\\n|\\)/g, '')
    .replace(/ /g, '');

  try {
    return {
      result: json5.parse(jsonStr),
      inputTokens,
      outputTokens
    };
  } catch (error) {
    console.log(error);

    return {
      result: [],
      inputTokens,
      outputTokens
    };
  }
}
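For context, a minimal usage sketch of how createQuestionGuide might be called after a chat turn. The relative import path, the message history, and the model id are illustrative assumptions, not code from this repository; only the function signature above is confirmed.

import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import { createQuestionGuide } from './createQuestionGuide'; // assumed file name and path

async function suggestNextQuestions(): Promise<string[]> {
  // Hypothetical prior conversation; in practice this would come from the stored chat history.
  const history: ChatCompletionMessageParam[] = [
    { role: 'user', content: 'How do I deploy FastGPT with Docker?' },
    { role: 'assistant', content: 'Use the provided docker-compose file and set the required env vars.' }
  ];

  // customPrompt is omitted, so the helper falls back to QuestionGuidePrompt.
  const { result, inputTokens, outputTokens } = await createQuestionGuide({
    messages: history,
    model: 'gpt-4o-mini' // assumed model id; the real value comes from the app's model config
  });

  console.log('Suggested questions:', result);
  console.log('Token usage:', { inputTokens, outputTokens });
  return result;
}

suggestNextQuestions().catch(console.error);

The helper assumes the model replies with a JSON-style array of follow-up questions, which is why it extracts the outermost [...] span and parses it with json5, a parser tolerant of trailing commas and single quotes.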