FastGPT/packages/service/core/ai/functions/createQuestionGuide.ts
Archer 34602b25df 4.6.8-alpha (#804)
* perf: redirect request and err log replace

perf: dataset openapi

feat: session

fix: retry input error

feat: 468 doc

sub page

feat: standard sub

perf: rerank tip

perf: rerank tip

perf: api sdk

perf: openapi

sub plan

perf: sub ui

fix: ts

* perf: init log

* fix: variable select

* sub page

* icon

* perf: llm model config

* perf: menu ux

* perf: system store

* perf: publish app name

* fix: init data

* perf: flow edit ux

* fix: value type format and ux

* fix prompt editor default value (#13)

* fix prompt editor default value

* fix prompt editor update when not focus

* add key with variable

---------

Co-authored-by: Archer <545436317@qq.com>

* fix: value type

* doc

* i18n

* import path

* home page

* perf: mongo session running

* fix: ts

* perf: use toast

* perf: flow edit

* perf: sse response

* slider ui

* fetch error

* fix prompt editor rerender when not focus by key defaultvalue (#14)

* perf: prompt editor

* feat: dataset search concat

* perf: doc

* fix:ts

* perf: doc

* fix json editor onblur value (#15)

* faq

* vector model default config

* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
2024-02-01 21:57:41 +08:00

import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
import { getAIApi } from '../config';

// Prompt (in Chinese) asking the model for 3 follow-up questions, each under 20
// characters, returned as a JSON array: ["问题1", "问题2", "问题3"].
export const Prompt_QuestionGuide = `我不太清楚问你什么问题，请帮我生成 3 个问题，引导我继续提问。问题的长度应小于20个字符，按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;

export async function createQuestionGuide({
  messages,
  model
}: {
  messages: ChatMessageItemType[];
  model: string;
}) {
  // OpenAI-compatible client with an 8-minute request timeout
  const ai = getAIApi({
    timeout: 480000
  });

  // Append the guide prompt to the conversation and request a short, low-temperature completion
  const data = await ai.chat.completions.create({
    model: model,
    temperature: 0.1,
    max_tokens: 200,
    messages: [
      ...messages,
      {
        role: 'user',
        content: Prompt_QuestionGuide
      }
    ],
    stream: false
  });

  const answer = data.choices?.[0]?.message?.content || '';
  const inputTokens = data.usage?.prompt_tokens || 0;
  const outputTokens = data.usage?.completion_tokens || 0;

  // Locate the JSON array inside the answer; bail out if none is found
  const start = answer.indexOf('[');
  const end = answer.lastIndexOf(']');
  if (start === -1 || end === -1) {
    return {
      result: [],
      inputTokens,
      outputTokens
    };
  }

  // Strip escaped newlines, stray backslashes and spaces before parsing
  const jsonStr = answer
    .substring(start, end + 1)
    .replace(/(\\n|\\)/g, '')
    .replace(/ /g, '');

  try {
    return {
      result: JSON.parse(jsonStr),
      inputTokens,
      outputTokens
    };
  } catch (error) {
    return {
      result: [],
      inputTokens,
      outputTokens
    };
  }
}
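
For reference, a minimal usage sketch of the function above. It assumes ChatMessageItemType is the OpenAI-style { role, content } message shape that the completions call expects, and 'gpt-3.5-turbo' is only a placeholder for whatever chat model the deployment has configured:

import { createQuestionGuide } from './createQuestionGuide';

async function demo() {
  // Hypothetical conversation history, assumed to follow the { role, content } shape
  const messages = [
    { role: 'user' as const, content: 'How do I deploy FastGPT?' },
    { role: 'assistant' as const, content: 'You can deploy it with docker-compose...' }
  ];

  // 'gpt-3.5-turbo' is only an example id; pass whichever chat model is configured
  const { result, inputTokens, outputTokens } = await createQuestionGuide({
    messages,
    model: 'gpt-3.5-turbo'
  });

  // result is a string[] of suggested follow-up questions, or [] when parsing failed
  console.log(result, inputTokens, outputTokens);
}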