commit bcf9491999 (parent d0041a98b4)
Author: Archer
Date: 2023-10-11 17:18:43 +08:00
Committed by: GitHub
51 changed files with 852 additions and 460 deletions

@@ -4,7 +4,7 @@ import { TrainingModeEnum } from '@/constants/plugin';
 import { ERROR_ENUM } from '../errorCode';
 import { sendInform } from '@/pages/api/user/inform/send';
 import { authBalanceByUid } from '../utils/auth';
-import { axiosConfig, getAIChatApi } from '@fastgpt/core/ai/config';
+import { getAIApi } from '@fastgpt/core/ai/config';
 import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
 import { addLog } from '../utils/tools';
 import { splitText2Chunks } from '@/utils/file';
@@ -58,8 +58,6 @@ export async function generateQA(): Promise<any> {
     const startTime = Date.now();
-    const chatAPI = getAIChatApi();
     // request LLM to get QA
     const text = data.q;
     const messages: ChatCompletionRequestMessage[] = [
@@ -73,19 +71,13 @@ export async function generateQA(): Promise<any> {
         })
       }
     ];
-    const { data: chatResponse } = await chatAPI.createChatCompletion(
-      {
-        model: global.qaModel.model,
-        temperature: 0.01,
-        messages,
-        stream: false
-      },
-      {
-        timeout: 480000,
-        ...axiosConfig()
-      }
-    );
+    const ai = getAIApi(undefined, 480000);
+    const chatResponse = await ai.chat.completions.create({
+      model: global.qaModel.model,
+      temperature: 0.01,
+      messages,
+      stream: false
+    });
     const answer = chatResponse.choices?.[0].message?.content;
     const totalTokens = chatResponse.usage?.total_tokens || 0;
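For context, a minimal sketch of how a getAIApi wrapper like the one imported above could be built, assuming it wraps the official `openai` v4 Node SDK. The UserKeyProps shape and the CHAT_API_KEY / OPENAI_BASE_URL environment variable names are illustrative assumptions, not taken from this commit; the real @fastgpt/core/ai/config module may differ.

import OpenAI from 'openai';

// Illustrative placeholder type for optional per-user credentials.
type UserKeyProps = { key?: string; baseUrl?: string };

export function getAIApi(props?: UserKeyProps, timeout = 60000): OpenAI {
  return new OpenAI({
    // Fall back to server-level credentials when no per-user key is passed.
    apiKey: props?.key ?? process.env.CHAT_API_KEY,
    baseURL: props?.baseUrl ?? process.env.OPENAI_BASE_URL,
    // Request timeout in milliseconds; the QA queue above passes 480000 (8 minutes).
    timeout
  });
}

Called as in the diff, getAIApi(undefined, 480000) would return a client using the default credentials with an 8-minute timeout, replacing the per-request `timeout: 480000, ...axiosConfig()` options of the old axios-based call.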