Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-27 08:25:07 +00:00)
fix: duplicate requests on continuous scrolling; perf: training parameters
@@ -13,6 +13,7 @@ export type ModelConstantsData = {
   trainName: string; // an empty string means the model cannot be fine-tuned
   maxToken: number;
   maxTemperature: number;
+  trainedMaxToken: number; // maximum tokens after fine-tuning
   price: number; // price per 1 token, unit: 0.00001 CNY
 };
@@ -23,6 +24,7 @@ export const modelList: ModelConstantsData[] = [
     model: ChatModelNameEnum.GPT35,
     trainName: '',
     maxToken: 4000,
+    trainedMaxToken: 2000,
     maxTemperature: 2,
     price: 3
   },
@@ -32,6 +34,7 @@ export const modelList: ModelConstantsData[] = [
     model: ChatModelNameEnum.GPT3,
     trainName: 'davinci',
     maxToken: 4000,
+    trainedMaxToken: 2000,
     maxTemperature: 2,
     price: 30
   }
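The new trainedMaxToken field gives each model a separate, smaller completion budget to use once it has been fine-tuned, and trainName marks which models can be fine-tuned at all (an empty string means never). A minimal TypeScript sketch of how the list might be consumed; the import path and the helper name findTrainableModel are assumptions, not part of this commit:

import { modelList, ModelConstantsData } from '@/constants/model'; // import path assumed

// Hypothetical lookup: find a model's constants by its fine-tune base name.
// Models whose trainName is the empty string cannot be fine-tuned.
function findTrainableModel(trainName: string): ModelConstantsData | undefined {
  return modelList.find((item) => item.trainName !== '' && item.trainName === trainName);
}

const davinci = findTrainableModel('davinci');
// For davinci: maxToken 4000 before training, trainedMaxToken 2000 once fine-tuned,
// and price 30, i.e. 0.0003 CNY per token at the 0.00001 CNY unit.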
@@ -59,7 +59,10 @@ export const usePaging = <T = any>({
     [api, isLoadAll, pageSize, params, requesting, toast]
   );

-  const nextPage = useCallback(() => getData(pageNum + 1), [getData, pageNum]);
+  const nextPage = useCallback(() => {
+    if (requesting || isLoadAll) return;
+    getData(pageNum + 1);
+  }, [getData, isLoadAll, pageNum, requesting]);

   useQuery(['init'], () => getData(1, true));
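This guarded nextPage is the fix for duplicate requests while scrolling: a scroll handler can call it on every event, but a new page is only requested when nothing is in flight and more pages remain. A standalone sketch of the same pattern outside the hook (all names here are illustrative):

// Illustrative de-duplication pattern: many scroll events, at most one request in flight.
let requesting = false;
let isLoadAll = false;
let pageNum = 1;

async function getData(page: number): Promise<void> {
  requesting = true;
  try {
    // ...fetch page `page`, append the rows, and set isLoadAll on the last page
    pageNum = page;
  } finally {
    requesting = false;
  }
}

function nextPage(): void {
  if (requesting || isLoadAll) return; // ignore scroll events while a request is pending
  void getData(pageNum + 1);
}

// e.g. from an onScroll handler, once the user nears the bottom of the list:
// if (el.scrollTop + el.clientHeight >= el.scrollHeight - 100) nextPage();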
@@ -69,7 +69,17 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   // get the chatAPI instance
   const chatAPI = getOpenAIApi(userApiKey || systemKey);
   let startTime = Date.now();

+  // console.log({
+  //   model: model.service.chatModel,
+  //   temperature: temperature,
+  //   prompt: promptText,
+  //   stream: true,
+  //   max_tokens:
+  //     model.trainingTimes > 0 ? modelConstantsData.trainedMaxToken : modelConstantsData.maxToken,
+  //   presence_penalty: -0.5, // higher values make new topics more likely
+  //   frequency_penalty: 0.5, // higher values reduce repetition
+  //   stop: [`</s>`, '。!?.!.']
+  // });
   // send the request
   const chatResponse = await chatAPI.createCompletion(
     {
@@ -77,9 +87,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       temperature: temperature,
       prompt: promptText,
       stream: true,
-      max_tokens: modelConstantsData.maxToken,
-      presence_penalty: 0, // higher values make new topics more likely
-      frequency_penalty: 0, // higher values reduce repetition
+      max_tokens:
+        model.trainingTimes > 0
+          ? modelConstantsData.trainedMaxToken
+          : modelConstantsData.maxToken,
+      presence_penalty: -0.5, // higher values make new topics more likely
+      frequency_penalty: 0.5, // higher values reduce repetition
       stop: [`</s>`, '。!?.!.']
     },
     {
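The request body now picks its token budget from the model's training state and shifts the sampling penalties. A small self-contained restatement of that tuning (the function is illustrative, not code from the repo):

// A fine-tuned model (trainingTimes > 0) answers within the smaller trainedMaxToken budget;
// presence_penalty -0.5 keeps the reply close to topics already present, and
// frequency_penalty 0.5 discourages verbatim repetition.
function completionTuning(opts: { maxToken: number; trainedMaxToken: number; trainingTimes: number }) {
  return {
    max_tokens: opts.trainingTimes > 0 ? opts.trainedMaxToken : opts.maxToken,
    presence_penalty: -0.5,
    frequency_penalty: 0.5,
    stop: ['</s>', '。!?.!.']
  };
}

console.log(completionTuning({ maxToken: 4000, trainedMaxToken: 2000, trainingTimes: 1 }));
// -> { max_tokens: 2000, presence_penalty: -0.5, frequency_penalty: 0.5, stop: [...] }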
@@ -108,7 +121,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       try {
         const json = JSON.parse(data);
         const content: string = json?.choices?.[0].text || '';
-        console.log('content:', content);
+        // console.log('content:', content);
         if (!content || (responseContent === '' && content === '\n')) return;

         responseContent += content;
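The guard kept in this hunk is what stops a streamed answer from starting with a blank line: empty chunks are dropped, and a lone newline is dropped only while nothing has been accumulated yet. The same filter in isolation (names illustrative):

let responseContent = '';

// Skip empty chunks, and skip a leading "\n" while the buffer is still empty.
function pushChunk(content: string): void {
  if (!content || (responseContent === '' && content === '\n')) return;
  responseContent += content;
}

['', '\n', 'Hello', '\n', 'world'].forEach(pushChunk);
console.log(JSON.stringify(responseContent)); // "Hello\nworld"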
@@ -83,7 +83,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     {
       training_file: uploadFileId,
       model: trainingType,
-      suffix: model.name
+      suffix: model.name,
+      n_epochs: 2
     },
     { httpsAgent }
   );
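The fine-tune request now pins n_epochs to 2 instead of leaving it to the API default, which keeps training time and cost predictable. A hedged sketch of the call as it reads after this change, assuming the openai v3 SDK that the surrounding handler already uses; the wrapper function is illustrative:

import { OpenAIApi } from 'openai';
import { Agent } from 'https';

// Sketch of the legacy fine-tunes call with the epoch count fixed at 2.
async function startFineTune(
  openai: OpenAIApi,
  uploadFileId: string,
  trainingType: string,
  modelName: string,
  httpsAgent?: Agent
) {
  return openai.createFineTune(
    {
      training_file: uploadFileId, // id of the previously uploaded training file
      model: trainingType, // base model to fine-tune, e.g. 'davinci'
      suffix: modelName, // appended to the resulting fine-tuned model name
      n_epochs: 2 // number of passes over the training data
    },
    { httpsAgent } // axios request options, e.g. to route through a proxy agent
  );
}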