feat: maxtokens

Author: archer
Date: 2023-04-04 23:00:01 +08:00
Commit: afc5947bfb
Parent: 40189a6899
2 changed files with 2 additions and 9 deletions


@@ -23,7 +23,6 @@ export type ModelConstantsData = {
   maxToken: number;
   contextMaxToken: number;
   maxTemperature: number;
-  trainedMaxToken: number; // max tokens after fine-tuning
   price: number; // price per 1 token; unit: 0.00001 CNY
 };
@@ -34,8 +33,7 @@ export const modelList: ModelConstantsData[] = [
     model: ChatModelNameEnum.GPT35,
     trainName: '',
     maxToken: 4000,
-    contextMaxToken: 7000,
-    trainedMaxToken: 2000,
+    contextMaxToken: 7500,
     maxTemperature: 2,
     price: 3
   },
@@ -46,7 +44,6 @@ export const modelList: ModelConstantsData[] = [
     trainName: 'vector',
     maxToken: 4000,
     contextMaxToken: 7000,
-    trainedMaxToken: 2000,
     maxTemperature: 1,
     price: 3
   }
@@ -57,7 +54,6 @@ export const modelList: ModelConstantsData[] = [
   // trainName: 'davinci',
   // maxToken: 4000,
   // contextMaxToken: 7500,
-  // trainedMaxToken: 2000,
   // maxTemperature: 2,
   // price: 30
   // }
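For reference, a minimal TypeScript sketch of the model constants after this change: trainedMaxToken is removed from ModelConstantsData and the GPT-3.5 entry's contextMaxToken is raised to 7500. The ChatModelNameEnum string values and the presence of the model/trainName fields in the type are assumptions inferred from the surrounding entries, not shown in full by this diff.

// Sketch only: ModelConstantsData after this commit (trainedMaxToken removed).
// The enum string values below are placeholders, not confirmed by the diff.
export enum ChatModelNameEnum {
  GPT35 = 'gpt-3.5-turbo',
  VECTOR_GPT = 'vector-gpt'
}

export type ModelConstantsData = {
  model: ChatModelNameEnum;
  trainName: string;
  maxToken: number; // per-request completion limit, passed straight to max_tokens
  contextMaxToken: number; // budget for prompt plus history
  maxTemperature: number;
  price: number; // price per 1 token; unit: 0.00001 CNY
};

export const modelList: ModelConstantsData[] = [
  {
    model: ChatModelNameEnum.GPT35,
    trainName: '',
    maxToken: 4000,
    contextMaxToken: 7500, // raised from 7000 in this commit
    maxTemperature: 2,
    price: 3
  },
  {
    model: ChatModelNameEnum.VECTOR_GPT, // placeholder enum member
    trainName: 'vector',
    maxToken: 4000,
    contextMaxToken: 7000,
    maxTemperature: 1,
    price: 3
  }
];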


@@ -87,10 +87,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       temperature: temperature,
       prompt: promptText,
       stream: true,
-      max_tokens:
-        model.trainingTimes > 0
-          ? modelConstantsData.trainedMaxToken
-          : modelConstantsData.maxToken,
+      max_tokens: modelConstantsData.maxToken,
       presence_penalty: -0.5, // larger values make new content more likely
       frequency_penalty: 0.5, // larger values reduce repetition
       stop: [`###`, '。!?.!.']
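The handler change itself is small: the max_tokens branch on model.trainingTimes goes away. Below is a hedged sketch of what the call looks like afterwards, assuming the openai v3 SDK's createCompletion with an axios stream response; the client setup, model name, import path, and function signature are illustrative and not taken from the diff.

import { Configuration, OpenAIApi } from 'openai';
import type { ModelConstantsData } from './model'; // hypothetical import path

// Sketch: the completion request after this commit. max_tokens always comes
// from modelConstantsData.maxToken; the old model.trainingTimes branch is gone.
async function streamCompletion(
  promptText: string,
  temperature: number,
  modelConstantsData: ModelConstantsData
) {
  const openai = new OpenAIApi(
    new Configuration({ apiKey: process.env.OPENAI_API_KEY })
  );

  return openai.createCompletion(
    {
      model: 'text-davinci-003', // placeholder; the real handler resolves the model elsewhere
      temperature,
      prompt: promptText,
      stream: true,
      max_tokens: modelConstantsData.maxToken, // no longer depends on training state
      presence_penalty: -0.5, // larger values make new content more likely
      frequency_penalty: 0.5, // larger values reduce repetition
      stop: ['###', '。!?.!.']
    },
    { responseType: 'stream' } // axios option so the response body can be read as a stream
  );
}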