feat: add custom temperature and top_p (#1260)

* allow customizing temperature and top_p from the advanced panel in Settings

* change default temperature from 0.8 to 0.5

* perf: review the code and add a check so the parameters only apply to the API-based interface (ChatGPTAPI)

* chore: lock pnpm-lock.yaml

---------

Co-authored-by: ChenZhaoYu <790348264@qq.com>
Author: quzard
Date: 2023-04-07 14:32:09 +08:00
Committed-by: GitHub
Parent: 1187d88593
Commit: 86bba7d8f3

10 changed files with 65 additions and 8 deletions
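
Taken together, the server-side diffs below thread two optional sampling parameters from the request body into the OpenAI completion call. As a quick orientation, a client request exercising the new fields might look like the following sketch. Field names match the RequestProps interface in the last diff; the /api prefix, the header set, and the example values are assumptions, not part of this commit:

// Hypothetical client call; only the body field names are taken from the diff.
const res = await fetch('/api/chat-process', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    prompt: 'Explain nucleus sampling in one sentence.',
    systemMessage: 'You are a helpful assistant.',
    temperature: 0.5, // new in this commit; default changed from 0.8 to 0.5
    top_p: 1,         // new in this commit; nucleus sampling cutoff
  }),
})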


@@ -27,6 +27,7 @@ const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT
 const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true'

 let apiModel: ApiModel
+let model = 'gpt-3.5-turbo'

 if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN))
   throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable')
@@ -39,7 +40,7 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
 if (isNotEmptyString(process.env.OPENAI_API_KEY)) {
   const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL
   const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL
-  const model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'
+  model = isNotEmptyString(OPENAI_API_MODEL) ? OPENAI_API_MODEL : 'gpt-3.5-turbo'

   const options: ChatGPTAPIOptions = {
     apiKey: process.env.OPENAI_API_KEY,
@@ -90,13 +91,14 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI
 })()

 async function chatReplyProcess(options: RequestOptions) {
-  const { message, lastContext, process, systemMessage } = options
+  const { message, lastContext, process, systemMessage, temperature, top_p } = options
   try {
     let options: SendMessageOptions = { timeoutMs }

     if (apiModel === 'ChatGPTAPI') {
       if (isNotEmptyString(systemMessage))
         options.systemMessage = systemMessage
+      options.completionParams = { model, temperature, top_p }
     }

     if (lastContext != null) {
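
For context, completionParams is the chatgpt npm package's passthrough into the body of the OpenAI request, so the assembled options amount to something like the sketch below. sendMessage, onProgress, and completionParams are real options of that package's ChatGPTAPI; the wiring shown is a simplification (parentMessageId handling from lastContext is omitted), not the commit's literal code:

// Simplified sketch of what the assembled SendMessageOptions resolve to.
// Assumption: api is the ChatGPTAPI instance created above.
const reply = await api.sendMessage(message, {
  timeoutMs,
  systemMessage,
  completionParams: { model, temperature, top_p }, // forwarded to the OpenAI request body
  onProgress: chat => process?.(chat),             // streams partial ChatMessage updates
})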


@@ -5,6 +5,8 @@ export interface RequestOptions {
   lastContext?: { conversationId?: string; parentMessageId?: string }
   process?: (chat: ChatMessage) => void
   systemMessage?: string
+  temperature?: number
+  top_p?: number
 }

 export interface BalanceResponse {
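
Because both new fields are optional, existing callers of chatReplyProcess keep compiling unchanged; only callers that opt in pass them. For illustration (values are made up):

// Both object literals satisfy the updated interface:
const legacy: RequestOptions = { message: 'hi' }
const tuned: RequestOptions = { message: 'hi', temperature: 0.5, top_p: 0.9 }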


@@ -23,7 +23,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
   res.setHeader('Content-type', 'application/octet-stream')

   try {
-    const { prompt, options = {}, systemMessage } = req.body as RequestProps
+    const { prompt, options = {}, systemMessage, temperature, top_p } = req.body as RequestProps
     let firstChunk = true
     await chatReplyProcess({
       message: prompt,
@@ -33,6 +33,8 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
         firstChunk = false
       },
       systemMessage,
+      temperature,
+      top_p,
     })
   }
   catch (error) {
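
The route keeps its existing streaming contract: the firstChunk flag above suggests every chunk after the first is prefixed with a newline, so each line of the octet-stream is one JSON-encoded ChatMessage. Continuing the client sketch from earlier, a simplified (non-streaming) way to read the result might be; treat the line-per-chunk framing as an assumption inferred from the diff:

// Read the whole body, then take the last line as the final message.
const text = await res.text()
const lines = text.split('\n')
const final = JSON.parse(lines[lines.length - 1]) // complete ChatMessage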


@@ -4,6 +4,8 @@ export interface RequestProps {
   prompt: string
   options?: ChatContext
   systemMessage: string
+  temperature?: number
+  top_p?: number
 }

 export interface ChatContext {