mirror of
https://github.com/LLM-Red-Team/kimi-free-api.git
synced 2025-10-18 08:14:39 +00:00
Support native multi-turn conversation
@@ -13,22 +13,22 @@ export default {
 
     '/completions': async (request: Request) => {
         request
+            .validate('body.conversation_id', v => _.isUndefined(v) || _.isString(v))
             .validate('body.messages', _.isArray)
             .validate('headers.authorization', _.isString)
         // split the refresh_token list
         const tokens = chat.tokenSplit(request.headers.authorization);
         // pick one refresh_token at random
         const token = _.sample(tokens);
-        const model = request.body.model;
-        const messages = request.body.messages;
-        if (request.body.stream) {
-            const stream = await chat.createCompletionStream(model, messages, token, request.body.use_search);
+        const { model, conversation_id: convId, messages, stream, use_search } = request.body;
+        if (stream) {
+            const stream = await chat.createCompletionStream(model, messages, token, use_search, convId);
             return new Response(stream, {
                 type: "text/event-stream"
             });
         }
         else
-            return await chat.createCompletion(model, messages, token, request.body.use_search);
+            return await chat.createCompletion(model, messages, token, use_search, convId);
     }
 
 }
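What the change buys a caller: passing conversation_id alongside messages lets the handler hand the request to an existing server-side conversation instead of replaying history on every call. The sketch below is a hypothetical client, not code from this repo; the base URL, port, and model name are assumptions, while the body fields (model, conversation_id, messages, stream, use_search) and the Bearer refresh_token header mirror what the handler above reads.

// Hypothetical client sketch (TypeScript, Node 18+ fetch).
// Assumed: base URL/port and the "kimi" model name; body fields match the handler in the diff.
async function continueConversation(refreshToken: string, convId: string): Promise<void> {
    const response = await fetch("http://127.0.0.1:8000/v1/chat/completions", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            // The handler splits this header into refresh_tokens and samples one at random.
            "Authorization": `Bearer ${refreshToken}`
        },
        body: JSON.stringify({
            model: "kimi",
            conversation_id: convId,   // reuse an existing conversation for native multi-turn context
            messages: [{ role: "user", content: "Please continue from where we left off." }],
            stream: false,
            use_search: false
        })
    });
    console.log(await response.json());
}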