From fbfc70e2bf0a2187479020ff27f46d2c41787d01 Mon Sep 17 00:00:00 2001
From: Clivia <132346501+Yanyutin753@users.noreply.github.com>
Date: Mon, 11 Mar 2024 14:09:18 +0800
Subject: [PATCH] feat: pass the url directly for faster upload
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/client/platforms/openai.ts | 66 +++++++++++++++++++---------------
 1 file changed, 37 insertions(+), 29 deletions(-)

diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 0f94368..a43bab9 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -99,13 +99,9 @@ export class ChatGPTApi implements LLMApi {
             role: v.role,
             content: [],
           };
-          message.content.push({
-            type: "text",
-            text: v.content,
-          });
           if (v.image_url) {
             let image_url_data = "";
-            if (options.config.updateTypes) {
+            if (options.config.updateTypes && !options.config.model.includes("moonshot")) {
               var base64Data = await getImageBase64Data(v.image_url);
               let mimeType: string | null;
               try {
@@ -136,14 +132,41 @@
               var url = window.location.protocol + "//" + window.location.hostname + port;
               image_url_data = encodeURI(`${url}${v.image_url}`)
             }
-            message.content.push({
-              type: "image_url",
-              image_url: {
-                url: `${image_url_data}`,
-              },
-            });
+            if (options.config.model.includes("moonshot")) {
+              messages.push({
+                role: v.role,
+                content: `${image_url_data} ${v.content}`,
+              });
+            }
+            else {
+              message.content.push({
+                type: "text",
+                text: v.content,
+              });
+              message.content.push({
+                type: "image_url",
+                image_url: {
+                  url: `${image_url_data}`,
+                },
+              });
+              messages.push(message);
+            }
+          }
+          else {
+            if (options.config.model.includes("moonshot")) {
+              messages.push({
+                role: v.role,
+                content: v.content,
+              });
+            }
+            else {
+              message.content.push({
+                type: "text",
+                text: v.content,
+              });
+              messages.push(message);
+            }
           }
-          messages.push(message);
         }
       } else {
        options.messages.map((v) =>
@@ -176,16 +199,8 @@
       // max_tokens: Math.max(modelConfig.max_tokens, 1024),
       // Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
     };
-    // Used to hide the passed request parameters
-    const moonshotPayload = {
-      messages,
-      stream: options.config.stream,
-      model: modelConfig.model,
-      use_search:
-        modelConfig.model.includes("vision")
-          ? false
-          : true,
-    }
+
+    console.log("[Request] openai payload: ", requestPayload);

     const shouldStream = !!options.config.stream;
     const controller = new AbortController();
@@ -199,13 +214,6 @@
       signal: controller.signal,
       headers: getHeaders(),
     };
-    if (modelConfig.model.includes("moonshot")) {
-      console.log("[Request] moonshot payload: ", moonshotPayload);
-      chatPayload.body = JSON.stringify(moonshotPayload)
-    }
-    else {
-      console.log("[Request] openai payload: ", requestPayload);
-    }

     // make a fetch request
     const requestTimeoutId = setTimeout(