fix: json schema parse error; fix: retraining image reset (#4757)

* i18n

* fix: json schema parse error

* fix: retraining image reset

* update doc

Author: Archer
Date: 2025-05-07 15:38:03 +08:00 (committed by GitHub)
Parent: 2d3117c5da
Commit: 96e7dd581e
12 changed files with 54 additions and 24 deletions

View File

@@ -11,6 +11,7 @@ import type {
 import { getLLMModel } from './model';
 import { getLLMDefaultUsage } from '@fastgpt/global/core/ai/constants';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
+import json5 from 'json5';
 /*
   Count response max token
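
The json5 import above is what the "json schema parse error" half of the title is about: the schema string is now parsed on the server instead of being forwarded verbatim (see the llmCompletionsBodyFormat hunks below). A minimal sketch of why json5.parse is more forgiving than JSON.parse for a hand-entered schema; the example input is invented, only json5.parse itself comes from the diff:

import json5 from 'json5';

// Hand-written JSON schemas often contain unquoted keys, single quotes or
// trailing commas. JSON.parse rejects all of these; json5.parse accepts them.
const raw = `{
  type: 'object',
  properties: {
    answer: { type: 'string' },
  },
}`;

const schema = json5.parse(raw); // parses fine
// JSON.parse(raw);              // would throw a SyntaxError
console.log(schema.properties.answer.type); // "string"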
@@ -54,8 +55,6 @@ type InferCompletionsBody<T> = T extends { stream: true }
 export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
   body: T & {
     response_format?: any;
-    json_schema?: string;
     stop?: string;
   },
   model: string | LLMModelItemType
@@ -65,8 +64,26 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
     return body as unknown as InferCompletionsBody<T>;
   }
-  const response_format = body.response_format;
-  const json_schema = body.json_schema ?? undefined;
+  const response_format = (() => {
+    if (!body.response_format?.type) return undefined;
+    if (body.response_format.type === 'json_schema') {
+      try {
+        return {
+          type: 'json_schema',
+          json_schema: json5.parse(body.response_format?.json_schema as unknown as string)
+        };
+      } catch (error) {
+        throw new Error('Json schema error');
+      }
+    }
+    if (body.response_format.type) {
+      return {
+        type: body.response_format.type
+      };
+    }
+    return undefined;
+  })();
   const stop = body.stop ?? undefined;
   const requestBody: T = {
@@ -80,12 +97,7 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
         })
       : undefined,
     ...modelData?.defaultConfig,
-    response_format: response_format
-      ? {
-          type: response_format,
-          json_schema
-        }
-      : undefined,
+    response_format,
     stop: stop?.split('|')
   };
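
With the hunks above applied, llmCompletionsBodyFormat owns all response_format handling: callers hand it a { type, json_schema } object, the json_schema string is json5-parsed (throwing 'Json schema error' on failure), and plain types are passed through as { type }. A sketch of the new contract; the function and field names come from the diff, while the model name, messages variable and schema literal are invented:

const requestBody = llmCompletionsBodyFormat(
  {
    model: 'gpt-4o-mini', // assumed value
    messages, // assumed to be built elsewhere
    stream: false,
    response_format: {
      type: 'json_schema',
      // still a raw string here; parsed by json5 inside the formatter
      json_schema: `{ name: 'answer', schema: { type: 'object' } }`
    },
    stop: 'END|STOP'
  },
  'gpt-4o-mini' // model name or LLMModelItemType
);
// requestBody.response_format -> { type: 'json_schema', json_schema: { name: 'answer', schema: { type: 'object' } } }
// requestBody.stop            -> ['END', 'STOP']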

View File

@@ -235,8 +235,10 @@ export const runToolWithFunctionCall = async (
       max_tokens,
       top_p: aiChatTopP,
       stop: aiChatStopSign,
-      response_format: aiChatResponseFormat,
-      json_schema: aiChatJsonSchema
+      response_format: {
+        type: aiChatResponseFormat as any,
+        json_schema: aiChatJsonSchema
+      }
     },
     toolModel
   );
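
The same change, from two flat fields to a single nested object, repeats in runToolWithPromptCall, runToolWithToolChoice and dispatchChatCompletion below: each call site now nests json_schema under response_format and leaves parsing to llmCompletionsBodyFormat. A hypothetical type for what the call sites pass; the field names come from the diff, but the type itself is only a sketch and is not declared in the commit (hence the `as any` casts at the call sites):

type AiChatResponseFormatArg = {
  // likely 'text' | 'json_object' | 'json_schema'; not pinned down in this commit
  type?: string;
  // raw schema string; json5-parsed inside llmCompletionsBodyFormat
  json_schema?: string;
};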

View File

@@ -243,8 +243,10 @@ export const runToolWithPromptCall = async (
       max_tokens,
       top_p: aiChatTopP,
       stop: aiChatStopSign,
-      response_format: aiChatResponseFormat,
-      json_schema: aiChatJsonSchema
+      response_format: {
+        type: aiChatResponseFormat as any,
+        json_schema: aiChatJsonSchema
+      }
     },
     toolModel
   );

View File

@@ -296,8 +296,10 @@ export const runToolWithToolChoice = async (
       max_tokens,
       top_p: aiChatTopP,
       stop: aiChatStopSign,
-      response_format: aiChatResponseFormat,
-      json_schema: aiChatJsonSchema
+      response_format: {
+        type: aiChatResponseFormat as any,
+        json_schema: aiChatJsonSchema
+      }
     },
     toolModel
   );

View File

@@ -192,8 +192,10 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       max_tokens,
       top_p: aiChatTopP,
       stop: aiChatStopSign,
-      response_format: aiChatResponseFormat as any,
-      json_schema: aiChatJsonSchema
+      response_format: {
+        type: aiChatResponseFormat as any,
+        json_schema: aiChatJsonSchema
+      }
     },
     modelConstantsData
   );

View File

@@ -33,6 +33,7 @@
   "organization_name": "Organization name",
   "payment_method": "Payment method",
   "payway_coupon": "Redeem code",
+  "rerank": "Rerank",
   "save": "save",
   "save_failed": "Save exception",
   "save_success": "Saved successfully",
View File

@@ -33,6 +33,7 @@
   "organization_name": "组织名称",
   "payment_method": "支付方式",
   "payway_coupon": "兑换码",
+  "rerank": "结果重排",
   "save": "保存",
   "save_failed": "保存异常",
   "save_success": "保存成功",
View File

@@ -33,6 +33,7 @@
   "organization_name": "組織名稱",
   "payment_method": "支付方式",
   "payway_coupon": "兌換碼",
+  "rerank": "結果重排",
   "save": "儲存",
   "save_failed": "儲存異常",
   "save_success": "儲存成功",