fix: json schema parse error; fix: retraining image reset (#4757)

* i18n

* fix: json schema parse error

* fix: retraining image reset

* update doc
This commit is contained in:
Archer
2025-05-07 15:38:03 +08:00
committed by GitHub
parent 2d3117c5da
commit 96e7dd581e
12 changed files with 54 additions and 24 deletions

View File

@@ -24,4 +24,6 @@ weight: 792
1. 应用列表/知识库列表,删除行权限展示问题。
2. 打开知识库搜索参数后,重排选项自动被打开。
3. LLM json_schema 模式 API 请求格式错误。
4. 重新训练时,图片过期索引未成功清除,导致图片会丢失。

View File

@@ -11,6 +11,7 @@ import type {
import { getLLMModel } from './model';
import { getLLMDefaultUsage } from '@fastgpt/global/core/ai/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import json5 from 'json5';
/*
Count response max token
@@ -54,8 +55,6 @@ type InferCompletionsBody<T> = T extends { stream: true }
export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
body: T & {
response_format?: any;
json_schema?: string;
stop?: string;
},
model: string | LLMModelItemType
@@ -65,8 +64,26 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
return body as unknown as InferCompletionsBody<T>;
}
const response_format = body.response_format;
const json_schema = body.json_schema ?? undefined;
const response_format = (() => {
if (!body.response_format?.type) return undefined;
if (body.response_format.type === 'json_schema') {
try {
return {
type: 'json_schema',
json_schema: json5.parse(body.response_format?.json_schema as unknown as string)
};
} catch (error) {
throw new Error('Json schema error');
}
}
if (body.response_format.type) {
return {
type: body.response_format.type
};
}
return undefined;
})();
const stop = body.stop ?? undefined;
const requestBody: T = {
@@ -80,12 +97,7 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(
})
: undefined,
...modelData?.defaultConfig,
response_format: response_format
? {
type: response_format,
json_schema
}
: undefined,
response_format,
stop: stop?.split('|')
};

View File

@@ -235,8 +235,10 @@ export const runToolWithFunctionCall = async (
max_tokens,
top_p: aiChatTopP,
stop: aiChatStopSign,
response_format: aiChatResponseFormat,
response_format: {
type: aiChatResponseFormat as any,
json_schema: aiChatJsonSchema
}
},
toolModel
);

View File

@@ -243,8 +243,10 @@ export const runToolWithPromptCall = async (
max_tokens,
top_p: aiChatTopP,
stop: aiChatStopSign,
response_format: aiChatResponseFormat,
response_format: {
type: aiChatResponseFormat as any,
json_schema: aiChatJsonSchema
}
},
toolModel
);

View File

@@ -296,8 +296,10 @@ export const runToolWithToolChoice = async (
max_tokens,
top_p: aiChatTopP,
stop: aiChatStopSign,
response_format: aiChatResponseFormat,
response_format: {
type: aiChatResponseFormat as any,
json_schema: aiChatJsonSchema
}
},
toolModel
);

View File

@@ -192,8 +192,10 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
max_tokens,
top_p: aiChatTopP,
stop: aiChatStopSign,
response_format: aiChatResponseFormat as any,
response_format: {
type: aiChatResponseFormat as any,
json_schema: aiChatJsonSchema
}
},
modelConstantsData
);

View File

@@ -33,6 +33,7 @@
"organization_name": "Organization name",
"payment_method": "Payment method",
"payway_coupon": "Redeem code",
"rerank": "Rerank",
"save": "save",
"save_failed": "Save exception",
"save_success": "Saved successfully",

View File

@@ -33,6 +33,7 @@
"organization_name": "组织名称",
"payment_method": "支付方式",
"payway_coupon": "兑换码",
"rerank": "结果重排",
"save": "保存",
"save_failed": "保存异常",
"save_success": "保存成功",

View File

@@ -33,6 +33,7 @@
"organization_name": "組織名稱",
"payment_method": "支付方式",
"payway_coupon": "兌換碼",
"rerank": "結果重排",
"save": "儲存",
"save_failed": "儲存異常",
"save_success": "儲存成功",

View File

@@ -51,7 +51,7 @@ async function handler(
if (!collection.fileId) return Promise.reject('fileId is missing');
return {
type: DatasetSourceReadTypeEnum.fileLocal,
sourceId: collection.fileId
sourceId: String(collection.fileId)
};
}
if (collection.type === DatasetCollectionTypeEnum.apiFile) {
@@ -94,6 +94,7 @@ async function handler(
const { collectionId } = await createCollectionAndInsertData({
dataset: collection.dataset,
rawText,
relatedId: collection.metadata?.relatedImgId,
createCollectionParams: {
...data,
teamId: collection.teamId,

View File

@@ -95,12 +95,13 @@ async function handler(req: ApiRequestProps<SearchTestProps>): Promise<SearchTes
});
// push bill
const source = apikey ? UsageSourceEnum.api : UsageSourceEnum.fastgpt;
const { totalPoints: embeddingTotalPoints } = pushGenerateVectorUsage({
teamId,
tmbId,
inputTokens: embeddingTokens,
model: dataset.vectorModel,
source: apikey ? UsageSourceEnum.api : UsageSourceEnum.fastgpt,
source,
...(queryExtensionResult && {
extensionModel: queryExtensionResult.model,
@@ -118,7 +119,8 @@ async function handler(req: ApiRequestProps<SearchTestProps>): Promise<SearchTes
teamId,
tmbId,
inputTokens: reRankInputTokens,
model: rerankModelData.model
model: rerankModelData.model,
source
})
: { totalPoints: 0 };

View File

@@ -284,12 +284,14 @@ export const pushRerankUsage = ({
teamId,
tmbId,
model,
inputTokens
inputTokens,
source = UsageSourceEnum.fastgpt
}: {
teamId: string;
tmbId: string;
model: string;
inputTokens: number;
source?: UsageSourceEnum;
}) => {
const { totalPoints, modelName } = formatModelChars2Points({
model,
@@ -300,9 +302,9 @@ export const pushRerankUsage = ({
createUsage({
teamId,
tmbId,
appName: modelName,
appName: i18nT('account_bill:rerank'),
totalPoints,
source: UsageSourceEnum.fastgpt,
source,
list: [
{
moduleName: modelName,