Fix some bug (#5048)

* fix: chat log time range

* fix: repeated system prompt

* perf: nanoid random

* fix: get files from histories

* fix: ts

* ts config

* perf: search dataset collection
This commit is contained in:
Archer
2025-06-17 16:10:01 +08:00
committed by GitHub
parent 7981b61ca9
commit af3221fa47
21 changed files with 121 additions and 79 deletions

View File

@@ -105,8 +105,9 @@ export const loadRequestMessages = async ({
const arrayContent = content
.filter((item) => item.text)
.map((item) => ({ ...item, text: addEndpointToImageUrl(item.text) }));
if (arrayContent.length === 0) return;
.map((item) => addEndpointToImageUrl(item.text))
.join('\n');
return arrayContent;
};
// Parse user content(text and img) Store history => api messages

View File

@@ -42,8 +42,8 @@ type Response = DispatchNodeResultType<{
}>;
export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<Response> => {
const {
node: { nodeId, name, isEntry, version },
let {
node: { nodeId, name, isEntry, version, inputs },
runtimeNodes,
runtimeEdges,
histories,
@@ -70,6 +70,11 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
props.params.aiChatVision = aiChatVision && toolModel.vision;
props.params.aiChatReasoning = aiChatReasoning && toolModel.reasoning;
const fileUrlInput = inputs.find((item) => item.key === NodeInputKeyEnum.fileUrlList);
if (!fileUrlInput || !fileUrlInput.value || fileUrlInput.value.length === 0) {
fileLinks = undefined;
}
console.log(fileLinks, 22);
const toolNodeIds = filterToolNodeIdByEdges({ nodeId, edges: runtimeEdges });

View File

@@ -13,7 +13,6 @@ import { createChatCompletion } from '../../../ai/config';
import type {
ChatCompletionMessageParam,
CompletionFinishReason,
CompletionUsage,
StreamChatType
} from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
@@ -45,7 +44,8 @@ import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/
import { responseWriteController } from '../../../../common/response';
import { getLLMModel } from '../../../ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { checkQuoteQAValue, getHistories } from '../utils';
import { filterSearchResultsByMaxChars } from '../../utils';
@@ -82,7 +82,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
retainDatasetCite = true,
externalProvider,
histories,
node: { name, version },
node: { name, version, inputs },
query,
runningUserInfo,
workflowStreamResponse,
@@ -119,6 +119,11 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
aiChatVision = modelConstantsData.vision && aiChatVision;
aiChatReasoning = !!aiChatReasoning && !!modelConstantsData.reasoning;
// Check fileLinks is reference variable
const fileUrlInput = inputs.find((item) => item.key === NodeInputKeyEnum.fileUrlList);
if (!fileUrlInput || !fileUrlInput.value || fileUrlInput.value.length === 0) {
fileLinks = undefined;
}
const chatHistories = getHistories(history, histories);
quoteQA = checkQuoteQAValue(quoteQA);

View File

@@ -84,10 +84,12 @@ export const filterToolNodeIdByEdges = ({
export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
if (!history) return [];
const systemHistories = histories.filter((item) => item.obj === ChatRoleEnum.System);
const systemHistoryIndex = histories.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemHistories = histories.slice(0, systemHistoryIndex);
const chatHistories = histories.slice(systemHistoryIndex);
const filterHistories = (() => {
if (typeof history === 'number') return histories.slice(-(history * 2));
if (typeof history === 'number') return chatHistories.slice(-(history * 2));
if (Array.isArray(history)) return history;
return [];
})();

View File

@@ -1,10 +1,7 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@fastgpt/servive/*": ["./*"]
}
"baseUrl": "."
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../../**/*.d.ts"]
}