Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-22 20:37:48 +00:00
perf: forbid image to base64 (#3038)
* perf: forbid image to base64
* update file upload path
* feat: support promptCall use image
* fix: echarts load
* update doc
@@ -104,6 +104,9 @@ export const loadRequestMessages = async ({
 }) => {
   // Load image to base64
   const loadImageToBase64 = async (messages: ChatCompletionContentPart[]) => {
+    if (process.env.MULTIPLE_DATA_TO_BASE64 === 'false') {
+      return messages;
+    }
     return Promise.all(
       messages.map(async (item) => {
         if (item.type === 'image_url') {
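The new guard makes the image-to-base64 conversion opt-out: setting MULTIPLE_DATA_TO_BASE64=false forwards image URLs to the model untouched, while the unset/default case keeps the old conversion behavior. A minimal self-contained sketch of the pattern; the types and the fetchImageAsBase64 helper below are simplified stand-ins, not FastGPT's real definitions:

type TextPart = { type: 'text'; text: string };
type ImagePart = { type: 'image_url'; image_url: { url: string } };
type ContentPart = TextPart | ImagePart;

// Stand-in for the conversion loadRequestMessages performs: fetch the image
// and re-encode it as a data URI.
const fetchImageAsBase64 = async (url: string): Promise<string> => {
  const res = await fetch(url);
  const contentType = res.headers.get('content-type') || 'image/jpeg';
  const base64 = Buffer.from(await res.arrayBuffer()).toString('base64');
  return `data:${contentType};base64,${base64}`;
};

const loadImageToBase64 = async (parts: ContentPart[]): Promise<ContentPart[]> => {
  // MULTIPLE_DATA_TO_BASE64=false short-circuits the conversion, so image URLs
  // are passed through to the model as-is.
  if (process.env.MULTIPLE_DATA_TO_BASE64 === 'false') {
    return parts;
  }
  return Promise.all(
    parts.map(async (part) =>
      part.type === 'image_url'
        ? { ...part, image_url: { url: await fetchImageAsBase64(part.image_url.url) } }
        : part
    )
  );
};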
@@ -185,7 +185,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
       // array, replace last element
       const lastText = lastMessage.content[lastMessage.content.length - 1];
       if (lastText.type === 'text') {
-        lastMessage.content = replaceVariable(Prompt_Tool_Call, {
+        lastText.text = replaceVariable(Prompt_Tool_Call, {
           question: lastText.text
         });
       } else {
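This one-line fix matters for multimodal input: the old assignment replaced the entire content array with a plain string, dropping any image parts, while the new one rewrites only the trailing text part. A rough illustration, with Prompt_Tool_Call and replaceVariable stubbed out as stand-ins for the real FastGPT helpers:

// Stand-ins, just to show the shape of the operation.
const Prompt_Tool_Call = 'You can call tools.\nQUESTION: {{question}}\nANSWER: ';
const replaceVariable = (template: string, vars: Record<string, string>) =>
  template.replace(/\{\{(\w+)\}\}/g, (_, key: string) => vars[key] ?? '');

type Part =
  | { type: 'text'; text: string }
  | { type: 'image_url'; image_url: { url: string } };

const content: Part[] = [
  { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } },
  { type: 'text', text: 'What is in this picture?' }
];

const lastText = content[content.length - 1];
if (lastText.type === 'text') {
  // New behavior: only the text part is rewritten; the image part survives.
  lastText.text = replaceVariable(Prompt_Tool_Call, { question: lastText.text });
}
console.log(content.length); // still 2: image_url + rewritten text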
@@ -176,17 +176,29 @@ export const runToolWithPromptCall = async (
   );
 
   const lastMessage = messages[messages.length - 1];
-  if (typeof lastMessage.content !== 'string') {
+  if (typeof lastMessage.content === 'string') {
+    lastMessage.content = replaceVariable(lastMessage.content, {
+      toolsPrompt
+    });
+  } else if (Array.isArray(lastMessage.content)) {
+    // array, replace last element
+    const lastText = lastMessage.content[lastMessage.content.length - 1];
+    if (lastText.type === 'text') {
+      lastText.text = replaceVariable(lastText.text, {
+        toolsPrompt
+      });
+    } else {
+      return Promise.reject('Prompt call invalid input');
+    }
+  } else {
     return Promise.reject('Prompt call invalid input');
   }
-  lastMessage.content = replaceVariable(lastMessage.content, {
-    toolsPrompt
-  });
+
   const filterMessages = await filterGPTMessageByMaxTokens({
     messages,
     maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
   });
 
   const [requestMessages, max_tokens] = await Promise.all([
     loadRequestMessages({
       messages: filterMessages,
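The same string-versus-array branching now appears in several spots of the prompt-call path. Purely as an illustration (not part of the commit), a hypothetical helper could centralize it; types are simplified:

type ContentPart =
  | { type: 'text'; text: string }
  | { type: 'image_url'; image_url: { url: string } };
type MessageContent = string | ContentPart[];

// Apply a text transform to a message: to the whole string, or to the last
// text part of a multimodal array. Mirrors the branching added above.
const mapLastText = (content: MessageContent, fn: (text: string) => string): MessageContent => {
  if (typeof content === 'string') {
    return fn(content);
  }
  const last = content[content.length - 1];
  if (last && last.type === 'text') {
    last.text = fn(last.text);
    return content;
  }
  throw new Error('Prompt call invalid input');
};

With such a helper, the hunk above would reduce to lastMessage.content = mapLastText(lastMessage.content, (t) => replaceVariable(t, { toolsPrompt })), with the invalid-input case surfaced as a thrown error inside the async function rather than an explicit Promise.reject.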
@@ -398,11 +410,27 @@ export const runToolWithPromptCall = async (
     : undefined;
 
   // get the next user prompt
-  lastMessage.content += `${replaceAnswer}
+  if (typeof lastMessage.content === 'string') {
+    lastMessage.content += `${replaceAnswer}
 TOOL_RESPONSE: """
 ${workflowInteractiveResponseItem ? `{{${INTERACTIVE_STOP_SIGNAL}}}` : toolsRunResponse.toolResponsePrompt}
 """
 ANSWER: `;
+  } else if (Array.isArray(lastMessage.content)) {
+    // array, replace last element
+    const lastText = lastMessage.content[lastMessage.content.length - 1];
+    if (lastText.type === 'text') {
+      lastText.text += `${replaceAnswer}
+TOOL_RESPONSE: """
+${workflowInteractiveResponseItem ? `{{${INTERACTIVE_STOP_SIGNAL}}}` : toolsRunResponse.toolResponsePrompt}
+"""
+ANSWER: `;
+    } else {
+      return Promise.reject('Prompt call invalid input');
+    }
+  } else {
+    return Promise.reject('Prompt call invalid input');
+  }
 
   const runTimes = (response?.runTimes || 0) + toolsRunResponse.toolResponse.runTimes;
   const toolNodeTokens = response?.toolNodeTokens ? response.toolNodeTokens + tokens : tokens;
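This hunk repeats the branching introduced at line 176 above, this time appending the TOOL_RESPONSE/ANSWER template instead of replacing the text. Reusing the hypothetical mapLastText sketch from earlier (illustration only; suffix stands for the template string built in the hunk):

// Append the tool-response template to the last text of the message,
// whether the content is a plain string or a multimodal part array.
const appendToLastText = (content: MessageContent, suffix: string): MessageContent =>
  mapLastText(content, (text) => text + suffix);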
@@ -18,11 +18,11 @@ import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/util
 import { getSystemPluginCb } from '../../../../../plugins/register';
 import { ContentTypes } from '@fastgpt/global/core/workflow/constants';
 import { replaceEditorVariable } from '@fastgpt/global/core/workflow/utils';
-import { uploadFile } from '../../../../common/file/gridfs/controller';
+import { uploadFileFromBase64Img } from '../../../../common/file/gridfs/controller';
 import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
 import { createFileToken } from '../../../../support/permission/controller';
-import { removeFilesByPaths } from '../../../../common/file/utils';
 import { JSONPath } from 'jsonpath-plus';
+import type { SystemPluginSpecialResponse } from '../../../../../plugins/type';
 
 type PropsArrType = {
   key: string;
@@ -376,27 +376,25 @@ async function replaceSystemPluginResponse({
   tmbId: string;
 }) {
   for await (const key of Object.keys(response)) {
-    if (typeof response[key] === 'object' && response[key].type === 'SYSTEM_PLUGIN_FILE') {
-      const fileObj = response[key];
-      const filename = fileObj.path.split('/').pop() || `${tmbId}-${Date.now()}`;
+    if (typeof response[key] === 'object' && response[key].type === 'SYSTEM_PLUGIN_BASE64') {
+      const fileObj = response[key] as SystemPluginSpecialResponse;
+      const filename = `${tmbId}-${Date.now()}.${fileObj.extension}`;
       try {
-        const fileId = await uploadFile({
+        const fileId = await uploadFileFromBase64Img({
           teamId,
           tmbId,
           bucketName: 'chat',
-          path: fileObj.path,
+          base64: fileObj.value,
           filename,
-          contentType: fileObj.contentType,
           metadata: {}
         });
-        response[key] = `${ReadFileBaseUrl}?filename=${filename}&token=${await createFileToken({
+        response[key] = `${ReadFileBaseUrl}/${filename}?token=${await createFileToken({
          bucketName: 'chat',
           teamId,
           tmbId,
           fileId
         })}`;
       } catch (error) {}
-      removeFilesByPaths([fileObj.path]);
     }
   }
   return response;
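System plugin responses now carry a base64 payload (SYSTEM_PLUGIN_BASE64) instead of a temp-file path, so the upload goes through uploadFileFromBase64Img and the removeFilesByPaths cleanup is no longer needed; the returned URL also changes shape, from ReadFileBaseUrl?filename=...&token=... to ReadFileBaseUrl/<filename>?token=.... The helper's real signature lives in common/file/gridfs/controller and is not shown in this diff; a rough, hypothetical sketch of the decoding step such an upload needs:

// Hypothetical: split a value that may be a bare base64 string or a data URI
// ("data:image/png;base64,....") into a content type and raw bytes.
const parseBase64Value = (value: string) => {
  const match = value.match(/^data:(.+?);base64,(.*)$/);
  const contentType = match ? match[1] : 'application/octet-stream';
  const base64 = match ? match[2] : value;
  return { contentType, buffer: Buffer.from(base64, 'base64') };
};

// Example: an 8-byte PNG signature encoded as a data URI.
const { contentType, buffer } = parseBase64Value('data:image/png;base64,iVBORw0KGgo=');
console.log(contentType, buffer.length); // image/png 8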