4.7.1-alpha (#1120)
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>

@@ -6,16 +6,9 @@ import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
import { MongoFileSchema } from './schema';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { readFileRawText } from '../read/rawText';
import { ReadFileByBufferParams } from '../read/type';
import { readMarkdown } from '../read/markdown';
import { readHtmlRawText } from '../read/html';
import { readPdfFile } from '../read/pdf';
import { readWordFile } from '../read/word';
import { readCsvRawText } from '../read/csv';
import { MongoRwaTextBuffer } from '../../buffer/rawText/schema';
import { readPptxRawText } from '../read/pptx';
import { readXlsxRawText } from '../read/xlsx';
import { readFileRawContent } from '../read/utils';

export function getGFSCollection(bucket: `${BucketNameEnum}`) {
  MongoFileSchema;

@@ -146,7 +139,7 @@ export const readFileEncode = async ({
  return encoding as BufferEncoding;
};

- export const readFileContent = async ({
+ export const readFileContentFromMongo = async ({
  teamId,
  bucketName,
  fileId,

@@ -205,47 +198,14 @@ export const readFileContent = async ({
  }
};

- const { rawText } = await (async () => {
-   switch (extension) {
-     case 'txt':
-       return readFileRawText(params);
-     case 'md':
-       return readMarkdown(params);
-     case 'html':
-       return readHtmlRawText(params);
-     case 'pdf':
-       return readPdfFile(params);
-     case 'docx':
-       return readWordFile(params);
-     case 'pptx':
-       return readPptxRawText(params);
-     case 'xlsx':
-       const xlsxResult = await readXlsxRawText(params);
-       if (csvFormat) {
-         return {
-           rawText: xlsxResult.formatText || ''
-         };
-       }
-       return {
-         rawText: xlsxResult.rawText
-       };
-     case 'csv':
-       const csvResult = await readCsvRawText(params);
-       if (csvFormat) {
-         return {
-           rawText: csvResult.formatText || ''
-         };
-       }
-       return {
-         rawText: csvResult.rawText
-       };
-     default:
-       return Promise.reject('Only support .txt, .md, .html, .pdf, .docx, pptx, .csv, .xlsx');
-   }
- })();
+ const { rawText } = await readFileRawContent({
+   extension,
+   csvFormat,
+   params
+ });

  if (rawText.trim()) {
-   await MongoRwaTextBuffer.create({
+   MongoRwaTextBuffer.create({
      sourceId: fileId,
      rawText,
      metadata: {

@@ -2,6 +2,15 @@ import { markdownProcess } from '@fastgpt/global/common/string/markdown';
import { uploadMongoImg } from '../image/controller';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { addHours } from 'date-fns';
import { ReadFileByBufferParams } from './type';
import { readFileRawText } from '../read/rawText';
import { readMarkdown } from '../read/markdown';
import { readHtmlRawText } from '../read/html';
import { readPdfFile } from '../read/pdf';
import { readWordFile } from '../read/word';
import { readCsvRawText } from '../read/csv';
import { readPptxRawText } from '../read/pptx';
import { readXlsxRawText } from '../read/xlsx';

export const initMarkdownText = ({
  teamId,

@@ -23,3 +32,50 @@ export const initMarkdownText = ({
    expiredTime: addHours(new Date(), 2)
  })
});

export const readFileRawContent = async ({
  extension,
  csvFormat,
  params
}: {
  csvFormat?: boolean;
  extension: string;
  params: ReadFileByBufferParams;
}) => {
  switch (extension) {
    case 'txt':
      return readFileRawText(params);
    case 'md':
      return readMarkdown(params);
    case 'html':
      return readHtmlRawText(params);
    case 'pdf':
      return readPdfFile(params);
    case 'docx':
      return readWordFile(params);
    case 'pptx':
      return readPptxRawText(params);
    case 'xlsx':
      const xlsxResult = await readXlsxRawText(params);
      if (csvFormat) {
        return {
          rawText: xlsxResult.formatText || ''
        };
      }
      return {
        rawText: xlsxResult.rawText
      };
    case 'csv':
      const csvResult = await readCsvRawText(params);
      if (csvFormat) {
        return {
          rawText: csvResult.formatText || ''
        };
      }
      return {
        rawText: csvResult.rawText
      };
    default:
      return Promise.reject('Only support .txt, .md, .html, .pdf, .docx, pptx, .csv, .xlsx');
  }
};
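
Editor's note: a minimal usage sketch of the new helper, not part of the commit. It assumes ReadFileByBufferParams carries at least the raw buffer, an encoding, and a teamId (the type is only imported above, not shown in this diff), so the field names and file path below are placeholders.

import { readFileSync } from 'fs';
import { readFileRawContent } from './utils'; // hypothetical relative path

const demo = async () => {
  const buffer = readFileSync('./handbook.docx'); // hypothetical local file
  const { rawText } = await readFileRawContent({
    extension: 'docx',
    csvFormat: false,
    params: {
      teamId: 'team-id-placeholder', // assumed field
      buffer,
      encoding: 'utf-8'              // assumed field
    } as any // cast because the full ReadFileByBufferParams type is not reproduced here
  });
  console.log(rawText.slice(0, 200));
};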

@@ -55,8 +55,8 @@ export const clearTmpUploadFiles = () => {
  fs.stat(filePath, (err, stats) => {
    if (err) return;

-   // If the file was uploaded more than 1 hour ago, treat it as a temporary file and delete it
-   if (Date.now() - stats.mtime.getTime() > 1 * 60 * 60 * 1000) {
+   // If the file was uploaded more than 2 hours ago, treat it as a temporary file and delete it
+   if (Date.now() - stats.mtime.getTime() > 2 * 60 * 60 * 1000) {
      fs.unlink(filePath, (err) => {
        if (err) return;
        console.log(`Deleted temp file: ${filePath}`);

packages/service/common/system/timerLock/constants.ts (new file, 15 lines)
@@ -0,0 +1,15 @@
export enum TimerIdEnum {
  checkInValidDatasetFiles = 'checkInValidDatasetFiles',
  checkInvalidDatasetData = 'checkInvalidDatasetData',
  checkInvalidVector = 'checkInvalidVector',
  clearExpiredSubPlan = 'clearExpiredSubPlan',
  updateStandardPlan = 'updateStandardPlan'
}

export const timerIdMap = {
  [TimerIdEnum.checkInValidDatasetFiles]: 'checkInValidDatasetFiles',
  [TimerIdEnum.checkInvalidDatasetData]: 'checkInvalidDatasetData',
  [TimerIdEnum.checkInvalidVector]: 'checkInvalidVector',
  [TimerIdEnum.clearExpiredSubPlan]: 'clearExpiredSubPlan',
  [TimerIdEnum.updateStandardPlan]: 'updateStandardPlan'
};

packages/service/common/system/timerLock/schema.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { connectionMongo, type Model } from '../../mongo';
import { timerIdMap } from './constants';
const { Schema, model, models } = connectionMongo;
import { TimerLockSchemaType } from './type.d';

export const collectionName = 'systemtimerlocks';

const TimerLockSchema = new Schema({
  timerId: {
    type: String,
    required: true,
    unique: true,
    enum: Object.keys(timerIdMap)
  },
  expiredTime: {
    type: Date,
    required: true
  }
});

try {
  TimerLockSchema.index({ expiredTime: 1 }, { expireAfterSeconds: 5 });
} catch (error) {
  console.log(error);
}

export const MongoTimerLock: Model<TimerLockSchemaType> =
  models[collectionName] || model(collectionName, TimerLockSchema);
MongoTimerLock.syncIndexes();

packages/service/common/system/timerLock/type.d.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
export type TimerLockSchemaType = {
  _id: string;
  timerId: string;
  expiredTime: Date;
};

packages/service/common/system/timerLock/utils.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { TimerIdEnum } from './constants';
import { MongoTimerLock } from './schema';
import { addMinutes } from 'date-fns';

/*
  The unique key on timerId ensures that only one instance runs a given task at a time:
  a lock created later fails on the unique-key constraint, so that instance skips the task.
*/
export const checkTimerLock = async ({
  timerId,
  lockMinuted
}: {
  timerId: `${TimerIdEnum}`;
  lockMinuted: number;
}) => {
  try {
    await MongoTimerLock.create({
      timerId,
      expiredTime: addMinutes(new Date(), lockMinuted)
    });

    return true;
  } catch (error) {
    return false;
  }
};
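
Editor's note: a usage sketch, not part of this commit, showing how a scheduled job might guard itself with checkTimerLock. The job name and lock duration below are hypothetical.

import { checkTimerLock } from './utils';
import { TimerIdEnum } from './constants';

// Hypothetical hourly job: whichever instance creates the lock document first does the work;
// the others get `false` back (duplicate-key error on timerId) and skip this round.
export const runCheckInvalidVectorJob = async () => {
  const acquired = await checkTimerLock({
    timerId: TimerIdEnum.checkInvalidVector,
    lockMinuted: 59 // hold the lock slightly shorter than the schedule so the next run can re-acquire
  });
  if (!acquired) return;

  // ... the actual cleanup work would go here ...
};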

@@ -210,7 +210,6 @@ export const runToolWithToolChoice = async (
  ).filter(Boolean) as ToolRunResponseType;

  const flatToolsResponseData = toolsRunResponse.map((item) => item.moduleRunResponse).flat();

  if (toolCalls.length > 0 && !res.closed) {
    // Run the tool, combine its results, and perform another round of AI calls
    const assistantToolMsgParams: ChatCompletionAssistantToolParam = {

@@ -37,6 +37,7 @@ import { dispatchRunTools } from './agent/runTool/index';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { DispatchFlowResponse } from './type';
import { dispatchStopToolCall } from './agent/runTool/stopTool';
+ import { dispatchLafRequest } from './tools/runLaf';

const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
  [FlowNodeTypeEnum.historyNode]: dispatchHistory,

@@ -56,6 +57,7 @@ const callbackMap: Record<`${FlowNodeTypeEnum}`, Function> = {
  [FlowNodeTypeEnum.queryExtension]: dispatchQueryExtension,
  [FlowNodeTypeEnum.tools]: dispatchRunTools,
  [FlowNodeTypeEnum.stopTool]: dispatchStopToolCall,
+ [FlowNodeTypeEnum.lafModule]: dispatchLafRequest,

  // none
  [FlowNodeTypeEnum.userGuide]: () => Promise.resolve()

@@ -61,7 +61,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
  chatId,
  responseChatItemId,
  ...variables,
- histories: histories.slice(0, 10),
+ histories: histories.slice(-10),
  ...body
};
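
Editor's note: the change above swaps the ten oldest history items for the ten most recent; a quick standalone illustration of the Array.prototype.slice difference (sample data is made up).

const histories = Array.from({ length: 12 }, (_, i) => `msg-${i + 1}`);
console.log(histories.slice(0, 10)); // old behaviour: msg-1 ... msg-10 (drops the latest turns)
console.log(histories.slice(-10));   // new behaviour: msg-3 ... msg-12 (keeps the latest turns)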

@@ -165,6 +165,7 @@ async function fetchData({
  'Content-Type': 'application/json',
  ...headers
},
timeout: 120000,
params: params,
data: ['POST', 'PUT', 'PATCH'].includes(method) ? body : undefined
});

packages/service/core/workflow/dispatch/tools/runLaf.ts (new file, 212 lines)
@@ -0,0 +1,212 @@
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
import {
  DYNAMIC_INPUT_KEY,
  ModuleInputKeyEnum,
  ModuleOutputKeyEnum
} from '@fastgpt/global/core/module/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
import axios from 'axios';
import { valueTypeFormat } from '../utils';
import { SERVICE_LOCAL_HOST } from '../../../../common/system/tools';
import { addLog } from '../../../../common/system/log';
import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';

type LafRequestProps = ModuleDispatchProps<{
  [ModuleInputKeyEnum.httpReqUrl]: string;
  [DYNAMIC_INPUT_KEY]: Record<string, any>;
  [key: string]: any;
}>;
type LafResponse = DispatchNodeResultType<{
  [ModuleOutputKeyEnum.failed]?: boolean;
  [key: string]: any;
}>;

const UNDEFINED_SIGN = 'UNDEFINED_SIGN';

export const dispatchLafRequest = async (props: LafRequestProps): Promise<LafResponse> => {
  let {
    appId,
    chatId,
    responseChatItemId,
    variables,
    module: { outputs },
    histories,
    params: { system_httpReqUrl: httpReqUrl, [DYNAMIC_INPUT_KEY]: dynamicInput, ...body }
  } = props;

  if (!httpReqUrl) {
    return Promise.reject('Http url is empty');
  }

  const concatVariables = {
    appId,
    chatId,
    responseChatItemId,
    ...variables,
    ...body
  };

  httpReqUrl = replaceVariable(httpReqUrl, concatVariables);

  const requestBody = {
    systemParams: {
      appId,
      chatId,
      responseChatItemId,
      histories: histories.slice(0, 10)
    },
    variables,
    ...dynamicInput,
    ...body
  };

  try {
    const { formatResponse, rawResponse } = await fetchData({
      method: 'POST',
      url: httpReqUrl,
      body: requestBody
    });

    // format output value type
    const results: Record<string, any> = {};
    for (const key in formatResponse) {
      const output = outputs.find((item) => item.key === key);
      if (!output) continue;
      results[key] = valueTypeFormat(formatResponse[key], output.valueType);
    }

    return {
      assistantResponses: [],
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
        httpResult: rawResponse
      },
      [DispatchNodeResponseKeyEnum.toolResponses]: rawResponse,
      [ModuleOutputKeyEnum.httpRawResponse]: rawResponse,
      ...results
    };
  } catch (error) {
    addLog.error('Http request error', error);
    return {
      [ModuleOutputKeyEnum.failed]: true,
      [DispatchNodeResponseKeyEnum.nodeResponse]: {
        totalPoints: 0,
        body: Object.keys(requestBody).length > 0 ? requestBody : undefined,
        httpResult: { error: formatHttpError(error) }
      }
    };
  }
};
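
Editor's note: a hedged sketch, not part of the commit, of the Laf cloud function this node would call. It assumes Laf's usual `export default async function (ctx)` entry point and that `ctx.body` is the parsed JSON request; only the requestBody shape comes from the code above, and the LafCtx interface below is a local stand-in, not Laf's real type.

interface LafCtx {
  body: {
    systemParams: {
      appId: string;
      chatId?: string;
      responseChatItemId?: string;
      histories: unknown[];
    };
    variables: Record<string, any>;
    [key: string]: any; // dynamicInput and node inputs are spread in at the top level
  };
}

export default async function (ctx: LafCtx) {
  const { systemParams, variables, ...inputs } = ctx.body;
  // Any keys returned here can be mapped onto the node's outputs, since
  // dispatchLafRequest matches response keys against `outputs`.
  return {
    result: `Received ${Object.keys(inputs).length} inputs for app ${systemParams.appId}`
  };
}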

async function fetchData({
  method,
  url,
  body
}: {
  method: string;
  url: string;
  body: Record<string, any>;
}): Promise<Record<string, any>> {
  const { data: response } = await axios({
    method,
    baseURL: `http://${SERVICE_LOCAL_HOST}`,
    url,
    headers: {
      'Content-Type': 'application/json'
    },
    data: body
  });

  const parseJson = (obj: Record<string, any>, prefix = '') => {
    let result: Record<string, any> = {};

    if (Array.isArray(obj)) {
      for (let i = 0; i < obj.length; i++) {
        result[`${prefix}[${i}]`] = obj[i];

        if (Array.isArray(obj[i])) {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}]`)
          };
        } else if (typeof obj[i] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[i], `${prefix}[${i}].`)
          };
        }
      }
    } else if (typeof obj == 'object') {
      for (const key in obj) {
        result[`${prefix}${key}`] = obj[key];

        if (Array.isArray(obj[key])) {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}`)
          };
        } else if (typeof obj[key] === 'object') {
          result = {
            ...result,
            ...parseJson(obj[key], `${prefix}${key}.`)
          };
        }
      }
    }

    return result;
  };

  return {
    formatResponse:
      typeof response === 'object' && !Array.isArray(response) ? parseJson(response) : {},
    rawResponse: response
  };
}
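
Editor's note: to make the key flattening concrete, a standalone illustration (sample data is made up) of what parseJson produces for a nested Laf response, which dispatchLafRequest then matches against the node's output keys.

const sampleResponse = {
  message: 'ok',
  user: { name: 'Alice', tags: ['admin', 'beta'] }
};
// parseJson(sampleResponse) flattens it into path-style keys:
// {
//   'message': 'ok',
//   'user': { name: 'Alice', tags: ['admin', 'beta'] },
//   'user.name': 'Alice',
//   'user.tags': ['admin', 'beta'],
//   'user.tags[0]': 'admin',
//   'user.tags[1]': 'beta'
// }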

function replaceVariable(text: string, obj: Record<string, any>) {
  for (const [key, value] of Object.entries(obj)) {
    if (value === undefined) {
      text = text.replace(new RegExp(`{{${key}}}`, 'g'), UNDEFINED_SIGN);
    } else {
      const replacement = JSON.stringify(value);
      const unquotedReplacement =
        replacement.startsWith('"') && replacement.endsWith('"')
          ? replacement.slice(1, -1)
          : replacement;
      text = text.replace(new RegExp(`{{${key}}}`, 'g'), unquotedReplacement);
    }
  }
  return text || '';
}
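
Editor's note: a small illustration (made-up values, not part of the commit) of how the {{variable}} placeholders in the Laf URL are filled in.

const url = replaceVariable('/chat/{{chatId}}?app={{appId}}&x={{missing}}', {
  chatId: 'abc123',
  appId: 'app42',
  missing: undefined
});
// => '/chat/abc123?app=app42&x=UNDEFINED_SIGN'
// String values are inserted without their JSON quotes; undefined becomes the UNDEFINED_SIGN
// marker, which removeUndefinedSign below can later map back to undefined.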

function removeUndefinedSign(obj: Record<string, any>) {
  for (const key in obj) {
    if (obj[key] === UNDEFINED_SIGN) {
      obj[key] = undefined;
    } else if (Array.isArray(obj[key])) {
      obj[key] = obj[key].map((item: any) => {
        if (item === UNDEFINED_SIGN) {
          return undefined;
        } else if (typeof item === 'object') {
          removeUndefinedSign(item);
        }
        return item;
      });
    } else if (typeof obj[key] === 'object') {
      removeUndefinedSign(obj[key]);
    }
  }
  return obj;
}

function formatHttpError(error: any) {
  return {
    message: error?.message,
    name: error?.name,
    method: error?.config?.method,
    baseURL: error?.config?.baseURL,
    url: error?.config?.url,
    code: error?.code,
    status: error?.status
  };
}

@@ -10,7 +10,6 @@ import { MongoTeam } from './teamSchema';

async function getTeamMember(match: Record<string, any>): Promise<TeamItemType> {
  const tmb = (await MongoTeamMember.findOne(match).populate('teamId')) as TeamMemberWithTeamSchema;

  if (!tmb) {
    return Promise.reject('member not exist');
  }

@@ -27,7 +26,8 @@ async function getTeamMember(match: Record<string, any>): Promise<TeamItemType>
  role: tmb.role,
  status: tmb.status,
  defaultTeam: tmb.defaultTeam,
- canWrite: tmb.role !== TeamMemberRoleEnum.visitor
+ canWrite: tmb.role !== TeamMemberRoleEnum.visitor,
+ lafAccount: tmb.teamId.lafAccount
};
}

@@ -35,6 +35,14 @@ const TeamSchema = new Schema({
    lastWebsiteSyncTime: {
      type: Date
    }
  },
+ lafAccount: {
+   token: {
+     type: String
+   },
+   appid: {
+     type: String
+   }
+ }
});