Perf system plugin and worker (#2126)

* perf: worker pool

* perf: worker register

* perf: worker controller

* perf: system plugin worker

* perf: system plugin worker

* perf: worker

* perf: worker

* worker timeout

* perf: copy icon
Archer
2024-07-23 11:23:42 +08:00
committed by GitHub
parent a4787bce5c
commit e99c91aaa6
34 changed files with 433 additions and 235 deletions

View File

@@ -23,3 +23,8 @@ weight: 816
1. New - Refactored the system plugin structure. System plugins can now be contributed to the open-source community via PR; see [How to submit a system plugin to the FastGPT community](https://fael3z0zfze.feishu.cn/wiki/ERZnw9R26iRRG0kXZRec6WL9nwh).
2. New - DuckDuckGo system plugin.
3. Improved - Node icons.
4. Improved - Added an extra copy button to chat quotes for easier copying, and added folding for quoted content.
5. Fixed - Permission schema declaration issue.
6. Fixed - Running time not recorded correctly when nodes execute in parallel.
7. Fixed - In simple mode, the dataset configuration could not be loaded correctly on first entry.
8. Fixed - Log level configuration.

View File

@@ -4,6 +4,7 @@
"dependencies": {
"duck-duck-scrape": "^2.2.5",
"lodash": "^4.17.21",
"axios": "^1.5.1",
"expr-eval": "^2.0.2"
},
"devDependencies": {

View File

@@ -4,10 +4,12 @@ import { FastGPTProUrl, isProduction } from '../service/common/system/constants'
import { GET, POST } from '@fastgpt/service/common/api/plusRequest';
import { SystemPluginTemplateItemType } from '@fastgpt/global/core/workflow/type';
import { cloneDeep } from 'lodash';
import { WorkerNameEnum, runWorker } from '@fastgpt/service/worker/utils';
let list = [
'getTime',
'fetchUrl',
// Run in main thread
const staticPluginList = ['getTime', 'fetchUrl'];
// Run in a worker thread (plugins with npm package dependencies)
const packagePluginList = [
'mathExprVal',
'duckduckgo',
'duckduckgo/search',
@@ -16,6 +18,8 @@ let list = [
'duckduckgo/searchVideo'
];
const list = [...staticPluginList, ...packagePluginList];
/* Get plugins */
export const getCommunityPlugins = () => {
return list.map<SystemPluginTemplateItemType>((name) => {
@@ -58,8 +62,7 @@ export const getSystemPluginTemplates = async (refresh = false) => {
};
export const getCommunityCb = async () => {
// Do not modify the following code
const loadModule = async (name: string) => {
const loadCommunityModule = async (name: string) => {
const module = await import(`./src/${name}/index`);
return module.default;
};
@@ -70,7 +73,14 @@ export const getCommunityCb = async () => {
try {
return {
name,
cb: await loadModule(name)
cb: staticPluginList.includes(name)
? await loadCommunityModule(name)
: (e: any) => {
return runWorker(WorkerNameEnum.systemPluginRun, {
pluginName: name,
data: e
});
}
};
} catch (error) {}
})

View File

@@ -0,0 +1,24 @@
import { SystemPluginResponseType } from '../type';
import { parentPort } from 'worker_threads';
const loadModule = async (name: string): Promise<(e: any) => SystemPluginResponseType> => {
const module = await import(`../src/${name}/index`);
return module.default;
};
parentPort?.on('message', async ({ pluginName, data }: { pluginName: string; data: any }) => {
try {
const cb = await loadModule(pluginName);
parentPort?.postMessage({
type: 'success',
data: await cb(data)
});
} catch (error) {
parentPort?.postMessage({
type: 'error',
data: error
});
}
process.exit();
});
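On the main-thread side this worker is driven through runWorker, as shown in the plugin registration diff above. A minimal usage sketch, assuming an illustrative plugin name and payload (the real payload shape is whatever Props the plugin's index.ts expects):

import { WorkerNameEnum, runWorker } from '@fastgpt/service/worker/utils';

const runPackagePlugin = async () => {
  // Dispatch a package plugin to a worker thread instead of importing it in-process.
  return runWorker(WorkerNameEnum.systemPluginRun, {
    pluginName: 'duckduckgo/search', // illustrative; any entry from packagePluginList
    data: { query: 'FastGPT' } // illustrative payload; must match the plugin's Props type
  });
};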

View File

@@ -32,14 +32,13 @@ const main = async (props: Props, retry = 3): Response => {
};
} catch (error) {
if (retry <= 0) {
addLog.warn('DuckDuckGo error', { error });
return {
result: 'Failed to fetch data'
};
}
addLog.warn('DuckDuckGo error', { error });
await delay(Math.random() * 2000);
await delay(Math.random() * 5000);
return main(props, retry - 1);
}
};

View File

@@ -31,14 +31,13 @@ const main = async (props: Props, retry = 3): Response => {
};
} catch (error) {
if (retry <= 0) {
addLog.warn('DuckDuckGo error', { error });
return {
result: 'Failed to fetch data'
};
}
addLog.warn('DuckDuckGo error', { error });
await delay(Math.random() * 2000);
await delay(Math.random() * 5000);
return main(props, retry - 1);
}
};

View File

@@ -32,14 +32,13 @@ const main = async (props: Props, retry = 3): Response => {
};
} catch (error) {
if (retry <= 0) {
addLog.warn('DuckDuckGo error', { error });
return {
result: 'Failed to fetch data'
};
}
addLog.warn('DuckDuckGo error', { error });
await delay(Math.random() * 2000);
await delay(Math.random() * 5000);
return main(props, retry - 1);
}
};

View File

@@ -32,14 +32,13 @@ const main = async (props: Props, retry = 3): Response => {
};
} catch (error) {
if (retry <= 0) {
addLog.warn('DuckDuckGo error', { error });
return {
result: 'Failed to fetch data'
};
}
addLog.warn('DuckDuckGo error', { error });
await delay(Math.random() * 2000);
await delay(Math.random() * 5000);
return main(props, retry - 1);
}
};

View File

@@ -1,4 +1,3 @@
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { Parser } from 'expr-eval';
type Props = {

View File

@@ -6,7 +6,7 @@ import { addHours } from 'date-fns';
import { WorkerNameEnum, runWorker } from '../../../worker/utils';
import fs from 'fs';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { ReadFileResponse } from '../../../worker/file/type';
import type { ReadFileResponse } from '../../../worker/readFile/type';
export const initMarkdownText = ({
teamId,

View File

@@ -6,95 +6,41 @@ import {
} from '@fastgpt/global/core/ai/type';
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { WorkerNameEnum, getWorker } from '../../../worker/utils';
import { WorkerNameEnum, getWorkerController } from '../../../worker/utils';
import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { addLog } from '../../system/log';
export const getTiktokenWorker = () => {
const maxWorkers = global.systemEnv?.tokenWorkers || 20;
if (!global.tiktokenWorkers) {
global.tiktokenWorkers = [];
}
if (global.tiktokenWorkers.length >= maxWorkers) {
return global.tiktokenWorkers[Math.floor(Math.random() * global.tiktokenWorkers.length)];
}
const worker = getWorker(WorkerNameEnum.countGptMessagesTokens);
const i = global.tiktokenWorkers.push({
index: global.tiktokenWorkers.length,
worker,
callbackMap: {}
});
worker.on('message', ({ id, data }: { id: string; data: number }) => {
const callback = global.tiktokenWorkers[i - 1]?.callbackMap?.[id];
if (callback) {
callback?.(data);
delete global.tiktokenWorkers[i - 1].callbackMap[id];
}
});
return global.tiktokenWorkers[i - 1];
};
export const countGptMessagesTokens = (
export const countGptMessagesTokens = async (
messages: ChatCompletionMessageParam[],
tools?: ChatCompletionTool[],
functionCall?: ChatCompletionCreateParams.Function[]
) => {
return new Promise<number>(async (resolve) => {
try {
const start = Date.now();
try {
const workerController = getWorkerController<
{
messages: ChatCompletionMessageParam[];
tools?: ChatCompletionTool[];
functionCall?: ChatCompletionCreateParams.Function[];
},
number
>({
name: WorkerNameEnum.countGptMessagesTokens,
maxReservedThreads: global.systemEnv?.tokenWorkers || 20
});
const { worker, callbackMap } = getTiktokenWorker();
const total = await workerController.run({ messages, tools, functionCall });
const id = getNanoid();
const timer = setTimeout(() => {
console.log('Count token Time out');
resolve(
messages.reduce((sum, item) => {
if (item.content) {
return sum + item.content.length * 0.5;
}
return sum;
}, 0)
);
delete callbackMap[id];
}, 60000);
callbackMap[id] = (data) => {
// Check for memory leaks
addLog.debug(`Count token time: ${Date.now() - start}, token: ${data}`);
// console.log(process.memoryUsage());
resolve(data);
clearTimeout(timer);
};
// Could be optimized further (passing ~1M tokens of data takes about 300ms, which is slow)
worker.postMessage({
id,
messages,
tools,
functionCall
});
} catch (error) {
addLog.error('Count token error', error);
const total = messages.reduce((sum, item) => {
if (item.content) {
return sum + item.content.length;
}
return sum;
}, 0);
resolve(total);
}
});
return total;
} catch (error) {
addLog.error('Count token error', error);
const total = messages.reduce((sum, item) => {
if (item.content) {
return sum + item.content.length * 0.5;
}
return sum;
}, 0);
return total;
}
};
export const countMessagesTokens = (messages: ChatItemType[]) => {

View File

@@ -30,7 +30,7 @@ const { LOG_LEVEL, STORE_LOG_LEVEL } = (() => {
const STORE_LOG_LEVEL = (process.env.STORE_LOG_LEVEL || '').toLocaleLowerCase();
return {
LOG_LEVEL: envLogLevelMap[LOG_LEVEL] || LogLevelEnum.info,
LOG_LEVEL: envLogLevelMap[LOG_LEVEL] ?? LogLevelEnum.info,
STORE_LOG_LEVEL: envLogLevelMap[STORE_LOG_LEVEL] ?? 99
};
})();
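The switch from || to ?? matters when a valid log level maps to the number 0. A minimal sketch of the difference (the numeric mapping below is illustrative, not taken from the FastGPT source):

const envLogLevelMap: Record<string, number> = { debug: 0, info: 1, warn: 2, error: 3 };

// With ||, an explicitly configured level of 0 is treated as falsy and silently replaced.
const withOr = envLogLevelMap['debug'] || 1; // evaluates to 1: the debug setting is lost
// With ??, only null/undefined trigger the fallback, so 0 is preserved.
const withNullish = envLogLevelMap['debug'] ?? 1; // evaluates to 0: the debug setting is kept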

View File

@@ -441,11 +441,18 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
// token filter
const filterMaxTokensResult = await (async () => {
const tokensScoreFilter = await Promise.all(
scoreFilter.map(async (item) => ({
...item,
tokens: await countPromptTokens(item.q + item.a)
}))
);
const results: SearchDataResponseItemType[] = [];
let totalTokens = 0;
for await (const item of scoreFilter) {
totalTokens += await countPromptTokens(item.q + item.a);
for await (const item of tokensScoreFilter) {
totalTokens += item.tokens;
if (totalTokens > maxTokens + 500) {
break;

View File

@@ -122,7 +122,6 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
let chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
let chatNodeUsages: ChatNodeUsageType[] = [];
let toolRunResponse: ToolRunResponseItemType;
let runningTime = Date.now();
let debugNextStepRunNodes: RuntimeNodeItemType[] = [];
/* Store special response field */
@@ -142,13 +141,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
[DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // tool module, save the response value
}
) {
const time = Date.now();
if (responseData) {
chatResponses.push({
...responseData,
runningTime: +((time - runningTime) / 1000).toFixed(2)
});
chatResponses.push(responseData);
}
if (nodeDispatchUsages) {
chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
@@ -175,8 +169,6 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
});
}
}
runningTime = time;
}
/* Pass the output of the module to the next stage */
function nodeOutput(
@@ -328,6 +320,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
status: 'running'
});
}
const startTime = Date.now();
// get node running params
const params = getNodeRunParams(node);
@@ -362,6 +355,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
nodeId: node.nodeId,
moduleName: node.name,
moduleType: node.flowNodeType,
runningTime: +((Date.now() - startTime) / 1000).toFixed(2),
...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]
};
})();

View File

@@ -7,6 +7,7 @@ import {
LLMModelItemType
} from '@fastgpt/global/core/ai/model.d';
import { SubPlanType } from '@fastgpt/global/support/wallet/sub/type';
import { WorkerNameEnum, WorkerPool } from './worker/utils';
import { Worker } from 'worker_threads';
declare global {
@@ -20,12 +21,8 @@ declare global {
var whisperModel: WhisperModelType;
var reRankModels: ReRankModelItemType[];
var tiktokenWorkers: {
index: number;
worker: Worker;
callbackMap: Record<string, (e: number) => void>;
}[];
var systemLoadedGlobalVariables: boolean;
var systemLoadedGlobalConfig: boolean;
var workerPoll: Record<WorkerNameEnum, WorkerPool>;
}

View File

@@ -59,16 +59,16 @@ export const readPdfFile = async ({ buffer }: ReadRawTextByBuffer): Promise<Read
const loadingTask = pdfjs.getDocument(buffer.buffer);
const doc = await loadingTask.promise;
const pageTextPromises = [];
for (let pageNo = 1; pageNo <= doc.numPages; pageNo++) {
pageTextPromises.push(readPDFPage(doc, pageNo));
// Avoid OOM.
let result = '';
const pageArr = Array.from({ length: doc.numPages }, (_, i) => i + 1);
for await (const pageNo of pageArr) {
result += await readPDFPage(doc, pageNo);
}
const pageTexts = await Promise.all(pageTextPromises);
loadingTask.destroy();
return {
rawText: pageTexts.join('')
rawText: result
};
};

View File

@@ -1,19 +1,32 @@
import { Worker } from 'worker_threads';
import path from 'path';
import { addLog } from '../common/system/log';
export enum WorkerNameEnum {
readFile = 'readFile',
htmlStr2Md = 'htmlStr2Md',
countGptMessagesTokens = 'countGptMessagesTokens'
countGptMessagesTokens = 'countGptMessagesTokens',
systemPluginRun = 'systemPluginRun'
}
export const getSafeEnv = () => {
return {
LOG_LEVEL: process.env.LOG_LEVEL,
STORE_LOG_LEVEL: process.env.STORE_LOG_LEVEL,
NODE_ENV: process.env.NODE_ENV
};
};
export const getWorker = (name: WorkerNameEnum) => {
const workerPath = path.join(process.cwd(), '.next', 'server', 'worker', `${name}.js`);
return new Worker(workerPath);
return new Worker(workerPath, {
env: getSafeEnv()
});
};
export const runWorker = <T = any>(name: WorkerNameEnum, params?: Record<string, any>) => {
return new Promise<T>((resolve, reject) => {
const start = Date.now();
const worker = getWorker(name);
worker.postMessage(params);
@@ -22,6 +35,11 @@ export const runWorker = <T = any>(name: WorkerNameEnum, params?: Record<string,
if (msg.type === 'error') return reject(msg.data);
resolve(msg.data);
const time = Date.now() - start;
if (time > 1000) {
addLog.info(`Worker ${name} run time: ${time}ms`);
}
});
worker.on('error', (err) => {
@@ -34,3 +52,169 @@ export const runWorker = <T = any>(name: WorkerNameEnum, params?: Record<string,
});
});
};
type WorkerRunTaskType<T> = { data: T; resolve: (e: any) => void; reject: (e: any) => void };
type WorkerQueueItem = {
id: string;
worker: Worker;
status: 'running' | 'idle';
taskTime: number;
timeoutId?: NodeJS.Timeout;
resolve: (e: any) => void;
reject: (e: any) => void;
};
type WorkerResponse<T = any> = {
id: string;
type: 'success' | 'error';
data: T;
};
/*
Multi-threaded task management
* Only one instance needs to be created globally.
* A maximum number of reserved threads can be set (these are never destroyed); once all threads are busy, subsequent tasks wait in a queue.
* Each run hands its data to an idle thread. The main thread listens for data returned by the worker and invokes the corresponding callback, matched by workerId.
* Make sure each thread runs only one task at a time, otherwise callbacks cannot be matched correctly.
*/
export class WorkerPool<Props = Record<string, any>, Response = any> {
name: WorkerNameEnum;
maxReservedThreads: number;
workerQueue: WorkerQueueItem[] = [];
waitQueue: WorkerRunTaskType<Props>[] = [];
constructor({ name, maxReservedThreads }: { name: WorkerNameEnum; maxReservedThreads: number }) {
this.name = name;
this.maxReservedThreads = maxReservedThreads;
}
runTask({ data, resolve, reject }: WorkerRunTaskType<Props>) {
// Get idle worker or create a new worker
const runningWorker = (() => {
const worker = this.workerQueue.find((item) => item.status === 'idle');
if (worker) return worker;
if (this.workerQueue.length < this.maxReservedThreads) {
return this.createWorker();
}
})();
if (runningWorker) {
// Update memory data to latest task
runningWorker.status = 'running';
runningWorker.taskTime = Date.now();
runningWorker.resolve = resolve;
runningWorker.reject = reject;
runningWorker.timeoutId = setTimeout(() => {
reject('Worker timeout');
}, 30000);
runningWorker.worker.postMessage({
id: runningWorker.id,
...data
});
} else {
// Not enough worker, push to wait queue
this.waitQueue.push({ data, resolve, reject });
}
}
run(data: Props) {
// watch memory
addLog.debug(`${this.name} worker queueLength: ${this.workerQueue.length}`);
return new Promise<Response>((resolve, reject) => {
/*
Whether the task runs immediately or is queued, the promise callback is dispatched after the task completes.
*/
this.runTask({
data,
resolve,
reject
});
}).finally(() => {
// Run wait queue
const waitTask = this.waitQueue.shift();
if (waitTask) {
this.runTask(waitTask);
}
});
}
createWorker() {
// Create a new worker and push it onto the queue.
const workerId = `${Date.now()}${Math.random()}`;
const worker = getWorker(this.name);
const item: WorkerQueueItem = {
id: workerId,
worker,
status: 'running',
taskTime: Date.now(),
resolve: () => {},
reject: () => {}
};
this.workerQueue.push(item);
// watch response
worker.on('message', ({ id, type, data }: WorkerResponse<Response>) => {
// Run callback
const workerItem = this.workerQueue.find((item) => item.id === id);
if (!workerItem) {
addLog.warn('Invalid worker', { id, type, data });
return;
}
if (type === 'success') {
workerItem.resolve(data);
} else if (type === 'error') {
workerItem.reject(data);
}
// Clear timeout timer and update worker status
clearTimeout(workerItem.timeoutId);
workerItem.status = 'idle';
});
// On worker error, terminate and remove it (uncaught error).
worker.on('error', (err) => {
addLog.warn('Worker error', { err });
this.deleteWorker(workerId);
});
worker.on('messageerror', (err) => {
addLog.warn('Worker error', { err });
this.deleteWorker(workerId);
});
return item;
}
deleteWorker(workerId: string) {
const item = this.workerQueue.find((item) => item.id === workerId);
if (item) {
item.reject?.('error');
clearTimeout(item.timeoutId);
item.worker.terminate();
}
this.workerQueue = this.workerQueue.filter((item) => item.id !== workerId);
}
}
export const getWorkerController = <Props, Response>(props: {
name: WorkerNameEnum;
maxReservedThreads: number;
}) => {
if (!global.workerPoll) {
// @ts-ignore
global.workerPoll = {};
}
const name = props.name;
if (global.workerPoll[name]) return global.workerPoll[name] as WorkerPool<Props, Response>;
global.workerPoll[name] = new WorkerPool(props);
return global.workerPoll[name] as WorkerPool<Props, Response>;
};
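For reference, a minimal sketch of how a caller drives the pool, mirroring the countGptMessagesTokens refactor shown earlier (the maxReservedThreads value and the message literal are illustrative; the type import follows the imports in the diff above, and FastGPT itself reads the thread count from global.systemEnv?.tokenWorkers):

import { WorkerNameEnum, getWorkerController } from '@fastgpt/service/worker/utils';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';

const countTokens = async (messages: ChatCompletionMessageParam[]) => {
  const workerController = getWorkerController<{ messages: ChatCompletionMessageParam[] }, number>({
    name: WorkerNameEnum.countGptMessagesTokens,
    maxReservedThreads: 4 // illustrative cap on resident worker threads
  });

  // run() reuses an idle worker, spawns a new one up to the cap,
  // or queues the task until a worker becomes free.
  return workerController.run({ messages });
};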

View File

@@ -1,3 +1,3 @@
<svg width="18" height="18" viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.96967 11.7803C4.26256 12.0732 4.73744 12.0732 5.03033 11.7803L9 7.81066L12.9697 11.7803C13.2626 12.0732 13.7374 12.0732 14.0303 11.7803C14.3232 11.4874 14.3232 11.0126 14.0303 10.7197L9.53033 6.21967C9.23744 5.92678 8.76256 5.92678 8.46967 6.21967L3.96967 10.7197C3.67678 11.0126 3.67678 11.4874 3.96967 11.7803Z" fill="#667085"/>
<svg viewBox="0 0 18 18" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.96967 11.7803C4.26256 12.0732 4.73744 12.0732 5.03033 11.7803L9 7.81066L12.9697 11.7803C13.2626 12.0732 13.7374 12.0732 14.0303 11.7803C14.3232 11.4874 14.3232 11.0126 14.0303 10.7197L9.53033 6.21967C9.23744 5.92678 8.76256 5.92678 8.46967 6.21967L3.96967 10.7197C3.67678 11.0126 3.67678 11.4874 3.96967 11.7803Z" />
</svg>

Before: 486 B → After: 449 B

pnpm-lock.yaml (generated, 9 changes)
View File

@@ -81,6 +81,9 @@ importers:
packages/plugins:
dependencies:
axios:
specifier: ^1.5.1
version: 1.7.2
duck-duck-scrape:
specifier: ^2.2.5
version: 2.2.5
@@ -13823,7 +13826,7 @@ snapshots:
eslint: 8.56.0
eslint-import-resolver-node: 0.3.9
eslint-import-resolver-typescript: 3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.56.0)
eslint-plugin-jsx-a11y: 6.9.0(eslint@8.56.0)
eslint-plugin-react: 7.34.4(eslint@8.56.0)
eslint-plugin-react-hooks: 4.6.2(eslint@8.56.0)
@@ -13847,7 +13850,7 @@ snapshots:
enhanced-resolve: 5.17.0
eslint: 8.56.0
eslint-module-utils: 2.8.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0)
eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.56.0)
fast-glob: 3.3.2
get-tsconfig: 4.7.5
is-core-module: 2.14.0
@@ -13869,7 +13872,7 @@ snapshots:
transitivePeerDependencies:
- supports-color
eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1(eslint@8.56.0))(eslint@8.56.0))(eslint@8.56.0):
eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.56.0)(typescript@5.5.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.56.0):
dependencies:
array-includes: 3.1.8
array.prototype.findlastindex: 1.2.5

View File

@@ -1,5 +1,6 @@
const { i18n } = require('./next-i18next.config');
const path = require('path');
const fs = require('fs');
const isDev = process.env.NODE_ENV === 'development';
@@ -53,17 +54,10 @@ const nextConfig = {
const entries = await oldEntry(...args);
return {
...entries,
'worker/htmlStr2Md': path.resolve(
...getWorkerConfig(),
'worker/systemPluginRun': path.resolve(
process.cwd(),
'../../packages/service/worker/htmlStr2Md/index.ts'
),
'worker/countGptMessagesTokens': path.resolve(
process.cwd(),
'../../packages/service/worker/tiktoken/countGptMessagesTokens.ts'
),
'worker/readFile': path.resolve(
process.cwd(),
'../../packages/service/worker/file/read.ts'
'../../packages/plugins/runtime/worker.ts'
)
};
}
@@ -95,3 +89,39 @@ const nextConfig = {
};
module.exports = nextConfig;
function getWorkerConfig() {
const result = fs.readdirSync(path.resolve(__dirname, '../../packages/service/worker'));
// Get all directory names
const folderList = result.filter((item) => {
return fs
.statSync(path.resolve(__dirname, '../../packages/service/worker', item))
.isDirectory();
});
/*
{
'worker/htmlStr2Md': path.resolve(
process.cwd(),
'../../packages/service/worker/htmlStr2Md/index.ts'
),
'worker/countGptMessagesTokens': path.resolve(
process.cwd(),
'../../packages/service/worker/countGptMessagesTokens/index.ts'
),
'worker/readFile': path.resolve(
process.cwd(),
'../../packages/service/worker/readFile/index.ts'
)
}
*/
const workerConfig = folderList.reduce((acc, item) => {
acc[`worker/${item}`] = path.resolve(
process.cwd(),
`../../packages/service/worker/${item}/index.ts`
);
return acc;
}, {});
return workerConfig;
}

View File

@@ -144,7 +144,16 @@ const ChatItem = ({
)}
</Flex>
{/* content */}
<Box mt={['6px', 2]} className="chat-box-card" textAlign={styleMap.textAlign}>
<Box
mt={['6px', 2]}
className="chat-box-card"
textAlign={styleMap.textAlign}
_hover={{
'& .footer-copy': {
display: 'block'
}
}}
>
<Card
{...MessageCardStyle}
bg={styleMap.bg}
@@ -156,19 +165,21 @@ const ChatItem = ({
{/* Copy button at the bottom of the chat bubble */}
{type == ChatRoleEnum.AI && (!isChatting || (isChatting && !isLastChild)) && (
<Box
className="footer-copy"
display={['block', 'none']}
position={'absolute'}
bottom={0}
right={[0, -2]}
color={'myGray.400'}
right={0}
transform={'translateX(100%)'}
>
<MyTooltip label={t('common.Copy')}>
<MyTooltip label={t('common:common.Copy')}>
<MyIcon
w={'14px'}
w={'1rem'}
cursor="pointer"
p="5px"
bg="white"
name={'copy'}
color={'myGray.500'}
_hover={{ color: 'primary.600' }}
onClick={() => copyData(chatText)}
/>

View File

@@ -40,7 +40,6 @@ const ResponseTags = ({
sourceName: string;
};
}>();
const [isOverflow, setIsOverflow] = useState<boolean>(true);
const [quoteFolded, setQuoteFolded] = useState<boolean>(true);
const [contextModalData, setContextModalData] =
useState<DispatchNodeResponseType['historyPreview']>();
@@ -51,11 +50,9 @@ const ResponseTags = ({
} = useDisclosure();
const quoteListSize = useSize(quoteListRef);
useEffect(() => {
setIsOverflow(
quoteListRef.current ? quoteListRef.current.scrollHeight > (isPc ? 50 : 55) : true
);
}, [isOverflow, quoteListSize]);
const quoteIsOverflow = quoteListRef.current
? quoteListRef.current.scrollHeight > (isPc ? 50 : 55)
: true;
const {
llmModuleAccount,
@@ -114,7 +111,7 @@ const ResponseTags = ({
<Box width={'100%'}>
<ChatBoxDivider icon="core/chat/quoteFill" text={t('common:core.chat.Quote')} />{' '}
</Box>
{quoteFolded && isOverflow && (
{quoteFolded && quoteIsOverflow && (
<MyIcon
_hover={{ color: 'primary.500', cursor: 'pointer' }}
name="core/chat/chevronDown"
@@ -124,89 +121,79 @@ const ResponseTags = ({
)}
</Flex>
<Flex alignItems={'center'} flexWrap={'wrap'} gap={2} position={'relative'}>
{
<Collapse
startingHeight={isPc ? '50px' : '55px'}
in={(!quoteFolded && isOverflow) || !isOverflow}
>
<Flex
ref={quoteListRef}
alignItems={'center'}
position={'relative'}
flexWrap={'wrap'}
gap={2}
height={quoteFolded && isOverflow ? ['55px', '50px'] : 'auto'}
overflow={'hidden'}
_after={
quoteFolded && isOverflow
? {
content: '""',
position: 'absolute',
zIndex: 2,
bottom: 0,
left: 0,
width: '100%',
height: '50%',
background:
'linear-gradient(to bottom, rgba(247,247,247,0), rgba(247, 247, 247, 0.91))',
pointerEvents: 'none'
}
: {}
<Flex
ref={quoteListRef}
alignItems={'center'}
position={'relative'}
flexWrap={'wrap'}
gap={2}
maxH={quoteFolded && quoteIsOverflow ? ['50px', '55px'] : 'auto'}
overflow={'hidden'}
_after={
quoteFolded && quoteIsOverflow
? {
content: '""',
position: 'absolute',
zIndex: 2,
bottom: 0,
left: 0,
width: '100%',
height: '50%',
background:
'linear-gradient(to bottom, rgba(247,247,247,0), rgba(247, 247, 247, 0.91))'
}
>
{sourceList.map((item) => {
return (
<MyTooltip key={item.collectionId} label={t('core.chat.quote.Read Quote')}>
<Flex
alignItems={'center'}
fontSize={'xs'}
border={'sm'}
py={1.5}
px={2}
borderRadius={'sm'}
_hover={{
'.controller': {
display: 'flex'
}
}}
overflow={'hidden'}
position={'relative'}
cursor={'pointer'}
onClick={(e) => {
e.stopPropagation();
setQuoteModalData({
rawSearch: quoteList,
metadata: {
collectionId: item.collectionId,
sourceId: item.sourceId,
sourceName: item.sourceName
}
});
}}
>
<MyIcon name={item.icon as any} mr={1} flexShrink={0} w={'12px'} />
<Box className="textEllipsis3" wordBreak={'break-all'} flex={'1 0 0'}>
{item.sourceName}
</Box>
</Flex>
</MyTooltip>
);
})}
{isOverflow && !quoteFolded && (
<MyIcon
position={'absolute'}
bottom={0}
right={0}
_hover={{ color: 'primary.500', cursor: 'pointer' }}
name="core/chat/chevronUp"
w={'14px'}
onClick={() => setQuoteFolded(!quoteFolded)}
/>
)}
</Flex>
</Collapse>
: {}
}
>
{sourceList.map((item) => {
return (
<MyTooltip key={item.collectionId} label={t('common:core.chat.quote.Read Quote')}>
<Flex
alignItems={'center'}
fontSize={'xs'}
border={'sm'}
py={1.5}
px={2}
borderRadius={'sm'}
_hover={{
'.controller': {
display: 'flex'
}
}}
overflow={'hidden'}
position={'relative'}
cursor={'pointer'}
onClick={(e) => {
e.stopPropagation();
setQuoteModalData({
rawSearch: quoteList,
metadata: {
collectionId: item.collectionId,
sourceId: item.sourceId,
sourceName: item.sourceName
}
});
}}
>
<MyIcon name={item.icon as any} mr={1} flexShrink={0} w={'12px'} />
<Box className="textEllipsis3" wordBreak={'break-all'} flex={'1 0 0'}>
{item.sourceName}
</Box>
</Flex>
</MyTooltip>
);
})}
{!quoteFolded && (
<MyIcon
position={'absolute'}
bottom={0}
right={0}
_hover={{ color: 'primary.500', cursor: 'pointer' }}
name="core/chat/chevronUp"
w={'14px'}
onClick={() => setQuoteFolded(!quoteFolded)}
/>
)}
</Flex>
</>
)}

View File

@@ -11,21 +11,25 @@ import { useI18n } from '@/web/context/I18n';
import { useContextSelector } from 'use-context-selector';
import { AppContext } from '../context';
import { useChatTest } from '../useChatTest';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
const ChatTest = ({ appForm }: { appForm: AppSimpleEditFormType }) => {
const { t } = useTranslation();
const { appT } = useI18n();
const { appDetail } = useContextSelector(AppContext, (v) => v);
// form2AppWorkflow depends on allDatasets
const { allDatasets } = useDatasetStore();
const [workflowData, setWorkflowData] = useSafeState({
nodes: appDetail.modules || [],
edges: appDetail.edges || []
});
useEffect(() => {
const { nodes, edges } = form2AppWorkflow(appForm);
setWorkflowData({ nodes, edges });
}, [appForm, setWorkflowData]);
}, [appForm, setWorkflowData, allDatasets]);
const { restartChat, ChatContainer } = useChatTest({
...workflowData,