Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
fix: optimize Mongo file read performance; resolve service-wide hang caused by excessive Buffer.concat calls (#3985)
When Buffer.concat() is called frequently, especially while processing large amounts of data, memory allocation pressure grows and performance degrades. In testing, uploading and parsing a PDF file larger than 100 MB left the /api/core/dataset/collection/create/fileId endpoint unresponsive for a long time, and other endpoints hung as well, making the entire service unavailable. Collecting the chunks and concatenating them in a single pass resolves the issue.
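The difference matters because each Buffer.concat call allocates a fresh buffer and re-copies every byte accumulated so far, so per-chunk concatenation copies O(n^2) bytes for an n-byte stream. A minimal sketch contrasting the two patterns (illustration only, not code from this commit; collectQuadratic and collectLinear are hypothetical names):

// Anti-pattern: each iteration re-copies the whole accumulator,
// so total copying grows quadratically with input size.
function collectQuadratic(parts: Buffer[]): Buffer {
  let acc: Buffer = Buffer.from([]);
  for (const part of parts) {
    acc = Buffer.concat([acc, part]); // copies acc + part every time
  }
  return acc;
}

// Fixed pattern (mirrors the 'data' handler in the diff below):
// push chunks, track the running length, concatenate exactly once.
function collectLinear(parts: Buffer[]): Buffer {
  const chunks: Buffer[] = [];
  let totalLength = 0;
  for (const part of parts) {
    chunks.push(part);    // O(1) per chunk, no copying
    totalLength += part.length;
  }
  // Passing totalLength lets Buffer.concat size the result buffer up front,
  // so every byte is copied exactly once.
  return Buffer.concat(chunks, totalLength);
}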
@@ -3,13 +3,16 @@ import { PassThrough } from 'stream';
 
 export const gridFsStream2Buffer = (stream: NodeJS.ReadableStream) => {
   return new Promise<Buffer>((resolve, reject) => {
-    let tmpBuffer: Buffer = Buffer.from([]);
+    const chunks: Buffer[] = [];
+    let totalLength = 0;
 
     stream.on('data', (chunk) => {
-      tmpBuffer = Buffer.concat([tmpBuffer, chunk]);
+      chunks.push(chunk);
+      totalLength += chunk.length;
     });
     stream.on('end', () => {
-      resolve(tmpBuffer);
+      const resultBuffer = Buffer.concat(chunks, totalLength); // concatenate once
+      resolve(resultBuffer);
     });
     stream.on('error', (err) => {
       reject(err);
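For context, a sketch of how the fixed helper might be used to buffer a GridFS download stream. This is not from the commit: the import path, connection URI handling, and database name are placeholders; the mongodb driver calls (MongoClient.connect, GridFSBucket, openDownloadStream) are the standard GridFS API.

import { MongoClient, GridFSBucket, ObjectId } from 'mongodb';
// Hypothetical path — points at wherever gridFsStream2Buffer is defined.
import { gridFsStream2Buffer } from './gridfs/utils';

// Read an entire GridFS file into memory in one pass.
async function readGridFsFile(uri: string, fileId: string): Promise<Buffer> {
  const client = await MongoClient.connect(uri);
  try {
    const bucket = new GridFSBucket(client.db('fastgpt')); // placeholder db name
    const downloadStream = bucket.openDownloadStream(new ObjectId(fileId));
    // With the fix, this buffers the stream with a single final concat
    // instead of one Buffer.concat per chunk.
    return await gridFsStream2Buffer(downloadStream);
  } finally {
    await client.close();
  }
}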