Add SSE controller; fix share page login failure (#330)

Author: Archer
Date: 2023-09-20 16:34:32 +08:00 (committed by GitHub)
Parent: 0d94db4331
Commit: 7e0deb29e0
13 changed files with 234 additions and 138 deletions

=== file (path not captured) ===

@@ -88,8 +88,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 export const config = {
   api: {
-    bodyParser: {
-      sizeLimit: '20mb'
-    },
     responseLimit: '20mb'
   }
 };
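For context: Next.js pages-router API routes read this `config` export to tune per-route behavior. This hunk (and the three identical ones below) drops the per-route `bodyParser` override, falling back to Next's 1mb default, while keeping `responseLimit`, which logs a warning when a response body exceeds the limit. A minimal sketch of a route using both options; the route body and the limit values here are illustrative, not from this commit:

// pages/api/example.ts, an illustrative route, not part of this commit.
import type { NextApiRequest, NextApiResponse } from 'next';

export default function handler(req: NextApiRequest, res: NextApiResponse) {
  res.status(200).json({ ok: true });
}

// Next.js reads this export to configure the route:
// - bodyParser.sizeLimit caps the parsed request body (default 1mb);
// - responseLimit logs a warning when a response exceeds it (default 4mb),
//   which matters once responses are streamed.
export const config = {
  api: {
    bodyParser: {
      sizeLimit: '2mb' // illustrative value
    },
    responseLimit: '20mb'
  }
};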

=== file (path not captured) ===

@@ -101,8 +101,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 export const config = {
   api: {
-    bodyParser: {
-      sizeLimit: '10mb'
-    },
     responseLimit: '10mb'
   }
 };

=== file (path not captured) ===

@@ -7,6 +7,7 @@ import { UpdateFileProps } from '@/api/core/dataset/file.d';
 import { Types } from 'mongoose';
 import { PgClient } from '@/service/pg';
 import { PgDatasetTableName } from '@/constants/plugin';
+import { addLog } from '@/service/utils/tools';

 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
@@ -59,6 +60,7 @@ async function updateDatasetSource(data: { fileId: string; userId: string; name?
         ]
       });
     } catch (error) {
+      addLog.error(`Update dataset source error`, error);
       setTimeout(() => {
         updateDatasetSource(data);
       }, 2000);
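The added `addLog.error` makes this retry loop observable in the logs; note that as written the function retries every 2 seconds with no upper bound. A sketch of the same log-and-retry pattern with a cap; `updateDatasetSource` and `addLog` are the names from this diff, while `retryTimes` and the wrapper are hypothetical additions:

// Sketch only: a bounded variant of the log-and-retry pattern above.
// Assumed present in this file (as in the diff):
declare function updateDatasetSource(data: {
  fileId: string;
  userId: string;
  name?: string;
}): Promise<void>;
declare const addLog: { error: (msg: string, err?: unknown) => void };

function retryUpdateDatasetSource(
  data: { fileId: string; userId: string; name?: string },
  retryTimes = 3 // hypothetical cap; the diff retries indefinitely
) {
  updateDatasetSource(data).catch((error) => {
    addLog.error(`Update dataset source error`, error);
    if (retryTimes <= 0) return; // give up instead of looping forever
    setTimeout(() => retryUpdateDatasetSource(data, retryTimes - 1), 2000);
  });
}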

=== file (path not captured) ===

@@ -173,8 +173,6 @@ export async function pushDataToKb({
 export const config = {
   api: {
-    bodyParser: {
-      sizeLimit: '12mb'
-    },
     responseLimit: '12mb'
   }
 };

=== file (path not captured) ===

@@ -477,8 +477,6 @@ export function getSystemVariable({ timezone }: { timezone: string }) {
 export const config = {
   api: {
-    bodyParser: {
-      sizeLimit: '20mb'
-    },
     responseLimit: '20mb'
   }
 };

=== file (path not captured) ===

@@ -5,7 +5,9 @@ import { authUser } from '@/service/utils/auth';
import { PgDatasetTableName } from '@/constants/plugin';
import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import Papa from 'papaparse';
import { PgClient } from '@/service/pg';
import { addLog } from '@/service/utils/tools';
import { responseWriteController } from '@/service/common/stream';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -24,7 +26,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
   const exportIds = [kbId, ...(await findAllChildrenIds(kbId))];

-  const thirtyMinutesAgo = new Date(
+  const limitMinutesAgo = new Date(
     Date.now() - (global.feConfigs?.limit?.exportLimitMinutes || 0) * 60 * 1000
   );
@@ -34,7 +36,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
       _id: userId,
       $or: [
         { 'limit.exportKbTime': { $exists: false } },
-        { 'limit.exportKbTime': { $lte: thirtyMinutesAgo } }
+        { 'limit.exportKbTime': { $lte: limitMinutesAgo } }
       ]
     },
     '_id limit'
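These two hunks implement a per-user export cooldown: the `findOne` only matches when the user has never exported (`$exists: false`) or last exported before the cutoff, so a failed match means the window has not elapsed. A minimal sketch of the same gate, assuming a mongoose model with a `limit.exportKbTime` date field; the `User` model and helper name are illustrative:

import { Model, Types } from 'mongoose';

// Assumption: a mongoose model whose documents carry `limit.exportKbTime: Date`.
declare const User: Model<any>;

// Throws unless the user's last export is older than `limitMinutes`.
async function assertExportAllowed(userId: string, limitMinutes: number) {
  const limitMinutesAgo = new Date(Date.now() - limitMinutes * 60 * 1000);

  const user = await User.findOne(
    {
      _id: new Types.ObjectId(userId),
      $or: [
        { 'limit.exportKbTime': { $exists: false } }, // never exported
        { 'limit.exportKbTime': { $lte: limitMinutesAgo } } // window elapsed
      ]
    },
    '_id limit'
  );

  if (!user) {
    throw new Error(`Exports are limited to once every ${limitMinutes} minutes.`);
  }
}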
@@ -45,6 +47,19 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
     throw new Error(`Less than ${minutes} since the last export; exports are allowed once every ${minutes}.`);
   }

+  const { rows } = await PgClient.query(
+    `SELECT count(id) FROM ${PgDatasetTableName} where user_id='${userId}' AND kb_id IN (${exportIds
+      .map((id) => `'${id}'`)
+      .join(',')})`
+  );
+  const total = rows?.[0]?.count || 0;
+
+  addLog.info(`export datasets: ${userId}`, { total });
+
+  if (total > 100000) {
+    throw new Error('The dataset exceeds 100,000 rows and cannot be exported');
+  }
+
   // connect pg
   global.pgClient.connect((err, client, done) => {
     if (err) {
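The new guard counts matching rows before the stream starts and aborts exports over 100,000 rows. The added query interpolates `userId` and the kb ids directly into the SQL string; a hedged sketch of the same count using node-postgres parameter placeholders instead (whether the project's `PgClient.query` wrapper accepts a values array is an assumption, so plain `pg` is used here):

import { Pool } from 'pg';

const pool = new Pool(); // connection settings come from PG* env vars

// Same row-count guard, with $1/$2 placeholders instead of string interpolation.
// tableName must be a trusted constant; identifiers cannot be parameterized.
async function assertExportSize(userId: string, exportIds: string[], tableName: string) {
  const { rows } = await pool.query(
    `SELECT count(id) FROM ${tableName} WHERE user_id = $1 AND kb_id = ANY($2)`,
    [userId, exportIds]
  );
  const total = Number(rows?.[0]?.count || 0);

  if (total > 100000) {
    throw new Error('The dataset exceeds 100,000 rows and cannot be exported');
  }
  return total;
}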
@@ -52,6 +67,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
       res.end('Error connecting to database');
       return;
     }
+    console.log('export data');
+
     // create pg select stream
     const query = new QueryStream(
       `SELECT q, a, source FROM ${PgDatasetTableName} where user_id='${userId}' AND kb_id IN (${exportIds
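`QueryStream` from pg-query-stream wraps a SQL query in a Node readable stream that pulls rows from a server-side cursor in batches, so a large table never sits in memory at once. A self-contained usage sketch; the table name and connection details are illustrative:

import { Pool } from 'pg';
import QueryStream from 'pg-query-stream';

const pool = new Pool(); // reads PG* env vars

pool.connect((err, client, done) => {
  if (err || !client) {
    console.error('Error connecting to database', err);
    return;
  }

  // client.query(queryStream) returns the stream itself.
  const query = new QueryStream('SELECT q, a, source FROM my_table', []);
  const stream = client.query(query);

  stream.on('data', (row) => {
    console.log(row); // one row at a time, never the whole result set
  });
  stream.on('end', () => done()); // release the client back to the pool
  stream.on('error', (e) => {
    console.error(e);
    done(e); // destroy the client on error
  });
});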
@@ -65,11 +82,19 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
       res.write('index,content,source');

-      // parse data every row
-      stream.on('data', (row: { q: string; a: string; source?: string }) => {
-        const csv = Papa.unparse([row], { header: false });
-        res.write(`\n${csv}`);
-      });
+      const write = responseWriteController({
+        res,
+        readStream: stream
+      });
+
+      // parse data every row
+      stream.on('data', ({ q, a, source }: { q: string; a: string; source?: string }) => {
+        if (res.closed) {
+          return stream.destroy();
+        }
+        write(`\n"${q}","${a || ''}","${source || ''}"`);
+      });

       // finish
       stream.on('end', async () => {
         try {
           // update export time
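`responseWriteController` is the stream controller this commit introduces; its source file is not among the hunks shown here. From the call site, it pairs `res.write` with the source stream so that a slow client pauses the Postgres cursor instead of letting data buffer without bound. A plausible minimal sketch of such a backpressure-aware writer, inferred from the call site; the real implementation may differ:

import type { NextApiResponse } from 'next';
import type { Readable } from 'stream';

// Assumed shape: pause the source stream when res.write() reports a full
// buffer, resume it once the socket drains.
export function responseWriteController({
  res,
  readStream
}: {
  res: NextApiResponse;
  readStream: Readable;
}) {
  res.on('drain', () => {
    readStream.resume(); // socket buffer flushed; keep pulling rows
  });

  return (text: string | Buffer) => {
    const ok = res.write(text);
    if (!ok) {
      readStream.pause(); // backpressure: stop reading until 'drain'
    }
  };
}

One caveat worth noting: the hand-rolled `"${q}",...` quoting that replaces `Papa.unparse` wraps fields in double quotes but does not escape quotes already inside a field, so a value containing `"` yields a malformed CSV row; `Papa.unparse` handled that escaping.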
@@ -98,8 +123,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
 export const config = {
   api: {
-    bodyParser: {
-      sizeLimit: '200mb'
-    },
     responseLimit: '100mb'
   }
 };