Fix the duplicate data check problem and history filter, and add TTS stream (#477)

This commit is contained in:
Archer
2023-11-16 16:22:08 +08:00
committed by GitHub
parent 16103029f5
commit fbe1d8cfed
31 changed files with 359 additions and 187 deletions

View File

@@ -1,5 +1,4 @@
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@fastgpt/service/common/pg';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
/**
* Same value judgment
@@ -13,14 +12,13 @@ export async function hasSameValue({
q: string;
a?: string;
}) {
const { rows: existsRows } = await PgClient.query(`
SELECT COUNT(*) > 0 AS exists
FROM ${PgDatasetTableName}
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') AND collection_id='${collectionId}'
`);
const exists = existsRows[0]?.exists || false;
const count = await MongoDatasetData.countDocuments({
q,
a,
collectionId
});
if (exists) {
if (count > 0) {
return Promise.reject('已经存在完全一致的数据');
}
}

View File

@@ -50,6 +50,14 @@ export async function dispatchModules({
stream?: boolean;
detail?: boolean;
}) {
// set sse response headers
if (stream) {
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
}
variables = {
...getSystemVariable({ timezone: user.timezone }),
...variables
@@ -167,6 +175,7 @@ export async function dispatchModules({
user,
teamId,
tmbId,
chatId,
inputs: params
};