Mirror of https://github.com/labring/FastGPT.git · synced 2025-10-22 11:55:07 +00:00
V4.9.7 feature (#4669)
* update doc
* feat: Add coupon redemption feature for team subscriptions (#4595)
  - Introduced `TeamCouponSub` and `TeamCouponSchema` types
  - Added `redeemCoupon` API endpoint
  - Updated UI to include a modal for coupon redemption
  - Added a new icon and translations for "Redeem coupon"
* perf: remove field teamId
* perf: use dynamic import
* refactor: move to page component
* perf: coupon code
* perf: mcp server
* perf: test
* auto layout (#4634)
* fix 4.9.6 (#4631): fix debug quote list; delete next text node match; fix extract default boolean value; export latest 100 chat items; fix quote item ui; doc fixes; feat/perf: auto layout; fix: auto layout null; add start node
* fix: share link (#4644)
* Add workflow run duration; get audio duration (#4645)
* Custom config path (#4649): read the config file directory from the DATA_PATH environment variable, to support varied deployment setups (#4622); feat: custom config.json path; doc
* In programmatic API scenarios, a high volume of calls carrying images or video produces chat records that can destabilize the backing MongoDB. This change gives API clients an option to disable chat-record generation and avoid that outcome. (#3964)
* update special chatId
* perf: vector db rename
* update operationLog (#4647): combine operationLogMap; solve operationI18nLogMap bug; remove log
* feat: Rerank usage (#4654)
* refresh concat when update (#4655): fix refresh code; perf: timer lock
* Fix operationLog (#4657)
* perf: http streamable mcp
* add alipay (#4630): perf: subplan ui; perf: pay code; hide bank tip
* Fix: pay error (#4665)
* fix quote number (#4666)
* remove log

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: John Chen <sss1991@163.com>
Co-authored-by: gaord <bengao168@msn.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
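The custom config path item above (#4649) amounts to resolving the configuration directory from an environment variable before loading config.json. A minimal sketch of that idea, assuming a Node runtime; the fallback directory and file name are illustrative assumptions, not FastGPT's actual loader:

import path from 'path';

// Resolve the config directory from DATA_PATH, falling back to a local
// default. Fallback location and file name are assumptions for illustration.
const getConfigPath = (): string => {
  const configDir = process.env.DATA_PATH || path.join(process.cwd(), 'data');
  return path.join(configDir, 'config.json');
};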
packages/service/common/vectorDB/pg/index.ts · 276 lines · new file
@@ -0,0 +1,276 @@
/* pg vector crud */
import { DatasetVectorTableName } from '../constants';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient, connectPg } from './controller';
import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import type {
  DelDatasetVectorCtrlProps,
  EmbeddingRecallCtrlProps,
  EmbeddingRecallResponse,
  InsertVectorControllerProps
} from '../controller.d';
import dayjs from 'dayjs';
import { addLog } from '../../system/log';

export class PgVectorCtrl {
  constructor() {}
  init = async () => {
    try {
      await connectPg();
      await PgClient.query(`
        CREATE EXTENSION IF NOT EXISTS vector;
        CREATE TABLE IF NOT EXISTS ${DatasetVectorTableName} (
          id BIGSERIAL PRIMARY KEY,
          vector VECTOR(1536) NOT NULL,
          team_id VARCHAR(50) NOT NULL,
          dataset_id VARCHAR(50) NOT NULL,
          collection_id VARCHAR(50) NOT NULL,
          createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
      `);

      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS vector_index ON ${DatasetVectorTableName} USING hnsw (vector vector_ip_ops) WITH (m = 32, ef_construction = 128);`
      );
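      // HNSW tuning note: m sets graph connectivity and ef_construction the
      // build-time candidate list size; larger values improve recall at the
      // cost of build time and memory. m = 32 / ef_construction = 128 are the
      // values this file ships with, not universal recommendations.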
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS team_dataset_collection_index ON ${DatasetVectorTableName} USING btree(team_id, dataset_id, collection_id);`
      );
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${DatasetVectorTableName} USING btree(createtime);`
      );
      // ~100k rows
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.1,
      //     autovacuum_analyze_scale_factor = 0.05,
      //     autovacuum_vacuum_threshold = 50,
      //     autovacuum_analyze_threshold = 50,
      //     autovacuum_vacuum_cost_delay = 20,
      //     autovacuum_vacuum_cost_limit = 200
      //   );`);

      // ~1M rows
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.01,
      //     autovacuum_analyze_scale_factor = 0.02,
      //     autovacuum_vacuum_threshold = 1000,
      //     autovacuum_analyze_threshold = 1000,
      //     autovacuum_vacuum_cost_delay = 10,
      //     autovacuum_vacuum_cost_limit = 2000
      //   );`)

      addLog.info('init pg successful');
    } catch (error) {
      addLog.error('init pg error', error);
    }
  };
  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;

    try {
      const { rowCount, rows } = await PgClient.insert(DatasetVectorTableName, {
        values: [
          [
            { key: 'vector', value: `[${vector}]` },
            { key: 'team_id', value: String(teamId) },
            { key: 'dataset_id', value: String(datasetId) },
            { key: 'collection_id', value: String(collectionId) }
          ]
        ]
      });

      if (rowCount === 0) {
        return Promise.reject('insertDatasetData: no insert');
      }

      return {
        insertId: rows[0].id
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.insert({
        ...props,
        retry: retry - 1
      });
    }
  };
  delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
    const { teamId, retry = 2 } = props;

    const teamIdWhere = `team_id='${String(teamId)}' AND`;
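    // Ids are interpolated into the SQL string directly; this is only safe
    // because they originate upstream as Mongo ObjectIds / numeric row ids,
    // not free-form user input.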
    const where = await (() => {
      if ('id' in props && props.id) return `${teamIdWhere} id=${props.id}`;

      if ('datasetIds' in props && props.datasetIds) {
        const datasetIdWhere = `dataset_id IN (${props.datasetIds
          .map((id) => `'${String(id)}'`)
          .join(',')})`;

        if ('collectionIds' in props && props.collectionIds) {
          return `${teamIdWhere} ${datasetIdWhere} AND collection_id IN (${props.collectionIds
            .map((id) => `'${String(id)}'`)
            .join(',')})`;
        }

        return `${teamIdWhere} ${datasetIdWhere}`;
      }

      if ('idList' in props && Array.isArray(props.idList)) {
        if (props.idList.length === 0) return;
        return `${teamIdWhere} id IN (${props.idList.map((id) => String(id)).join(',')})`;
      }
      return Promise.reject('deleteDatasetData: no where');
    })();

    if (!where) return;

    try {
      await PgClient.delete(DatasetVectorTableName, {
        where: [where]
      });
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.delete({
        ...props,
        retry: retry - 1
      });
    }
  };
  embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
    const {
      teamId,
      datasetIds,
      vector,
      limit,
      forbidCollectionIdList,
      filterCollectionIdList,
      retry = 2
    } = props;

    // Get forbid collection
    const formatForbidCollectionIdList = (() => {
      if (!filterCollectionIdList) return forbidCollectionIdList;
      const list = forbidCollectionIdList
        .map((id) => String(id))
        .filter((id) => !filterCollectionIdList.includes(id));
      return list;
    })();
    const forbidCollectionSql =
      formatForbidCollectionIdList.length > 0
        ? `AND collection_id NOT IN (${formatForbidCollectionIdList.map((id) => `'${id}'`).join(',')})`
        : '';

    // Filter by collectionId
    const formatFilterCollectionId = (() => {
      if (!filterCollectionIdList) return;

      return filterCollectionIdList
        .map((id) => String(id))
        .filter((id) => !forbidCollectionIdList.includes(id));
    })();
    const filterCollectionIdSql = formatFilterCollectionId
      ? `AND collection_id IN (${formatFilterCollectionId.map((id) => `'${id}'`).join(',')})`
      : '';
    // Empty data
    if (formatFilterCollectionId && formatFilterCollectionId.length === 0) {
      return { results: [] };
    }

    try {
      const results: any = await PgClient.query(
        `BEGIN;
          SET LOCAL hnsw.ef_search = ${global.systemEnv?.hnswEfSearch || 100};
          SET LOCAL hnsw.iterative_scan = relaxed_order;
          WITH relaxed_results AS MATERIALIZED (
            select id, collection_id, vector <#> '[${vector}]' AS score
            from ${DatasetVectorTableName}
            where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
              ${filterCollectionIdSql}
              ${forbidCollectionSql}
            order by score limit ${limit}
          ) SELECT id, collection_id, score FROM relaxed_results ORDER BY score;
        COMMIT;`
      );
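      // The multi-statement query yields one result per statement
      // (BEGIN, two SETs, SELECT, COMMIT), so index 3 holds the SELECT rows.
      // pgvector's <#> operator returns the negative inner product, hence the
      // sign flip on score below.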
      const rows = results?.[3]?.rows as PgSearchRawType[];

      if (!Array.isArray(rows)) {
        return {
          results: []
        };
      }

      return {
        results: rows.map((item) => ({
          id: String(item.id),
          collectionId: item.collection_id,
          score: item.score * -1
        }))
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      return this.embRecall({
        ...props,
        retry: retry - 1
      });
    }
  };
  getVectorDataByTime = async (start: Date, end: Date) => {
    const { rows } = await PgClient.query<{
      id: string;
      team_id: string;
      dataset_id: string;
    }>(`SELECT id, team_id, dataset_id
      FROM ${DatasetVectorTableName}
      WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(
        end
      ).format('YYYY-MM-DD HH:mm:ss')}';
    `);

    return rows.map((item) => ({
      id: String(item.id),
      teamId: item.team_id,
      datasetId: item.dataset_id
    }));
  };
  getVectorCountByTeamId = async (teamId: string) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [['team_id', String(teamId)]]
    });

    return total;
  };
  getVectorCountByDatasetId = async (teamId: string, datasetId: string) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [['team_id', String(teamId)], 'and', ['dataset_id', String(datasetId)]]
    });

    return total;
  };
  getVectorCountByCollectionId = async (
    teamId: string,
    datasetId: string,
    collectionId: string
  ) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [
        ['team_id', String(teamId)],
        'and',
        ['dataset_id', String(datasetId)],
        'and',
        ['collection_id', String(collectionId)]
      ]
    });

    return total;
  };
}
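For orientation, a minimal usage sketch of the controller above. Only the class and its method shapes come from this file; the import path, ids, and the 1536-dim stand-in vector are illustrative assumptions:

import { PgVectorCtrl } from './index';

const ctrl = new PgVectorCtrl();

const demo = async () => {
  // Create the table and indexes if they do not exist yet.
  await ctrl.init();

  // Insert one embedding row (a real caller passes a model-generated vector).
  const { insertId } = await ctrl.insert({
    teamId: 'team-1',
    datasetId: 'ds-1',
    collectionId: 'col-1',
    vector: Array(1536).fill(0.01)
  });

  // Recall the nearest vectors within the dataset.
  const { results } = await ctrl.embRecall({
    teamId: 'team-1',
    datasetIds: ['ds-1'],
    vector: Array(1536).fill(0.01),
    limit: 10,
    forbidCollectionIdList: []
  });
  console.log(insertId, results);
};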