FastGPT/packages/service/common/vectorStore/pg/class.ts
Archer e75d81d05a V4.9.1 feature (#4206)
* fix: remove DefaultTeam (#4037)

* fix: rewrite logic for getting application-bound knowledge base information (#4057)

* fix: rewrite logic for getting application-bound knowledge base information

* update package

* fix: import dataset step error; perf: ai proxy avatar (#4074)

* perf: pg config params

* perf: ai proxy avatar

* fix: import dataset step error

* feat: data input ux

* perf: app dataset rewrite

* fix: text extraction does not support arrayString, arrayNumber, and other jsonSchema types (#4079)

* update doc; perf: model test (#4098)

* perf: extract array

* update doc

* perf: model test

* perf: model test

* perf: think tag parse (#4102)

* chat quote reader (#3912)

* init chat quote full text reader

* linked structure

* dataset data linked

* optimize code

* fix ts build

* test finish

* delete log

* fix

* fix ts

* fix ts

* remove nextId

* initial scroll

* fix

* fix

* perf: chunk read (#4109)

* package

* perf: chunk read

* feat: api dataset support pdf parse; fix: chunk reader auth (#4117)

* feat: api dataset support pdf parse

* fix: chunk reader auth

* feat: invitation link (#3979)

* feat: invitation link schema and apis

* feat: add invitation link

* feat: member status: active, leave, forbidden

* fix: expires show hours and minutes

* feat: invalid invitation link hint

* fix: typo

* chore: fix typo & i18n

* fix

* perf: fe

* feat: add ttl index for 30-day-clean-up

* perf: invite member code (#4118)

* perf: invite member code

* fix: ts

* fix: model test channel id; fix: quote reader (#4123)

* fix: model test channel id

* fix: quote reader

* fix chat quote reader (#4125)

* perf: model test; perf: sidebar trigger (#4127)

* fix: import dataset step error; perf: ai proxy avatar (#4074)

* perf: pg config params

* perf: ai proxy avatar

* fix: import dataset step error

* feat: data input ux

* perf: app dataset rewrite

* perf: model test

* perf: sidebar trigger

* lock

* update nanoid version

* fix: select component ux

* fix: ts

* fix: vitest

* remove test

* fix: prompt toolcall ui (#4139)

* load log error adapt

* fix: prompt toolcall ui

* perf: commercial function tip

* update package

* perf: copy link (#4147)

* fix(i18n): namespace (#4143)

* hide dataset source (#4152)

* hide dataset source

* perf: reader

* chore: move all tests into a single folder (#4160)

* fix modal close scroll (#4162)

* fix modal close scroll

* update refresh

* feat: rerank modal select and weight (#4164)

* fix loadInitData refresh (#4169)

* fix

* fix

* form input number default & api dataset max token

* feat: mix search weight (#4170)

* feat: mix search weight

* feat: svg render

* fix: avatar error remove (#4173)

* fix: avatar error remove

* fix: index

* fix: guide

* fix: auth

* update package; fix: input data model ui (#4181)

* update package

* fix: ts

* update config

* update jieba package

* add type sign

* fix: input data ui

* fix: page title refresh (#4186)

* fix: ts

* update jieba package

* fix: page title refresh

* fix: remove member length check when opening invite create modal (#4193)

* add env to check internal ip (#4187)

* fix: ts

* update jieba package

* add env to check internal ip

* package

* fix: jieba

* reset package

* update config

* fix: jieba package

* init shell

* init version

* change team reload

* update jieba package (#4200)

* update jieba package

* package

* update package

* remove invalid code

* action

* package (#4201)

* package

* update package

* remove invalid code

* package

* remove i18n tip (#4202)

* doc (#4205)

* fix: i18n (#4208)

* fix: next config (#4207)

* reset package

* i18n

* update config

* i18n

* remove log

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
Co-authored-by: shilin <39396378+shilin66@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
2025-03-18 14:40:41 +08:00

/* pg vector crud */
import { DatasetVectorTableName } from '../constants';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient, connectPg } from './index';
import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import {
  DelDatasetVectorCtrlProps,
  EmbeddingRecallCtrlProps,
  EmbeddingRecallResponse,
  InsertVectorControllerProps
} from '../controller.d';
import dayjs from 'dayjs';
import { addLog } from '../../system/log';
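
/**
 * Controller for the pgvector-backed vector store: bootstraps the table and
 * indexes, and provides insert/delete, embedding recall, and count helpers.
 */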
export class PgVectorCtrl {
  constructor() {}
  init = async () => {
    try {
      await connectPg();
      await PgClient.query(`
        CREATE EXTENSION IF NOT EXISTS vector;
        CREATE TABLE IF NOT EXISTS ${DatasetVectorTableName} (
            id BIGSERIAL PRIMARY KEY,
            vector VECTOR(1536) NOT NULL,
            team_id VARCHAR(50) NOT NULL,
            dataset_id VARCHAR(50) NOT NULL,
            collection_id VARCHAR(50) NOT NULL,
            createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        );
      `);
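      // HNSW index over inner product (vector_ip_ops); m and ef_construction
      // trade index build time and memory for recall quality.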
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS vector_index ON ${DatasetVectorTableName} USING hnsw (vector vector_ip_ops) WITH (m = 32, ef_construction = 128);`
      );
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS team_dataset_collection_index ON ${DatasetVectorTableName} USING btree(team_id, dataset_id, collection_id);`
      );
      await PgClient.query(
        `CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${DatasetVectorTableName} USING btree(createtime);`
      );
      // Suggested autovacuum settings at ~100k rows:
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.1,
      //     autovacuum_analyze_scale_factor = 0.05,
      //     autovacuum_vacuum_threshold = 50,
      //     autovacuum_analyze_threshold = 50,
      //     autovacuum_vacuum_cost_delay = 20,
      //     autovacuum_vacuum_cost_limit = 200
      //   );`);
      // Suggested autovacuum settings at ~1M rows:
      // await PgClient.query(`
      //   ALTER TABLE modeldata SET (
      //     autovacuum_vacuum_scale_factor = 0.01,
      //     autovacuum_analyze_scale_factor = 0.02,
      //     autovacuum_vacuum_threshold = 1000,
      //     autovacuum_analyze_threshold = 1000,
      //     autovacuum_vacuum_cost_delay = 10,
      //     autovacuum_vacuum_cost_limit = 2000
      //   );`)

      addLog.info('init pg successful');
    } catch (error) {
      addLog.error('init pg error', error);
    }
  };
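  // Insert one vector row; retries with a 500 ms delay, up to `retry` attempts.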
  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;

    try {
      const { rowCount, rows } = await PgClient.insert(DatasetVectorTableName, {
        values: [
          [
            { key: 'vector', value: `[${vector}]` },
            { key: 'team_id', value: String(teamId) },
            { key: 'dataset_id', value: String(datasetId) },
            { key: 'collection_id', value: String(collectionId) }
          ]
        ]
      });

      if (rowCount === 0) {
        return Promise.reject('insertDatasetData: no insert');
      }

      return {
        insertId: rows[0].id
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.insert({
        ...props,
        retry: retry - 1
      });
    }
  };
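  // Delete vectors by a single id, by dataset/collection ids, or by an id list;
  // every variant is scoped to team_id.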
  delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
    const { teamId, retry = 2 } = props;
    const teamIdWhere = `team_id='${String(teamId)}' AND`;

    const where = await (() => {
      if ('id' in props && props.id) return `${teamIdWhere} id=${props.id}`;

      if ('datasetIds' in props && props.datasetIds) {
        const datasetIdWhere = `dataset_id IN (${props.datasetIds
          .map((id) => `'${String(id)}'`)
          .join(',')})`;

        if ('collectionIds' in props && props.collectionIds) {
          return `${teamIdWhere} ${datasetIdWhere} AND collection_id IN (${props.collectionIds
            .map((id) => `'${String(id)}'`)
            .join(',')})`;
        }

        return `${teamIdWhere} ${datasetIdWhere}`;
      }

      if ('idList' in props && Array.isArray(props.idList)) {
        if (props.idList.length === 0) return;
        return `${teamIdWhere} id IN (${props.idList.map((id) => String(id)).join(',')})`;
      }

      return Promise.reject('deleteDatasetData: no where');
    })();

    if (!where) return;

    try {
      await PgClient.delete(DatasetVectorTableName, {
        where: [where]
      });
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.delete({
        ...props,
        retry: retry - 1
      });
    }
  };
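  // Embedding recall: approximate nearest-neighbour search over the HNSW index,
  // optionally restricted to, or excluding, specific collections.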
  embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
    const {
      teamId,
      datasetIds,
      vector,
      limit,
      forbidCollectionIdList,
      filterCollectionIdList,
      retry = 2
    } = props;

    // Forbidden collections, minus any ids that are explicitly filtered in
    const formatForbidCollectionIdList = (() => {
      if (!filterCollectionIdList) return forbidCollectionIdList;
      const list = forbidCollectionIdList
        .map((id) => String(id))
        .filter((id) => !filterCollectionIdList.includes(id));
      return list;
    })();
    const forbidCollectionSql =
      formatForbidCollectionIdList.length > 0
        ? `AND collection_id NOT IN (${formatForbidCollectionIdList.map((id) => `'${id}'`).join(',')})`
        : '';

    // Filter by collectionId
    const formatFilterCollectionId = (() => {
      if (!filterCollectionIdList) return;
      return filterCollectionIdList
        .map((id) => String(id))
        .filter((id) => !forbidCollectionIdList.includes(id));
    })();
    const filterCollectionIdSql = formatFilterCollectionId
      ? `AND collection_id IN (${formatFilterCollectionId.map((id) => `'${id}'`).join(',')})`
      : '';

    // A filter list was given but every id in it was forbidden: nothing to search
    if (formatFilterCollectionId && formatFilterCollectionId.length === 0) {
      return { results: [] };
    }
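    // `<#>` is pgvector's negative inner product operator, so the most similar
    // vectors sort first; scores are negated back to positive on the way out.
    // hnsw.iterative_scan = relaxed_order (pgvector >= 0.8.0) keeps scanning the
    // index when filters discard candidates; the outer ORDER BY restores strict
    // ordering. The batch sends five statements, so the SELECT is results[3].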
    try {
      const results: any = await PgClient.query(
        `BEGIN;
          SET LOCAL hnsw.ef_search = ${global.systemEnv?.pgHNSWEfSearch || 100};
          SET LOCAL hnsw.iterative_scan = relaxed_order;
          WITH relaxed_results AS MATERIALIZED (
            SELECT id, collection_id, vector <#> '[${vector}]' AS score
            FROM ${DatasetVectorTableName}
            WHERE team_id='${teamId}'
              AND dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
              ${filterCollectionIdSql}
              ${forbidCollectionSql}
            ORDER BY score LIMIT ${limit}
          ) SELECT id, collection_id, score FROM relaxed_results ORDER BY score;
        COMMIT;`
      );
      const rows = results?.[3]?.rows as PgSearchRawType[];

      return {
        results: rows.map((item) => ({
          id: String(item.id),
          collectionId: item.collection_id,
          score: item.score * -1
        }))
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      return this.embRecall({
        ...props,
        retry: retry - 1
      });
    }
  };
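  // Fetch id/team/dataset for all vectors created inside a time window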
  getVectorDataByTime = async (start: Date, end: Date) => {
    const { rows } = await PgClient.query<{
      id: string;
      team_id: string;
      dataset_id: string;
    }>(`SELECT id, team_id, dataset_id
      FROM ${DatasetVectorTableName}
      WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(
        end
      ).format('YYYY-MM-DD HH:mm:ss')}';
    `);

    return rows.map((item) => ({
      id: String(item.id),
      teamId: item.team_id,
      datasetId: item.dataset_id
    }));
  };
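  // Row counts scoped to team / dataset / collection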
  getVectorCountByTeamId = async (teamId: string) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [['team_id', String(teamId)]]
    });
    return total;
  };
  getVectorCountByDatasetId = async (teamId: string, datasetId: string) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [['team_id', String(teamId)], 'and', ['dataset_id', String(datasetId)]]
    });
    return total;
  };
  getVectorCountByCollectionId = async (
    teamId: string,
    datasetId: string,
    collectionId: string
  ) => {
    const total = await PgClient.count(DatasetVectorTableName, {
      where: [
        ['team_id', String(teamId)],
        'and',
        ['dataset_id', String(datasetId)],
        'and',
        ['collection_id', String(collectionId)]
      ]
    });
    return total;
  };
}
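
/* Usage sketch (hypothetical caller; ids and vectors are illustrative only):
 *
 *   const vectorCtrl = new PgVectorCtrl();
 *   await vectorCtrl.init();
 *
 *   const { insertId } = await vectorCtrl.insert({
 *     teamId,
 *     datasetId,
 *     collectionId,
 *     vector: embedding // number[], dimension 1536
 *   });
 *
 *   const { results } = await vectorCtrl.embRecall({
 *     teamId,
 *     datasetIds: [datasetId],
 *     vector: queryEmbedding,
 *     limit: 10,
 *     forbidCollectionIdList: [],
 *     filterCollectionIdList: undefined
 *   });
 *   // results: [{ id, collectionId, score }], higher score = more similar
 */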