* Milvus (#1644)

* feat: support regex

* 4.8.3 test and fix (#1648)

* perf: version tip

* feat: sandbox support log

* fix: debug component render

* fix: share page header

* fix: input guide auth

* fix: iso viewport

* remove file

* fix: route url

* feat: add debug timeout

* perf: reference select support trigger

* perf: session code

* perf: theme

* perf: load milvus
This commit is contained in:
Archer
2024-06-01 09:26:11 +08:00
committed by GitHub
parent 9fc6a8c74a
commit a259d034b8
81 changed files with 1775 additions and 594 deletions

View File

@@ -2,18 +2,18 @@ import { connectionMongo, ClientSession } from './index';
/**
 * Run `fn` inside a MongoDB transaction.
 *
 * Commits when `fn` resolves, aborts when it rejects, and ends the session
 * exactly once in `finally` so sessions cannot leak. (The previous text
 * called `startTransaction()` twice and `endSession()` up to twice.)
 */
export const mongoSessionRun = async <T = unknown>(fn: (session: ClientSession) => Promise<T>) => {
  const session = await connectionMongo.startSession();

  try {
    // Start inside `try` so a failure here still reaches the finally/endSession.
    session.startTransaction();
    const result = await fn(session);
    await session.commitTransaction();
    return result as T;
  } catch (error) {
    await session.abortTransaction();
    return Promise.reject(error);
  } finally {
    await session.endSession();
  }
};

View File

@@ -1,13 +1,35 @@
import dayjs from 'dayjs';
import chalk from 'chalk';
// Log severity levels used by addLog; the string value doubles as the level name.
enum LogLevelEnum {
debug = 'debug',
info = 'info',
warn = 'warn',
error = 'error'
}
// Colorized console prefix for each log level (chalk styles the bracket tag).
const logMap = {
[LogLevelEnum.debug]: {
levelLog: chalk.green('[Debug]')
},
[LogLevelEnum.info]: {
levelLog: chalk.blue('[Info]')
},
[LogLevelEnum.warn]: {
levelLog: chalk.yellow('[Warn]')
},
[LogLevelEnum.error]: {
levelLog: chalk.red('[Error]')
}
};
/* add logger */
export const addLog = {
log(level: 'info' | 'warn' | 'error', msg: string, obj: Record<string, any> = {}) {
log(level: LogLevelEnum, msg: string, obj: Record<string, any> = {}) {
const stringifyObj = JSON.stringify(obj);
const isEmpty = Object.keys(obj).length === 0;
console.log(
`[${level.toLocaleUpperCase()}] ${dayjs().format('YYYY-MM-DD HH:mm:ss')} ${msg} ${
`${logMap[level].levelLog} ${dayjs().format('YYYY-MM-DD HH:mm:ss')} ${msg} ${
level !== 'error' && !isEmpty ? stringifyObj : ''
}`
);
@@ -44,14 +66,17 @@ export const addLog = {
});
} catch (error) {}
},
debug(msg: string, obj?: Record<string, any>) {
this.log(LogLevelEnum.debug, msg, obj);
},
info(msg: string, obj?: Record<string, any>) {
this.log('info', msg, obj);
this.log(LogLevelEnum.info, msg, obj);
},
warn(msg: string, obj?: Record<string, any>) {
this.log('warn', msg, obj);
this.log(LogLevelEnum.warn, msg, obj);
},
error(msg: string, error?: any) {
this.log('error', msg, {
this.log(LogLevelEnum.error, msg, {
message: error?.message || error,
stack: error?.stack,
...(error?.config && {

View File

@@ -0,0 +1,6 @@
// Logical database / table (collection) names shared by all vector store backends.
export const DatasetVectorDbName = 'fastgpt';
export const DatasetVectorTableName = 'modeldata';
// Connection settings from the environment. PG_ADDRESS reuses the existing
// PG_URL variable; the MILVUS_* values select the Milvus backend when set.
export const PG_ADDRESS = process.env.PG_URL;
export const MILVUS_ADDRESS = process.env.MILVUS_ADDRESS;
export const MILVUS_TOKEN = process.env.MILVUS_TOKEN;

View File

@@ -1,3 +1,5 @@
import type { EmbeddingRecallItemType } from './type';
export type DeleteDatasetVectorProps = (
| { id: string }
| { datasetIds: string[]; collectionIds?: string[] }
@@ -5,12 +7,19 @@ export type DeleteDatasetVectorProps = (
) & {
teamId: string;
};
/** Delete request as seen by a concrete vector-store controller. */
export type DelDatasetVectorCtrlProps = DeleteDatasetVectorProps & {
// Remaining retry attempts; controllers decrement this on failure.
retry?: number;
};
/** Ownership keys stored alongside every inserted vector. */
export type InsertVectorProps = {
teamId: string;
datasetId: string;
collectionId: string;
};
/** Insert request as seen by a concrete vector-store controller. */
export type InsertVectorControllerProps = InsertVectorProps & {
// A single embedding vector (one row per call).
vector: number[];
retry?: number;
};
export type EmbeddingRecallProps = {
teamId: string;
@@ -18,3 +27,11 @@ export type EmbeddingRecallProps = {
// similarity?: number;
// efSearch?: number;
};
/** Recall request as seen by a concrete vector-store controller. */
export type EmbeddingRecallCtrlProps = EmbeddingRecallProps & {
// Query embedding to search with.
vector: number[];
// Maximum number of results to return.
limit: number;
retry?: number;
};
/** Backend-agnostic recall result shape. */
export type EmbeddingRecallResponse = {
results: EmbeddingRecallItemType[];
};

View File

@@ -1,18 +1,25 @@
/* vector crud */
import { PgVector } from './pg/class';
import { PgVectorCtrl } from './pg/class';
import { getVectorsByText } from '../../core/ai/embedding';
import { InsertVectorProps } from './controller.d';
import { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
import { MILVUS_ADDRESS, PG_ADDRESS } from './constants';
import { MilvusCtrl } from './milvus/class';
/**
 * Pick the vector store backend from the environment: Postgres when
 * PG_ADDRESS is set, Milvus when MILVUS_ADDRESS is set, defaulting to
 * Postgres. (Removed a stale unconditional `return new PgVector()` that made
 * the branches unreachable and referenced a class no longer imported.)
 */
const getVectorObj = () => {
  if (PG_ADDRESS) return new PgVectorCtrl();
  if (MILVUS_ADDRESS) return new MilvusCtrl();
  return new PgVectorCtrl();
};
// Instantiate the backend once and share it across every exported operation,
// so all callers use the same controller (and its cached client/pool) instead
// of constructing a fresh controller per export. (Removed the duplicated
// per-call `getVectorObj()` export declarations that shadowed these names.)
const Vector = getVectorObj();

export const initVectorStore = Vector.init;
export const deleteDatasetDataVector = Vector.delete;
export const recallFromVectorStore = Vector.embRecall;
export const getVectorDataByTime = Vector.getVectorDataByTime;
export const getVectorCountByTeamId = Vector.getVectorCountByTeamId;
export const insertDatasetDataVector = async ({
model,
@@ -27,9 +34,9 @@ export const insertDatasetDataVector = async ({
input: query,
type: 'db'
});
const { insertId } = await getVectorObj().insert({
const { insertId } = await Vector.insert({
...props,
vectors
vector: vectors[0]
});
return {

View File

@@ -0,0 +1,287 @@
import { DataType, LoadState, MilvusClient } from '@zilliz/milvus2-sdk-node';
import {
DatasetVectorDbName,
DatasetVectorTableName,
MILVUS_ADDRESS,
MILVUS_TOKEN
} from '../constants';
import type {
DelDatasetVectorCtrlProps,
EmbeddingRecallCtrlProps,
EmbeddingRecallResponse,
InsertVectorControllerProps
} from '../controller.d';
import { delay } from '@fastgpt/global/common/system/utils';
import { addLog } from '../../../common/system/log';
/**
 * Milvus implementation of the dataset vector store.
 *
 * All rows live in one collection (`DatasetVectorTableName`) inside the
 * `DatasetVectorDbName` database, tagged with teamId/datasetId/collectionId
 * so every query can be scoped to its owner.
 */
export class MilvusCtrl {
  constructor() {}

  /**
   * Return the Milvus client, creating and caching it on `global` on first
   * use. Rejects when MILVUS_ADDRESS is not configured.
   */
  getClient = async () => {
    if (!MILVUS_ADDRESS) {
      return Promise.reject('MILVUS_ADDRESS is not set');
    }
    if (global.milvusClient) return global.milvusClient;

    global.milvusClient = new MilvusClient({
      address: MILVUS_ADDRESS,
      token: MILVUS_TOKEN
    });

    addLog.info(`Milvus connected`);

    return global.milvusClient;
  };

  /**
   * Ensure database, collection, indexes exist and the collection is loaded
   * into memory. Safe to call repeatedly.
   */
  init = async () => {
    const client = await this.getClient();

    // init db(zilliz cloud will error)
    try {
      const { db_names } = await client.listDatabases();

      if (!db_names.includes(DatasetVectorDbName)) {
        await client.createDatabase({
          db_name: DatasetVectorDbName
        });
      }

      await client.useDatabase({
        db_name: DatasetVectorDbName
      });
    } catch (error) {}

    // init collection and index
    const { value: hasCollection } = await client.hasCollection({
      collection_name: DatasetVectorTableName
    });
    if (!hasCollection) {
      const result = await client.createCollection({
        collection_name: DatasetVectorTableName,
        description: 'Store dataset vector',
        enableDynamicField: true,
        fields: [
          {
            name: 'id',
            data_type: DataType.Int64,
            is_primary_key: true,
            autoID: true
          },
          {
            // NOTE(review): dim is hard-coded to 1536 — assumes every
            // embedding model used produces 1536-dim vectors; confirm.
            name: 'vector',
            data_type: DataType.FloatVector,
            dim: 1536
          },
          { name: 'teamId', data_type: DataType.VarChar, max_length: 64 },
          { name: 'datasetId', data_type: DataType.VarChar, max_length: 64 },
          { name: 'collectionId', data_type: DataType.VarChar, max_length: 64 },
          {
            name: 'createTime',
            data_type: DataType.Int64
          }
        ],
        index_params: [
          {
            field_name: 'vector',
            index_name: 'vector_HNSW',
            index_type: 'HNSW',
            metric_type: 'IP',
            params: { efConstruction: 32, M: 64 }
          },
          {
            field_name: 'teamId',
            index_type: 'Trie'
          },
          {
            field_name: 'datasetId',
            index_type: 'Trie'
          },
          {
            field_name: 'collectionId',
            index_type: 'Trie'
          },
          {
            field_name: 'createTime',
            index_type: 'STL_SORT'
          }
        ]
      });

      addLog.info(`Create milvus collection: `, result);
    }

    // Load the collection into memory so searches can run.
    const { state: colLoadState } = await client.getLoadState({
      collection_name: DatasetVectorTableName
    });

    if (
      colLoadState === LoadState.LoadStateNotExist ||
      colLoadState === LoadState.LoadStateNotLoad
    ) {
      await client.loadCollectionSync({
        collection_name: DatasetVectorTableName
      });
      addLog.info(`Milvus collection load success`);
    }
  };

  /**
   * Insert one vector row; retries up to `retry` times with a 500ms backoff.
   * Returns the auto-generated row id as a string.
   */
  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
    const client = await this.getClient();
    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;

    try {
      const result = await client.insert({
        collection_name: DatasetVectorTableName,
        data: [
          {
            vector,
            teamId: String(teamId),
            datasetId: String(datasetId),
            collectionId: String(collectionId),
            createTime: Date.now()
          }
        ]
      });

      // Milvus reports ids under int_id or str_id depending on the pk type.
      const insertId = (() => {
        if ('int_id' in result.IDs) {
          return `${result.IDs.int_id.data?.[0]}`;
        }
        return `${result.IDs.str_id.data?.[0]}`;
      })();

      return {
        insertId: insertId
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.insert({
        ...props,
        retry: retry - 1
      });
    }
  };

  /**
   * Delete vectors by id / dataset ids (+ optional collection ids) / id list,
   * always constrained to the owning team. Retries with backoff on failure.
   */
  delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
    const { teamId, retry = 2 } = props;
    const client = await this.getClient();

    const teamIdWhere = `(teamId=="${String(teamId)}")`;
    // Build the Milvus boolean filter for whichever selector was provided.
    // NOTE(review): filters are string-interpolated; ids look like internal
    // object ids, not user-controlled text — confirm upstream validation.
    const where = await (() => {
      if ('id' in props && props.id) return `(id==${props.id})`;

      if ('datasetIds' in props && props.datasetIds) {
        const datasetIdWhere = `(datasetId in [${props.datasetIds
          .map((id) => `"${String(id)}"`)
          .join(',')}])`;

        if ('collectionIds' in props && props.collectionIds) {
          return `${datasetIdWhere} and (collectionId in [${props.collectionIds
            .map((id) => `"${String(id)}"`)
            .join(',')}])`;
        }

        return `${datasetIdWhere}`;
      }

      if ('idList' in props && Array.isArray(props.idList)) {
        // An explicitly empty id list is a no-op, not an error.
        if (props.idList.length === 0) return;
        return `(id in [${props.idList.map((id) => String(id)).join(',')}])`;
      }
      return Promise.reject('deleteDatasetData: no where');
    })();

    if (!where) return;

    const concatWhere = `${teamIdWhere} and ${where}`;

    try {
      await client.delete({
        collection_name: DatasetVectorTableName,
        filter: concatWhere
      });
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      await delay(500);
      return this.delete({
        ...props,
        retry: retry - 1
      });
    }
  };

  /**
   * Vector similarity search scoped to a team and a set of datasets.
   * Retries with a 500ms backoff, consistent with insert()/delete().
   */
  embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
    const client = await this.getClient();
    const { teamId, datasetIds, vector, limit, retry = 2 } = props;

    try {
      const { results } = await client.search({
        collection_name: DatasetVectorTableName,
        data: vector,
        limit,
        filter: `(teamId == "${teamId}") and (datasetId in [${datasetIds.map((id) => `"${String(id)}"`).join(',')}])`,
        output_fields: ['collectionId']
      });

      const rows = results as {
        score: number;
        id: string;
        collectionId: string;
      }[];

      return {
        results: rows.map((item) => ({
          id: String(item.id),
          collectionId: item.collectionId,
          score: item.score
        }))
      };
    } catch (error) {
      if (retry <= 0) {
        return Promise.reject(error);
      }
      // Fix: previously retried immediately; back off like insert()/delete().
      await delay(500);
      return this.embRecall({
        ...props,
        retry: retry - 1
      });
    }
  };

  /** Count all vector rows owned by the given team. */
  getVectorCountByTeamId = async (teamId: string) => {
    const client = await this.getClient();

    const result = await client.query({
      collection_name: DatasetVectorTableName,
      output_fields: ['count(*)'],
      filter: `teamId == "${String(teamId)}"`
    });

    const total = result.data?.[0]?.['count(*)'] as number;

    return total;
  };

  /** List id/teamId/datasetId for rows created between `start` and `end`. */
  getVectorDataByTime = async (start: Date, end: Date) => {
    const client = await this.getClient();
    const startTimestamp = new Date(start).getTime();
    const endTimestamp = new Date(end).getTime();

    const result = await client.query({
      collection_name: DatasetVectorTableName,
      output_fields: ['id', 'teamId', 'datasetId'],
      filter: `(createTime >= ${startTimestamp}) and (createTime <= ${endTimestamp})`
    });

    const rows = result.data as {
      id: string;
      teamId: string;
      datasetId: string;
    }[];

    return rows.map((item) => ({
      id: String(item.id),
      teamId: item.teamId,
      datasetId: item.datasetId
    }));
  };
}

View File

@@ -1,18 +1,180 @@
/* pg vector crud */
import { DatasetVectorTableName } from '../constants';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient, connectPg } from './index';
import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import {
initPg,
insertDatasetDataVector,
deleteDatasetDataVector,
embeddingRecall,
getVectorDataByTime,
getVectorCountByTeamId
} from './controller';
DelDatasetVectorCtrlProps,
EmbeddingRecallCtrlProps,
EmbeddingRecallResponse,
InsertVectorControllerProps
} from '../controller.d';
import dayjs from 'dayjs';
export class PgVector {
export class PgVectorCtrl {
constructor() {}
init = initPg;
insert = insertDatasetDataVector;
delete = deleteDatasetDataVector;
recall = embeddingRecall;
getVectorCountByTeamId = getVectorCountByTeamId;
getVectorDataByTime = getVectorDataByTime;
init = async () => {
try {
await connectPg();
await PgClient.query(`
CREATE EXTENSION IF NOT EXISTS vector;
CREATE TABLE IF NOT EXISTS ${DatasetVectorTableName} (
id BIGSERIAL PRIMARY KEY,
vector VECTOR(1536) NOT NULL,
team_id VARCHAR(50) NOT NULL,
dataset_id VARCHAR(50) NOT NULL,
collection_id VARCHAR(50) NOT NULL,
createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`);
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS vector_index ON ${DatasetVectorTableName} USING hnsw (vector vector_ip_ops) WITH (m = 32, ef_construction = 128);`
);
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS team_dataset_collection_index ON ${DatasetVectorTableName} USING btree(team_id, dataset_id, collection_id);`
);
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${DatasetVectorTableName} USING btree(createtime);`
);
console.log('init pg successful');
} catch (error) {
console.log('init pg error', error);
}
};
insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
const { teamId, datasetId, collectionId, vector, retry = 3 } = props;
try {
const { rows } = await PgClient.insert(DatasetVectorTableName, {
values: [
[
{ key: 'vector', value: `[${vector}]` },
{ key: 'team_id', value: String(teamId) },
{ key: 'dataset_id', value: String(datasetId) },
{ key: 'collection_id', value: String(collectionId) }
]
]
});
return {
insertId: rows[0].id
};
} catch (error) {
if (retry <= 0) {
return Promise.reject(error);
}
await delay(500);
return this.insert({
...props,
retry: retry - 1
});
}
};
delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
const { teamId, retry = 2 } = props;
const teamIdWhere = `team_id='${String(teamId)}' AND`;
const where = await (() => {
if ('id' in props && props.id) return `${teamIdWhere} id=${props.id}`;
if ('datasetIds' in props && props.datasetIds) {
const datasetIdWhere = `dataset_id IN (${props.datasetIds
.map((id) => `'${String(id)}'`)
.join(',')})`;
if ('collectionIds' in props && props.collectionIds) {
return `${teamIdWhere} ${datasetIdWhere} AND collection_id IN (${props.collectionIds
.map((id) => `'${String(id)}'`)
.join(',')})`;
}
return `${teamIdWhere} ${datasetIdWhere}`;
}
if ('idList' in props && Array.isArray(props.idList)) {
if (props.idList.length === 0) return;
return `${teamIdWhere} id IN (${props.idList.map((id) => String(id)).join(',')})`;
}
return Promise.reject('deleteDatasetData: no where');
})();
if (!where) return;
try {
await PgClient.delete(DatasetVectorTableName, {
where: [where]
});
} catch (error) {
if (retry <= 0) {
return Promise.reject(error);
}
await delay(500);
return this.delete({
...props,
retry: retry - 1
});
}
};
embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
const { teamId, datasetIds, vector, limit, retry = 2 } = props;
try {
const results: any = await PgClient.query(
`
BEGIN;
SET LOCAL hnsw.ef_search = ${global.systemEnv?.pgHNSWEfSearch || 100};
select id, collection_id, vector <#> '[${vector}]' AS score
from ${DatasetVectorTableName}
where team_id='${teamId}'
AND dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
order by score limit ${limit};
COMMIT;`
);
const rows = results?.[2]?.rows as PgSearchRawType[];
return {
results: rows.map((item) => ({
id: String(item.id),
collectionId: item.collection_id,
score: item.score * -1
}))
};
} catch (error) {
if (retry <= 0) {
return Promise.reject(error);
}
return this.embRecall({
...props,
retry: retry - 1
});
}
};
getVectorCountByTeamId = async (teamId: string) => {
const total = await PgClient.count(DatasetVectorTableName, {
where: [['team_id', String(teamId)]]
});
return total;
};
getVectorDataByTime = async (start: Date, end: Date) => {
const { rows } = await PgClient.query<{
id: string;
team_id: string;
dataset_id: string;
}>(`SELECT id, team_id, dataset_id
FROM ${DatasetVectorTableName}
WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(
end
).format('YYYY-MM-DD HH:mm:ss')}';
`);
return rows.map((item) => ({
id: String(item.id),
teamId: item.team_id,
datasetId: item.dataset_id
}));
};
}

View File

@@ -1,195 +0,0 @@
/* pg vector crud */
import { PgDatasetTableName } from '@fastgpt/global/common/vectorStore/constants';
import { delay } from '@fastgpt/global/common/system/utils';
import { PgClient, connectPg } from './index';
import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
import { EmbeddingRecallItemType } from '../type';
import { DeleteDatasetVectorProps, EmbeddingRecallProps, InsertVectorProps } from '../controller.d';
import dayjs from 'dayjs';
// Create the pgvector extension, dataset table and indexes if they do not
// already exist. Errors are logged but swallowed (best-effort init).
export async function initPg() {
try {
await connectPg();
await PgClient.query(`
CREATE EXTENSION IF NOT EXISTS vector;
CREATE TABLE IF NOT EXISTS ${PgDatasetTableName} (
id BIGSERIAL PRIMARY KEY,
vector VECTOR(1536) NOT NULL,
team_id VARCHAR(50) NOT NULL,
dataset_id VARCHAR(50) NOT NULL,
collection_id VARCHAR(50) NOT NULL,
createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`);
// HNSW inner-product index used by embeddingRecall.
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS vector_index ON ${PgDatasetTableName} USING hnsw (vector vector_ip_ops) WITH (m = 32, ef_construction = 128);`
);
// Composite btree backing the team/dataset/collection filters.
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS team_dataset_collection_index ON ${PgDatasetTableName} USING btree(team_id, dataset_id, collection_id);`
);
// Btree on createtime backing getVectorDataByTime.
await PgClient.query(
`CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${PgDatasetTableName} USING btree(createtime);`
);
console.log('init pg successful');
} catch (error) {
console.log('init pg error', error);
}
}
// Insert one vector row (only vectors[0] is stored). Retries up to `retry`
// times with a 500ms backoff; returns the BIGSERIAL id of the new row.
export const insertDatasetDataVector = async (
props: InsertVectorProps & {
vectors: number[][];
retry?: number;
}
): Promise<{ insertId: string }> => {
const { teamId, datasetId, collectionId, vectors, retry = 3 } = props;
try {
const { rows } = await PgClient.insert(PgDatasetTableName, {
values: [
[
// Only the first embedding is persisted, serialized as a pgvector literal.
{ key: 'vector', value: `[${vectors[0]}]` },
{ key: 'team_id', value: String(teamId) },
{ key: 'dataset_id', value: String(datasetId) },
{ key: 'collection_id', value: String(collectionId) }
]
]
});
return {
insertId: rows[0].id
};
} catch (error) {
if (retry <= 0) {
return Promise.reject(error);
}
await delay(500);
return insertDatasetDataVector({
...props,
retry: retry - 1
});
}
};
// Delete vectors by id / dataset ids (+ optional collection ids) / id list,
// always constrained to the owning team. Retries with a 500ms backoff.
// NOTE(review): the WHERE clause is string-built; ids are presumably internal
// object ids rather than user-controlled text — confirm upstream validation.
export const deleteDatasetDataVector = async (
props: DeleteDatasetVectorProps & {
retry?: number;
}
): Promise<any> => {
const { teamId, retry = 2 } = props;
const teamIdWhere = `team_id='${String(teamId)}' AND`;
// Build the WHERE clause for whichever selector variant was provided.
const where = await (() => {
if ('id' in props && props.id) return `${teamIdWhere} id=${props.id}`;
if ('datasetIds' in props && props.datasetIds) {
const datasetIdWhere = `dataset_id IN (${props.datasetIds
.map((id) => `'${String(id)}'`)
.join(',')})`;
if ('collectionIds' in props && props.collectionIds) {
return `${teamIdWhere} ${datasetIdWhere} AND collection_id IN (${props.collectionIds
.map((id) => `'${String(id)}'`)
.join(',')})`;
}
return `${teamIdWhere} ${datasetIdWhere}`;
}
if ('idList' in props && Array.isArray(props.idList)) {
// An explicitly empty id list is treated as a no-op, not an error.
if (props.idList.length === 0) return;
return `${teamIdWhere} id IN (${props.idList.map((id) => `'${String(id)}'`).join(',')})`;
}
return Promise.reject('deleteDatasetData: no where');
})();
if (!where) return;
try {
await PgClient.delete(PgDatasetTableName, {
where: [where]
});
} catch (error) {
if (retry <= 0) {
return Promise.reject(error);
}
await delay(500);
return deleteDatasetDataVector({
...props,
retry: retry - 1
});
}
};
// Vector similarity recall scoped to a team and a set of datasets, using
// only vectors[0] as the query embedding. Retries (without backoff) on error.
export const embeddingRecall = async (
props: EmbeddingRecallProps & {
vectors: number[][];
limit: number;
retry?: number;
}
): Promise<{
results: EmbeddingRecallItemType[];
}> => {
const { teamId, datasetIds, vectors, limit, retry = 2 } = props;
try {
const results: any = await PgClient.query(
`
BEGIN;
SET LOCAL hnsw.ef_search = ${global.systemEnv?.pgHNSWEfSearch || 100};
select id, collection_id, vector <#> '[${vectors[0]}]' AS score
from ${PgDatasetTableName}
where team_id='${teamId}'
AND dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
order by score limit ${limit};
COMMIT;`
);
// Statements are BEGIN(0) / SET(1) / SELECT(2): pick the SELECT's rows.
const rows = results?.[2]?.rows as PgSearchRawType[];
return {
results: rows.map((item) => ({
id: item.id,
collectionId: item.collection_id,
// `<#>` sorts ascending; negate so larger score = more similar.
score: item.score * -1
}))
};
} catch (error) {
console.log(error);
if (retry <= 0) {
return Promise.reject(error);
}
return embeddingRecall({
...props,
retry: retry - 1
});
}
};
/* Count how many vector rows belong to the given team. */
export const getVectorCountByTeamId = async (teamId: string) => {
  return await PgClient.count(PgDatasetTableName, {
    where: [['team_id', String(teamId)]]
  });
};
// List id/teamId/datasetId for rows whose createtime falls between `start`
// and `end` (inclusive), formatting bounds as 'YYYY-MM-DD HH:mm:ss'.
export const getVectorDataByTime = async (start: Date, end: Date) => {
const { rows } = await PgClient.query<{
id: string;
team_id: string;
dataset_id: string;
}>(`SELECT id, team_id, dataset_id
FROM ${PgDatasetTableName}
WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(end).format(
'YYYY-MM-DD HH:mm:ss'
)}';
`);
// Normalize snake_case columns to the camelCase shape callers expect.
return rows.map((item) => ({
id: String(item.id),
teamId: item.team_id,
datasetId: item.dataset_id
}));
};

View File

@@ -2,6 +2,7 @@ import { delay } from '@fastgpt/global/common/system/utils';
import { addLog } from '../../system/log';
import { Pool } from 'pg';
import type { QueryResultRow } from 'pg';
import { PG_ADDRESS } from '../constants';
export const connectPg = async (): Promise<Pool> => {
if (global.pgClient) {
@@ -9,7 +10,7 @@ export const connectPg = async (): Promise<Pool> => {
}
global.pgClient = new Pool({
connectionString: process.env.PG_URL,
connectionString: PG_ADDRESS,
max: Number(process.env.DB_MAX_LINK || 20),
min: 10,
keepAlive: true,

View File

@@ -1,7 +1,9 @@
import type { Pool } from 'pg';
import { MilvusClient } from '@zilliz/milvus2-sdk-node';
declare global {
// Connection handles cached on `global` (see connectPg / MilvusCtrl.getClient);
// presumably so dev hot reloads reuse the same connections — confirm.
var pgClient: Pool | null;
var milvusClient: MilvusClient | null;
}
export type EmbeddingRecallItemType = {