Mirror of https://github.com/labring/FastGPT.git (synced 2025-10-15 07:31:19 +00:00)
V4.12.0 features (#5435)
* add logs chart (#5352)
* charts
* chart data
* log chart
* delete
* rename api
* fix
* move api
* fix
* fix
* pro config
* fix
* feat: Repository interaction (#5356)
* feat: 1 the feature seems fine now; will test again tomorrow
* feat: 2 fixed yesterday's leftover bug, but the select-all button broke again
* feat: 3 third pass; fixed the select-all bug
* feat: 4 fourth pass; small details to polish next
* feat: 5 oops
* feat: 6
* feat: 6 pr
* feat: 7
* feat: 8
* feat: 9
* feat: 10
* feat: 11
* feat: 12
* perf: checkbox ui
* refactor: tweak login layout (#5357)

Co-authored-by: Archer <545436317@qq.com>

* login ui
* app chat log chart pro display (#5392)
* app chat log chart pro display
* add canopen props
* perf: pro tag tip
* perf: pro tag tip
* feat: openrouter provider (#5406)
* perf: login ui
* feat: openrouter provider
* provider
* perf: custom error throw
* perf: emb batch (#5407)
* perf: emb batch
* perf: vector retry
* doc
* doc (#5411)
* doc
* fix: team folder will add to workflow
* fix: generateToc shell
* Tool price (#5376)
* resolve conflicts for cherry-pick
* fix i18n
* Enhance system plugin template data structure and update ToolSelectModal to include CostTooltip component
* refactor: update systemKeyCost type to support array of objects in plugin and workflow types
* refactor: simplify systemKeyCost type across plugin and workflow types to a single number
* refactor: streamline systemKeyCost handling in plugin and workflow components
* fix
* fix
* perf: toolset price config; fix: workflow array selector ui (#5419)
* fix: workflow array selector ui
* update default model tip
* perf: toolset price config
* doc
* fix: test
* Refactor/chat (#5418)
* refactor: add homepage configuration; add home chat page; add side bar animated collapse and layout
* fix: fix lint rules
* chore: improve logics and code
* chore: clearer logics
* chore: adjust api

---------

Co-authored-by: Archer <545436317@qq.com>

* perf: chat setting code
* del history
* logo image
* perf: home chat ui
* feat: enhance chat response handling with external links and user info (#5427)
* feat: enhance chat response handling with external links and user info
* fix
* cite code
* perf: toolset add in workflow
* fix: test
* fix: search parentId
* Fix/chat (#5434)
* wip: rebased onto upstream
* wip: adapt mobile UI
* fix: fix chat page logic and UI
* fix: fix UI and improve some logics
* fix: model selector missing logo; vision model to retrieve file
* perf: role selector
* fix: chat ui
* optimize export app chat log (#5436)
* doc
* chore: move components to proper directory; fix the api to get app list (#5437)
* chore: improve team app panel display form (#5438)
* feat: add home chat log tab
* chore: improve team app panel display form
* chore: improve log panel
* fix: spec
* doc
* fix: log permission
* fix: dataset schema required
* add loading status
* remove ui weight
* manage log
* fix: log detail per
* doc
* fix: log menu
* rename permission
* bg color
* fix: app log per
* fix: log key selector
* fix: log
* doc

---------

Co-authored-by: heheer <zhiyu44@qq.com>
Co-authored-by: colnii <1286949794@qq.com>
Co-authored-by: 伍闲犬 <76519998+xqvvu@users.noreply.github.com>
Co-authored-by: Ctrlz <143257420+ctrlz526@users.noreply.github.com>
Co-authored-by: 伍闲犬 <whoeverimf5@gmail.com>
Co-authored-by: heheer <heheer@sealos.io>
@@ -5,6 +5,7 @@ import axios, {
   type AxiosRequestConfig
 } from 'axios';
 import { FastGPTProUrl } from '../system/constants';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 interface ConfigType {
   headers?: { [key: string]: string };
@@ -78,7 +79,7 @@ instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err)
 export function request(url: string, data: any, config: ConfigType, method: Method): any {
   if (!FastGPTProUrl) {
     console.log('未部署商业版接口', url);
-    return Promise.reject('The The request was denied...');
+    return Promise.reject(new UserError('The request was denied...'));
   }
 
   /* Remove empty fields */
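A recurring change in this commit is swapping bare string rejections for `new UserError(...)`. The `UserError` class itself is not shown in this diff; a minimal sketch of what such a typed error wrapper could look like (the shape is an assumption, the real class lives in `@fastgpt/global/common/error/utils` and may differ):

```ts
// Assumed shape of UserError: an Error subclass tagged by name so global
// handlers can tell expected user-facing errors apart from genuine bugs.
class UserError extends Error {
  constructor(message: string) {
    super(message);
    this.name = 'UserError';
  }
}

// Rejecting with an Error subclass instead of a string also preserves the
// stack trace, which a plain string rejection throws away.
function requireProUrl(proUrl?: string): Promise<void> {
  if (!proUrl) {
    return Promise.reject(new UserError('The request was denied...'));
  }
  return Promise.resolve();
}
```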
@@ -151,10 +151,6 @@ export async function getFileById({
     _id: new Types.ObjectId(fileId)
   });
 
-  // if (!file) {
-  //   return Promise.reject('File not found');
-  // }
-
   return file || undefined;
 }
@@ -11,6 +11,7 @@ import { UserError } from '@fastgpt/global/common/error/utils';
 
 export const maxImgSize = 1024 * 1024 * 12;
+const base64MimeRegex = /data:image\/([^\)]+);base64/;
 
 export async function uploadMongoImg({
   base64Img,
   teamId,
@@ -22,13 +23,13 @@ export async function uploadMongoImg({
   forever?: Boolean;
 }) {
   if (base64Img.length > maxImgSize) {
-    return Promise.reject('Image too large');
+    return Promise.reject(new UserError('Image too large'));
   }
 
   const [base64Mime, base64Data] = base64Img.split(',');
   // Check if mime type is valid
   if (!base64MimeRegex.test(base64Mime)) {
-    return Promise.reject('Invalid image base64');
+    return Promise.reject(new UserError('Invalid image base64'));
   }
 
   const mime = `image/${base64Mime.match(base64MimeRegex)?.[1] ?? 'image/jpeg'}`;
@@ -39,7 +40,7 @@ export async function uploadMongoImg({
   }
 
   if (!extension || !imageFileType.includes(`.${extension}`)) {
-    return Promise.reject(`Invalid image file type: ${mime}`);
+    return Promise.reject(new UserError(`Invalid image file type: ${mime}`));
   }
 
   const { _id } = await retryFn(() =>
@@ -4,6 +4,7 @@ import path from 'path';
 import type { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { bucketNameMap } from '@fastgpt/global/common/file/constants';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 export type FileType = {
   fieldname: string;
@@ -61,7 +62,7 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
       // check bucket name
       const bucketName = (req.body?.bucketName || originBucketName) as `${BucketNameEnum}`;
       if (bucketName && !bucketNameMap[bucketName]) {
-        return reject('BucketName is invalid');
+        return reject(new UserError('BucketName is invalid'));
       }
 
       // @ts-ignore
@@ -17,8 +17,7 @@ export type InsertVectorProps = {
   collectionId: string;
 };
 export type InsertVectorControllerProps = InsertVectorProps & {
-  vector: number[];
-  retry?: number;
+  vectors: number[][];
 };
 
 export type EmbeddingRecallProps = {
@@ -2,7 +2,12 @@
 import { PgVectorCtrl } from './pg';
 import { ObVectorCtrl } from './oceanbase';
 import { getVectorsByText } from '../../core/ai/embedding';
-import { type DelDatasetVectorCtrlProps, type InsertVectorProps } from './controller.d';
+import type {
+  EmbeddingRecallCtrlProps} from './controller.d';
+import {
+  type DelDatasetVectorCtrlProps,
+  type InsertVectorProps
+} from './controller.d';
 import { type EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { MILVUS_ADDRESS, PG_ADDRESS, OCEANBASE_ADDRESS } from './constants';
 import { MilvusCtrl } from './milvus';
@@ -35,7 +40,8 @@ const onIncrCache = (teamId: string) => incrValueToCache(getChcheKey(teamId), 1)
 const Vector = getVectorObj();
 
 export const initVectorStore = Vector.init;
-export const recallFromVectorStore = Vector.embRecall;
+export const recallFromVectorStore = (props: EmbeddingRecallCtrlProps) =>
+  retryFn(() => Vector.embRecall(props));
 export const getVectorDataByTime = Vector.getVectorDataByTime;
 
 export const getVectorCountByTeamId = async (teamId: string) => {
@@ -58,34 +64,34 @@ export const getVectorCountByCollectionId = Vector.getVectorCountByCollectionId;
 
 export const insertDatasetDataVector = async ({
   model,
-  query,
+  inputs,
   ...props
 }: InsertVectorProps & {
-  query: string;
+  inputs: string[];
   model: EmbeddingModelItemType;
 }) => {
-  return retryFn(async () => {
-    const { vectors, tokens } = await getVectorsByText({
-      model,
-      input: query,
-      type: 'db'
-    });
-    const { insertId } = await Vector.insert({
-      ...props,
-      vector: vectors[0]
-    });
-
-    onIncrCache(props.teamId);
-
-    return {
-      tokens,
-      insertId
-    };
+  const { vectors, tokens } = await getVectorsByText({
+    model,
+    input: inputs,
+    type: 'db'
+  });
+  const { insertIds } = await retryFn(() =>
+    Vector.insert({
+      ...props,
+      vectors
+    })
+  );
+
+  onIncrCache(props.teamId);
+
+  return {
+    tokens,
+    insertIds
+  };
 };
 
 export const deleteDatasetDataVector = async (props: DelDatasetVectorCtrlProps) => {
-  const result = await Vector.delete(props);
+  const result = await retryFn(() => Vector.delete(props));
   onDelCache(props.teamId);
   return result;
 };
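The `perf: vector retry` part of this commit replaces every hand-rolled `try/catch` + `delay` + `retry - 1` loop in the vector controllers with a shared `retryFn` wrapper. Its implementation is not part of this diff; a minimal sketch, assuming it retries a fixed number of times with a short pause and rethrows the last error:

```ts
const delay = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Assumed shape of the shared helper from @fastgpt/global/common/system/utils:
// run `fn`, and on failure wait briefly and try again, up to `retryTimes` times.
async function retryFn<T>(fn: () => Promise<T>, retryTimes = 3): Promise<T> {
  try {
    return await fn();
  } catch (error) {
    if (retryTimes <= 0) return Promise.reject(error);
    await delay(500);
    return retryFn(fn, retryTimes - 1);
  }
}

// Usage, as in the controller above:
// const result = await retryFn(() => Vector.delete(props));
```

Centralizing the retry policy this way also lets the index controller decide which operations retry (insert, delete, recall) instead of each database driver re-implementing the loop.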
@@ -11,7 +11,7 @@ import type {
   EmbeddingRecallResponse,
   InsertVectorControllerProps
 } from '../controller.d';
-import { delay } from '@fastgpt/global/common/system/utils';
+import { delay, retryFn } from '@fastgpt/global/common/system/utils';
 import { addLog } from '../../system/log';
 import { customNanoid } from '@fastgpt/global/common/string/tools';
 
@@ -27,6 +27,7 @@ export class MilvusCtrl {
       address: MILVUS_ADDRESS,
       token: MILVUS_TOKEN
     });
+    await global.milvusClient.connectPromise;
 
     addLog.info(`Milvus connected`);
 
@@ -124,9 +125,9 @@ export class MilvusCtrl {
     }
   };
 
-  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
+  insert = async (props: InsertVectorControllerProps): Promise<{ insertIds: string[] }> => {
     const client = await this.getClient();
-    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;
+    const { teamId, datasetId, collectionId, vectors } = props;
 
     const generateId = () => {
       // in js, the max safe integer is 2^53 - 1: 9007199254740991
@@ -136,45 +137,32 @@ export class MilvusCtrl {
       const restDigits = customNanoid('1234567890', 15);
       return Number(`${firstDigit}${restDigits}`);
     };
-    const id = generateId();
-    try {
-      const result = await client.insert({
-        collection_name: DatasetVectorTableName,
-        data: [
-          {
-            id,
-            vector,
-            teamId: String(teamId),
-            datasetId: String(datasetId),
-            collectionId: String(collectionId),
-            createTime: Date.now()
-          }
-        ]
-      });
-
-      const insertId = (() => {
-        if ('int_id' in result.IDs) {
-          return `${result.IDs.int_id.data?.[0]}`;
-        }
-        return `${result.IDs.str_id.data?.[0]}`;
-      })();
-
-      return {
-        insertId: insertId
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.insert({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    const result = await client.insert({
+      collection_name: DatasetVectorTableName,
+      data: vectors.map((vector) => ({
+        id: generateId(),
+        vector,
+        teamId: String(teamId),
+        datasetId: String(datasetId),
+        collectionId: String(collectionId),
+        createTime: Date.now()
+      }))
+    });
+
+    const insertIds = (() => {
+      if ('int_id' in result.IDs) {
+        return result.IDs.int_id.data.map((id) => String(id));
+      }
+      return result.IDs.str_id.data.map((id) => String(id));
+    })();
+
+    return {
+      insertIds
+    };
   };
   delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
-    const { teamId, retry = 2 } = props;
+    const { teamId } = props;
     const client = await this.getClient();
 
     const teamIdWhere = `(teamId=="${String(teamId)}")`;
@@ -206,33 +194,15 @@ export class MilvusCtrl {
 
     const concatWhere = `${teamIdWhere} and ${where}`;
 
-    try {
-      await client.delete({
-        collection_name: DatasetVectorTableName,
-        filter: concatWhere
-      });
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.delete({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    await client.delete({
+      collection_name: DatasetVectorTableName,
+      filter: concatWhere
+    });
   };
   embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
     const client = await this.getClient();
-    const {
-      teamId,
-      datasetIds,
-      vector,
-      limit,
-      forbidCollectionIdList,
-      filterCollectionIdList,
-      retry = 2
-    } = props;
+    const { teamId, datasetIds, vector, limit, forbidCollectionIdList, filterCollectionIdList } =
+      props;
 
     // Forbid collection
    const formatForbidCollectionIdList = (() => {
@@ -262,37 +232,29 @@ export class MilvusCtrl {
       return { results: [] };
     }
 
-    try {
-      const { results } = await client.search({
+    const { results } = await retryFn(() =>
+      client.search({
         collection_name: DatasetVectorTableName,
         data: vector,
         limit,
         filter: `(teamId == "${teamId}") and (datasetId in [${datasetIds.map((id) => `"${id}"`).join(',')}]) ${collectionIdQuery} ${forbidColQuery}`,
         output_fields: ['collectionId']
-      });
+      })
+    );
 
-      const rows = results as {
-        score: number;
-        id: string;
-        collectionId: string;
-      }[];
+    const rows = results as {
+      score: number;
      id: string;
+      collectionId: string;
+    }[];
 
-      return {
-        results: rows.map((item) => ({
-          id: String(item.id),
-          collectionId: item.collectionId,
-          score: item.score
-        }))
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      return this.embRecall({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    return {
+      results: rows.map((item) => ({
+        id: String(item.id),
+        collectionId: item.collectionId,
+        score: item.score
+      }))
    };
   };
 
   getVectorCountByTeamId = async (teamId: string) => {
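With `vector`/`retry` replaced by `vectors: number[][]`, one Milvus `insert` call now writes a whole batch of rows and returns their ids. A hypothetical call site (the declared type mirrors the new controller props; ids and vector values are illustrative):

```ts
// Hypothetical call site for the batched insert. The real caller is
// insertDatasetDataVector in the vectorDB index controller.
declare const Vector: {
  insert: (props: {
    teamId: string;
    datasetId: string;
    collectionId: string;
    vectors: number[][];
  }) => Promise<{ insertIds: string[] }>;
};

const { insertIds } = await Vector.insert({
  teamId: 'team-1',
  datasetId: 'dataset-1',
  collectionId: 'collection-1',
  vectors: [
    [0.12, -0.03, 0.98],
    [0.55, 0.41, -0.27]
  ]
});
// One row per input vector; insertIds should line up with the input order.
```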
@@ -1,6 +1,6 @@
 /* oceanbase vector crud */
 import { DatasetVectorTableName } from '../constants';
-import { delay } from '@fastgpt/global/common/system/utils';
+import { delay, retryFn } from '@fastgpt/global/common/system/utils';
 import { ObClient } from './controller';
 import { type RowDataPacket } from 'mysql2/promise';
 import {
@@ -42,41 +42,30 @@ export class ObVectorCtrl {
       addLog.error('init oceanbase error', error);
     }
   };
-  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
-    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;
+  insert = async (props: InsertVectorControllerProps): Promise<{ insertIds: string[] }> => {
+    const { teamId, datasetId, collectionId, vectors } = props;
 
-    try {
-      const { rowCount, rows } = await ObClient.insert(DatasetVectorTableName, {
-        values: [
-          [
-            { key: 'vector', value: `[${vector}]` },
-            { key: 'team_id', value: String(teamId) },
-            { key: 'dataset_id', value: String(datasetId) },
-            { key: 'collection_id', value: String(collectionId) }
-          ]
-        ]
-      });
+    const values = vectors.map((vector) => [
+      { key: 'vector', value: `[${vector}]` },
+      { key: 'team_id', value: String(teamId) },
+      { key: 'dataset_id', value: String(datasetId) },
+      { key: 'collection_id', value: String(collectionId) }
+    ]);
 
-      if (rowCount === 0) {
-        return Promise.reject('insertDatasetData: no insert');
-      }
+    const { rowCount, rows } = await ObClient.insert(DatasetVectorTableName, {
+      values
    });
 
-      return {
-        insertId: rows[0].id
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.insert({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    if (rowCount === 0) {
+      return Promise.reject('insertDatasetData: no insert');
+    }
+
+    return {
+      insertIds: rows.map((row) => row.id)
+    };
   };
   delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
-    const { teamId, retry = 2 } = props;
+    const { teamId } = props;
 
     const teamIdWhere = `team_id='${String(teamId)}' AND`;
 
@@ -106,31 +95,13 @@ export class ObVectorCtrl {
 
     if (!where) return;
 
-    try {
-      await ObClient.delete(DatasetVectorTableName, {
-        where: [where]
-      });
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.delete({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    await ObClient.delete(DatasetVectorTableName, {
+      where: [where]
+    });
   };
   embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
-    const {
-      teamId,
-      datasetIds,
-      vector,
-      limit,
-      forbidCollectionIdList,
-      filterCollectionIdList,
-      retry = 2
-    } = props;
+    const { teamId, datasetIds, vector, limit, forbidCollectionIdList, filterCollectionIdList } =
+      props;
 
     // Get forbid collection
     const formatForbidCollectionIdList = (() => {
@@ -161,15 +132,14 @@ export class ObVectorCtrl {
       return { results: [] };
     }
 
-    try {
-      const rows = await ObClient.query<
-        ({
-          id: string;
-          collection_id: string;
-          score: number;
-        } & RowDataPacket)[][]
-      >(
-        `BEGIN;
+    const rows = await ObClient.query<
+      ({
+        id: string;
+        collection_id: string;
+        score: number;
+      } & RowDataPacket)[][]
+    >(
+      `BEGIN;
     SET ob_hnsw_ef_search = ${global.systemEnv?.hnswEfSearch || 100};
     SELECT id, collection_id, inner_product(vector, [${vector}]) AS score
       FROM ${DatasetVectorTableName}
@@ -179,24 +149,15 @@ export class ObVectorCtrl {
       ${forbidCollectionSql}
       ORDER BY score desc APPROXIMATE LIMIT ${limit};
     COMMIT;`
-      ).then(([rows]) => rows[2]);
+    ).then(([rows]) => rows[2]);
 
-      return {
-        results: rows.map((item) => ({
-          id: String(item.id),
-          collectionId: item.collection_id,
-          score: item.score
-        }))
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      return this.embRecall({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    return {
+      results: rows.map((item) => ({
+        id: String(item.id),
+        collectionId: item.collection_id,
+        score: item.score
+      }))
    };
   };
   getVectorDataByTime = async (start: Date, end: Date) => {
     const rows = await ObClient.query<
@@ -1,6 +1,6 @@
 /* pg vector crud */
 import { DatasetVectorTableName } from '../constants';
-import { delay } from '@fastgpt/global/common/system/utils';
+import { delay, retryFn } from '@fastgpt/global/common/system/utils';
 import { PgClient, connectPg } from './controller';
 import { type PgSearchRawType } from '@fastgpt/global/core/dataset/api';
 import type {
@@ -65,41 +65,30 @@ export class PgVectorCtrl {
       addLog.error('init pg error', error);
     }
   };
-  insert = async (props: InsertVectorControllerProps): Promise<{ insertId: string }> => {
-    const { teamId, datasetId, collectionId, vector, retry = 3 } = props;
+  insert = async (props: InsertVectorControllerProps): Promise<{ insertIds: string[] }> => {
+    const { teamId, datasetId, collectionId, vectors } = props;
 
-    try {
-      const { rowCount, rows } = await PgClient.insert(DatasetVectorTableName, {
-        values: [
-          [
-            { key: 'vector', value: `[${vector}]` },
-            { key: 'team_id', value: String(teamId) },
-            { key: 'dataset_id', value: String(datasetId) },
-            { key: 'collection_id', value: String(collectionId) }
-          ]
-        ]
-      });
+    const values = vectors.map((vector) => [
+      { key: 'vector', value: `[${vector}]` },
+      { key: 'team_id', value: String(teamId) },
+      { key: 'dataset_id', value: String(datasetId) },
+      { key: 'collection_id', value: String(collectionId) }
+    ]);
 
-      if (rowCount === 0) {
-        return Promise.reject('insertDatasetData: no insert');
-      }
+    const { rowCount, rows } = await PgClient.insert(DatasetVectorTableName, {
+      values
    });
 
-      return {
-        insertId: rows[0].id
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.insert({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    if (rowCount === 0) {
+      return Promise.reject('insertDatasetData: no insert');
+    }
+
+    return {
+      insertIds: rows.map((row) => row.id)
+    };
   };
   delete = async (props: DelDatasetVectorCtrlProps): Promise<any> => {
-    const { teamId, retry = 2 } = props;
+    const { teamId } = props;
 
     const teamIdWhere = `team_id='${String(teamId)}' AND`;
 
@@ -129,31 +118,13 @@ export class PgVectorCtrl {
 
     if (!where) return;
 
-    try {
-      await PgClient.delete(DatasetVectorTableName, {
-        where: [where]
-      });
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      await delay(500);
-      return this.delete({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    await PgClient.delete(DatasetVectorTableName, {
+      where: [where]
+    });
   };
   embRecall = async (props: EmbeddingRecallCtrlProps): Promise<EmbeddingRecallResponse> => {
-    const {
-      teamId,
-      datasetIds,
-      vector,
-      limit,
-      forbidCollectionIdList,
-      filterCollectionIdList,
-      retry = 2
-    } = props;
+    const { teamId, datasetIds, vector, limit, forbidCollectionIdList, filterCollectionIdList } =
+      props;
 
     // Get forbid collection
     const formatForbidCollectionIdList = (() => {
@@ -184,9 +155,8 @@ export class PgVectorCtrl {
       return { results: [] };
     }
 
-    try {
-      const results: any = await PgClient.query(
-        `BEGIN;
+    const results: any = await PgClient.query(
+      `BEGIN;
     SET LOCAL hnsw.ef_search = ${global.systemEnv?.hnswEfSearch || 100};
     SET LOCAL hnsw.max_scan_tuples = ${global.systemEnv?.hnswMaxScanTuples || 100000};
     SET LOCAL hnsw.iterative_scan = relaxed_order;
@@ -199,31 +169,22 @@ export class PgVectorCtrl {
       order by score limit ${limit}
     ) SELECT id, collection_id, score FROM relaxed_results ORDER BY score;
     COMMIT;`
-      );
-      const rows = results?.[results.length - 2]?.rows as PgSearchRawType[];
-
-      if (!Array.isArray(rows)) {
-        return {
-          results: []
-        };
-      }
-
-      return {
-        results: rows.map((item) => ({
-          id: String(item.id),
-          collectionId: item.collection_id,
-          score: item.score * -1
-        }))
-      };
-    } catch (error) {
-      if (retry <= 0) {
-        return Promise.reject(error);
-      }
-      return this.embRecall({
-        ...props,
-        retry: retry - 1
-      });
-    }
+    );
+    const rows = results?.[results.length - 2]?.rows as PgSearchRawType[];
+
+    if (!Array.isArray(rows)) {
+      return {
+        results: []
+      };
+    }
+
+    return {
+      results: rows.map((item) => ({
+        id: String(item.id),
+        collectionId: item.collection_id,
+        score: item.score * -1
+      }))
+    };
   };
   getVectorDataByTime = async (start: Date, end: Date) => {
     const { rows } = await PgClient.query<{
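The `score * -1` in the pg recall result is unchanged by this commit but worth a note: assuming the query scores with pgvector's inner-product operator `<#>`, that operator returns the negated inner product (so ascending order means best-first for the index), and the controller flips the sign back before returning. A tiny sketch of the idea:

```ts
// pgvector's `<#>` yields the *negative* inner product so that ascending
// sort order equals best-first; flip the sign to expose "higher is better".
const rawScores = [-0.92, -0.81, -0.4]; // as returned by `embedding <#> query`
const scores = rawScores.map((s) => s * -1); // [0.92, 0.81, 0.4]
```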
@@ -3,6 +3,7 @@ import { getAxiosConfig } from '../config';
 import axios from 'axios';
 import FormData from 'form-data';
 import { type STTModelType } from '@fastgpt/global/core/ai/model.d';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 export const aiTranscriptions = async ({
   model: modelData,
@@ -14,7 +15,7 @@ export const aiTranscriptions = async ({
   headers?: Record<string, string>;
 }) => {
   if (!modelData) {
-    return Promise.reject('no model');
+    return Promise.reject(new UserError('no model'));
   }
 
   const data = new FormData();
@@ -6,7 +6,7 @@ import { addLog } from '../../../common/system/log';
 
 type GetVectorProps = {
   model: EmbeddingModelItemType;
-  input: string;
+  input: string[] | string;
   type?: `${EmbeddingTypeEnm}`;
   headers?: Record<string, string>;
 };
@@ -19,60 +19,85 @@ export async function getVectorsByText({ model, input, type, headers }: GetVecto
       message: 'input is empty'
     });
   }
-  const ai = getAIApi();
+
+  const formatInput = Array.isArray(input) ? input : [input];
+
+  // 20 size every request
+  const chunkSize = 20;
+  const chunks = [];
+  for (let i = 0; i < formatInput.length; i += chunkSize) {
+    chunks.push(formatInput.slice(i, i + chunkSize));
+  }
 
   try {
-    // input text to vector
-    const result = await ai.embeddings
-      .create(
-        {
-          ...model.defaultConfig,
-          ...(type === EmbeddingTypeEnm.db && model.dbConfig),
-          ...(type === EmbeddingTypeEnm.query && model.queryConfig),
-          model: model.model,
-          input: [input]
-        },
-        model.requestUrl
-          ? {
-              path: model.requestUrl,
-              headers: {
-                ...(model.requestAuth ? { Authorization: `Bearer ${model.requestAuth}` } : {}),
-                ...headers
-              }
-            }
-          : { headers }
-      )
-      .then(async (res) => {
-        if (!res.data) {
-          addLog.error('Embedding API is not responding', res);
-          return Promise.reject('Embedding API is not responding');
-        }
-        if (!res?.data?.[0]?.embedding) {
-          console.log(res);
-          // @ts-ignore
-          return Promise.reject(res.data?.err?.message || 'Embedding API Error');
-        }
-
-        const [tokens, vectors] = await Promise.all([
-          countPromptTokens(input),
-          Promise.all(
-            res.data
-              .map((item) => unityDimensional(item.embedding))
-              .map((item) => {
-                if (model.normalization) return normalization(item);
-                return item;
-              })
-          )
-        ]);
-
-        return {
-          tokens,
-          vectors
-        };
-      });
-
-    return result;
+    const ai = getAIApi();
+    // Process chunks sequentially
+    let totalTokens = 0;
+    const allVectors: number[][] = [];
+
+    for (const chunk of chunks) {
+      // input text to vector
+      const result = await ai.embeddings
+        .create(
+          {
+            ...model.defaultConfig,
+            ...(type === EmbeddingTypeEnm.db && model.dbConfig),
+            ...(type === EmbeddingTypeEnm.query && model.queryConfig),
+            model: model.model,
+            input: chunk
+          },
+          model.requestUrl
+            ? {
+                path: model.requestUrl,
+                headers: {
+                  ...(model.requestAuth ? { Authorization: `Bearer ${model.requestAuth}` } : {}),
+                  ...headers
+                }
+              }
+            : { headers }
+        )
+        .then(async (res) => {
+          if (!res.data) {
+            addLog.error('Embedding API is not responding', res);
+            return Promise.reject('Embedding API is not responding');
+          }
+          if (!res?.data?.[0]?.embedding) {
+            console.log(res);
+            // @ts-ignore
+            return Promise.reject(res.data?.err?.message || 'Embedding API Error');
+          }
+
+          const [tokens, vectors] = await Promise.all([
+            (async () => {
+              if (res.usage) return res.usage.total_tokens;
+
+              const tokens = await Promise.all(chunk.map((item) => countPromptTokens(item)));
+              return tokens.reduce((sum, item) => sum + item, 0);
+            })(),
+            Promise.all(
+              res.data
+                .map((item) => unityDimensional(item.embedding))
+                .map((item) => {
+                  if (model.normalization) return normalization(item);
+                  return item;
+                })
+            )
+          ]);
+
+          return {
+            tokens,
+            vectors
+          };
+        });
+
+      totalTokens += result.tokens;
+      allVectors.push(...result.vectors);
+    }
+
+    return {
+      tokens: totalTokens,
+      vectors: allVectors
+    };
   } catch (error) {
     addLog.error(`Embedding Error`, error);
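This hunk is the core of the `perf: emb batch` change: `getVectorsByText` now accepts `string[] | string`, slices the input into chunks of 20, requests embeddings per chunk sequentially, and aggregates tokens and vectors. A self-contained sketch of that control flow (`embedOnce` is a stand-in for the OpenAI-compatible `ai.embeddings.create` call; the per-request limit of 20 comes from the diff):

```ts
type EmbedResult = { tokens: number; vectors: number[][] };

// Stand-in for one embeddings request (assumption: the upstream API accepts
// up to `chunkSize` inputs per call and reports token usage).
declare function embedOnce(inputs: string[]): Promise<EmbedResult>;

async function getVectors(input: string | string[]): Promise<EmbedResult> {
  const formatInput = Array.isArray(input) ? input : [input];

  // 20 inputs per request, mirroring the chunking in the diff above.
  const chunkSize = 20;
  const chunks: string[][] = [];
  for (let i = 0; i < formatInput.length; i += chunkSize) {
    chunks.push(formatInput.slice(i, i + chunkSize));
  }

  // Process chunks sequentially and aggregate tokens and vectors in order.
  let totalTokens = 0;
  const allVectors: number[][] = [];
  for (const chunk of chunks) {
    const { tokens, vectors } = await embedOnce(chunk);
    totalTokens += tokens;
    allVectors.push(...vectors);
  }
  return { tokens: totalTokens, vectors: allVectors };
}
```

Sequential (rather than parallel) chunk processing keeps the request rate bounded, at the cost of latency for very large batches.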
packages/service/core/app/logs/chatLogsSchema.ts (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
import type { AppChatLogSchema } from '@fastgpt/global/core/app/logs/type';
|
||||
import { getMongoLogModel, Schema } from '../../../common/mongo';
|
||||
import { AppCollectionName } from '../schema';
|
||||
|
||||
export const ChatLogCollectionName = 'app_chat_logs';
|
||||
|
||||
const ChatLogSchema = new Schema({
|
||||
appId: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: AppCollectionName,
|
||||
required: true
|
||||
},
|
||||
teamId: {
|
||||
type: Schema.Types.ObjectId,
|
||||
required: true
|
||||
},
|
||||
chatId: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
userId: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
source: {
|
||||
type: String,
|
||||
required: true
|
||||
},
|
||||
sourceName: {
|
||||
type: String
|
||||
},
|
||||
createTime: {
|
||||
type: Date,
|
||||
required: true
|
||||
},
|
||||
updateTime: {
|
||||
type: Date,
|
||||
required: true
|
||||
},
|
||||
// 累计统计字段
|
||||
chatItemCount: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
errorCount: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
totalPoints: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
goodFeedbackCount: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
badFeedbackCount: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
totalResponseTime: {
|
||||
type: Number,
|
||||
default: 0
|
||||
},
|
||||
isFirstChat: {
|
||||
type: Boolean,
|
||||
default: false
|
||||
}
|
||||
});
|
||||
|
||||
ChatLogSchema.index({ teamId: 1, appId: 1, source: 1, updateTime: -1 });
|
||||
ChatLogSchema.index({ userId: 1, appId: 1, source: 1, createTime: -1 });
|
||||
|
||||
export const MongoAppChatLog = getMongoLogModel<AppChatLogSchema>(
|
||||
ChatLogCollectionName,
|
||||
ChatLogSchema
|
||||
);
|
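The two compound indexes line up with how the new log dashboard reads this collection: per-team/app log lists sorted by `updateTime`, and per-user first-chat lookups by `createTime`. A hypothetical query the first index can serve (the `source` value and page size are illustrative, not from this diff):

```ts
// Hypothetical dashboard read served by the
// { teamId: 1, appId: 1, source: 1, updateTime: -1 } index above.
async function latestChatLogs(teamId: string, appId: string) {
  return MongoAppChatLog.find({ teamId, appId, source: 'online' })
    .sort({ updateTime: -1 })
    .limit(20)
    .lean();
}
```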
@@ -46,6 +46,7 @@ import { getMCPParentId, getMCPToolRuntimeNode } from '@fastgpt/global/core/app/
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
 import { getMCPChildren } from '../mcp';
 import { cloneDeep } from 'lodash';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 type ChildAppType = SystemPluginTemplateItemType & {
   teamId?: string;
@@ -80,7 +81,7 @@ export const getSystemPluginByIdAndVersionId = async (
         app
       })
     : await getAppLatestVersion(plugin.associatedPluginId, app);
-  if (!version.versionId) return Promise.reject('App version not found');
+  if (!version.versionId) return Promise.reject(new UserError('App version not found'));
   const isLatest = version.versionId
     ? await checkIsLatestVersion({
         appId: plugin.associatedPluginId,
@@ -119,7 +120,7 @@ export const getSystemPluginByIdAndVersionId = async (
   const versionList = (plugin.versionList as SystemPluginTemplateItemType['versionList']) || [];
 
   if (versionList.length === 0) {
-    return Promise.reject('Can not find plugin version list');
+    return Promise.reject(new UserError('Can not find plugin version list'));
   }
 
   const version = versionId
@@ -304,11 +305,13 @@ export async function getChildAppPreviewNode({
       ? {
           systemToolSet: {
             toolId: app.id,
-            toolList: children.map((item) => ({
-              toolId: item.id,
-              name: parseI18nString(item.name, lang),
-              description: parseI18nString(item.intro, lang)
-            }))
+            toolList: children
+              .filter((item) => item.isActive !== false)
+              .map((item) => ({
+                toolId: item.id,
+                name: parseI18nString(item.name, lang),
+                description: parseI18nString(item.intro, lang)
+              }))
           }
         }
       : { systemTool: { toolId: app.id } })
@@ -378,8 +381,10 @@ export async function getChildAppPreviewNode({
     showTargetHandle: true,
 
     currentCost: app.currentCost,
+    systemKeyCost: app.systemKeyCost,
     hasTokenFee: app.hasTokenFee,
     hasSystemSecret: app.hasSystemSecret,
+    isFolder: app.isFolder,
 
     ...nodeIOConfig,
     outputs: nodeIOConfig.outputs.some((item) => item.type === FlowNodeOutputTypeEnum.error)
@@ -432,6 +437,7 @@ export async function getChildAppRuntimeById({
 
       originCost: 0,
       currentCost: 0,
+      systemKeyCost: 0,
      hasTokenFee: false,
       pluginOrder: 0
     };
@@ -448,6 +454,7 @@ export async function getChildAppRuntimeById({
     avatar: app.avatar || '',
     showStatus: true,
     currentCost: app.currentCost,
+    systemKeyCost: app.systemKeyCost,
     nodes: app.workflow.nodes,
     edges: app.workflow.edges,
     hasTokenFee: app.hasTokenFee
@@ -474,6 +481,7 @@ const dbPluginFormat = (item: SystemPluginConfigSchemaType): SystemPluginTemplat
     currentCost: item.currentCost,
     hasTokenFee: item.hasTokenFee,
     pluginOrder: item.pluginOrder,
+    systemKeyCost: item.systemKeyCost,
     associatedPluginId,
     userGuide,
     workflow: {
@@ -515,61 +523,63 @@ export const getSystemTools = async (): Promise<SystemPluginTemplateItemType[]>
   const tools = await APIGetSystemToolList();
 
   // Load plugin configs from the database to override the defaults
-  const systemPluginsArray = await MongoSystemPlugin.find({}).lean();
-  const systemPlugins = new Map(systemPluginsArray.map((plugin) => [plugin.pluginId, plugin]));
+  const systemToolsArray = await MongoSystemPlugin.find({}).lean();
+  const systemTools = new Map(systemToolsArray.map((plugin) => [plugin.pluginId, plugin]));
 
-  tools.forEach((tool) => {
-    // If the plugin has a DB config, apply it over the defaults
-    const dbPluginConfig = systemPlugins.get(tool.id);
+  // tools.forEach((tool) => {
+  //   // If the plugin has a DB config, apply it over the defaults
+  //   const dbPluginConfig = systemTools.get(tool.id);
 
-    if (dbPluginConfig) {
-      const children = tools.filter((item) => item.parentId === tool.id);
-      const list = [tool, ...children];
-      list.forEach((item) => {
-        item.isActive = dbPluginConfig.isActive ?? item.isActive ?? true;
-        item.originCost = dbPluginConfig.originCost ?? 0;
-        item.currentCost = dbPluginConfig.currentCost ?? 0;
-        item.hasTokenFee = dbPluginConfig.hasTokenFee ?? false;
-        item.pluginOrder = dbPluginConfig.pluginOrder ?? 0;
-      });
-    }
-  });
+  //   if (dbPluginConfig) {
+  //     const children = tools.filter((item) => item.parentId === tool.id);
+  //     const list = [tool, ...children];
+  //     list.forEach((item) => {
+  //       item.isActive = dbPluginConfig.isActive ?? item.isActive ?? true;
+  //       item.originCost = dbPluginConfig.originCost ?? 0;
+  //       item.currentCost = dbPluginConfig.currentCost ?? 0;
+  //       item.hasTokenFee = dbPluginConfig.hasTokenFee ?? false;
+  //       item.pluginOrder = dbPluginConfig.pluginOrder ?? 0;
+  //     });
+  //   }
+  // });
 
   const formatTools = tools.map<SystemPluginTemplateItemType>((item) => {
-    const dbPluginConfig = systemPlugins.get(item.id);
+    const dbPluginConfig = systemTools.get(item.id);
+    const isFolder = tools.some((tool) => tool.parentId === item.id);
 
     const versionList = (item.versionList as SystemPluginTemplateItemType['versionList']) || [];
 
     return {
       id: item.id,
       parentId: item.parentId,
-      isFolder: tools.some((tool) => tool.parentId === item.id),
+      isFolder,
 
       name: item.name,
       avatar: item.avatar,
       intro: item.description,
 
       author: item.author,
       courseUrl: item.courseUrl,
       weight: item.weight,
 
       workflow: {
         nodes: [],
         edges: []
       },
       versionList,
 
       templateType: item.templateType,
       showStatus: true,
 
-      isActive: item.isActive,
+      isActive: dbPluginConfig?.isActive ?? item.isActive ?? true,
       inputList: item?.secretInputConfig,
-      hasSystemSecret: !!dbPluginConfig?.inputListVal
+      hasSystemSecret: !!dbPluginConfig?.inputListVal,
+
+      originCost: dbPluginConfig?.originCost ?? 0,
+      currentCost: dbPluginConfig?.currentCost ?? 0,
+      systemKeyCost: dbPluginConfig?.systemKeyCost ?? 0,
+      hasTokenFee: dbPluginConfig?.hasTokenFee ?? false,
+      pluginOrder: dbPluginConfig?.pluginOrder
     };
   });
 
   // TODO: Check the app exists
-  const dbPlugins = systemPluginsArray
+  const dbPlugins = systemToolsArray
     .filter((item) => item.customConfig?.associatedPluginId)
     .map((item) => dbPluginFormat(item));
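The refactor above replaces in-place mutation of `tools` (the now commented-out `forEach`) with a pure `map` that layers the DB override on top of each tool's own default via nullish coalescing. A small sketch of the precedence, with types simplified:

```ts
type ToolDefaults = { isActive?: boolean; currentCost?: number };
type DbOverride = { isActive?: boolean; currentCost?: number } | undefined;

// Precedence: DB override ?? tool default ?? hard-coded fallback. Note that
// `??` only falls through on null/undefined, so an explicit `false` or `0`
// stored in the DB still wins over the tool's default.
function mergeToolConfig(item: ToolDefaults, dbPluginConfig: DbOverride) {
  return {
    isActive: dbPluginConfig?.isActive ?? item.isActive ?? true,
    currentCost: dbPluginConfig?.currentCost ?? 0
  };
}
```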
@@ -27,6 +27,10 @@ const SystemPluginSchema = new Schema({
   pluginOrder: {
     type: Number
   },
+  systemKeyCost: {
+    type: Number,
+    default: 0
+  },
   customConfig: Object,
   inputListVal: Object,
|
2
packages/service/core/app/plugin/type.d.ts
vendored
2
packages/service/core/app/plugin/type.d.ts
vendored
@@ -1,6 +1,7 @@
 import { SystemPluginListItemType } from '@fastgpt/global/core/app/type';
 import { FlowNodeTemplateTypeEnum } from '@fastgpt/global/core/workflow/constants';
 import type { WorkflowTemplateBasicType } from '@fastgpt/global/core/workflow/type';
+import type { InputConfigType } from '@fastgpt/global/core/workflow/type/io';
 
 export type SystemPluginConfigSchemaType = {
   pluginId: string;
@@ -10,6 +11,7 @@ export type SystemPluginConfigSchemaType = {
   hasTokenFee: boolean;
   isActive: boolean;
   pluginOrder?: number;
+  systemKeyCost?: number;
 
   customConfig?: {
     name: string;
@@ -82,8 +82,10 @@ export async function rewriteAppWorkflowToDetail({
       node.version = preview.version;
 
       node.currentCost = preview.currentCost;
+      node.systemKeyCost = preview.systemKeyCost;
       node.hasTokenFee = preview.hasTokenFee;
       node.hasSystemSecret = preview.hasSystemSecret;
+      node.isFolder = preview.isFolder;
 
       node.toolConfig = preview.toolConfig;
@@ -1,7 +1,7 @@
 import { connectionMongo, getMongoModel } from '../../common/mongo';
 const { Schema } = connectionMongo;
 import { type ChatSchema as ChatType } from '@fastgpt/global/core/chat/type.d';
-import { ChatSourceEnum, ChatSourceMap } from '@fastgpt/global/core/chat/constants';
+import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
 import {
   TeamCollectionName,
   TeamMemberCollectionName
@@ -4,6 +4,7 @@ import { addLog } from '../../common/system/log';
 import { delFileByFileIdList, getGFSCollection } from '../../common/file/gridfs/controller';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { MongoChat } from './chatSchema';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 export async function getChatItems({
   appId,
@@ -72,7 +73,8 @@ export const deleteChatFiles = async ({
   chatIdList?: string[];
   appId?: string;
 }) => {
-  if (!appId && !chatIdList) return Promise.reject('appId or chatIdList is required');
+  if (!appId && !chatIdList)
+    return Promise.reject(new UserError('appId or chatIdList is required'));
 
   const appChatIdList = await (async () => {
     if (appId) {
@@ -14,6 +14,7 @@ import { pushChatLog } from './pushChatLog';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
+import { MongoAppChatLog } from '../app/logs/chatLogsSchema';
 
 type Props = {
   chatId: string;
@@ -163,6 +164,62 @@ export async function saveChat({
     });
   });
 
+  try {
+    const userId = outLinkUid || tmbId;
+    const now = new Date();
+    const fifteenMinutesAgo = new Date(now.getTime() - 15 * 60 * 1000);
+
+    const aiResponse = processedContent.find((item) => item.obj === ChatRoleEnum.AI);
+    const errorCount = aiResponse?.responseData?.some((item) => item.errorText) ? 1 : 0;
+    const totalPoints =
+      aiResponse?.responseData?.reduce(
+        (sum: number, item: any) => sum + (item.totalPoints || 0),
+        0
+      ) || 0;
+
+    const hasHistoryChat = await MongoAppChatLog.exists({
+      appId,
+      userId,
+      createTime: { $lt: now }
+    });
+
+    await MongoAppChatLog.updateOne(
+      {
+        chatId,
+        appId,
+        updateTime: { $gte: fifteenMinutesAgo }
+      },
+      {
+        $inc: {
+          chatItemCount: 1,
+          errorCount,
+          totalPoints,
+          totalResponseTime: durationSeconds
+        },
+        $set: {
+          updateTime: now,
+          sourceName
+        },
+        $setOnInsert: {
+          appId,
+          teamId,
+          chatId,
+          userId,
+          source,
+          createTime: now,
+          goodFeedbackCount: 0,
+          badFeedbackCount: 0,
+          isFirstChat: !hasHistoryChat
+        }
+      },
+      {
+        upsert: true
+      }
+    );
+  } catch (error) {
+    addLog.error('update chat log error', error);
+  }
+
   if (isUpdateUseTime) {
     await MongoApp.findByIdAndUpdate(appId, {
       updateTime: new Date()
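The upsert above yields one `app_chat_logs` row per chat session: the filter's `updateTime: { $gte: fifteenMinutesAgo }` means a chat idle for more than 15 minutes no longer matches, so the next message upserts a fresh row; `$inc` accumulates counters on the active row and `$setOnInsert` only fires when a new row is created. A minimal sketch of that session-window semantics (field names follow the schema above; `db` is an ordinary connected mongodb client):

```ts
// Minimal sketch of the session-window upsert used by saveChat.
async function bumpChatLog(db: import('mongodb').Db, chatId: string, appId: string) {
  const now = new Date();
  const fifteenMinutesAgo = new Date(now.getTime() - 15 * 60 * 1000);

  await db.collection('app_chat_logs').updateOne(
    { chatId, appId, updateTime: { $gte: fifteenMinutesAgo } }, // active session only
    {
      $inc: { chatItemCount: 1 },        // accumulate on the matched row
      $set: { updateTime: now },         // keep the 15-minute window open
      $setOnInsert: { createTime: now }  // only runs when a new row is created
    },
    { upsert: true }
  );
}
```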
packages/service/core/chat/setting/schema.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
import { connectionMongo, getMongoModel } from '../../../common/mongo';
|
||||
import { type ChatSettingSchema as ChatSettingType } from '@fastgpt/global/core/chat/setting/type';
|
||||
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
|
||||
import { AppCollectionName } from '../../app/schema';
|
||||
|
||||
const { Schema } = connectionMongo;
|
||||
|
||||
export const ChatSettingCollectionName = 'chat_settings';
|
||||
|
||||
const ChatSettingSchema = new Schema({
|
||||
teamId: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: TeamCollectionName,
|
||||
required: true
|
||||
},
|
||||
appId: {
|
||||
type: Schema.Types.ObjectId,
|
||||
ref: AppCollectionName,
|
||||
required: true
|
||||
},
|
||||
slogan: String,
|
||||
dialogTips: String,
|
||||
selectedTools: {
|
||||
type: Array,
|
||||
default: []
|
||||
},
|
||||
homeTabTitle: String,
|
||||
wideLogoUrl: String,
|
||||
squareLogoUrl: String
|
||||
});
|
||||
|
||||
ChatSettingSchema.index({ teamId: 1 });
|
||||
|
||||
export const MongoChatSetting = getMongoModel<ChatSettingType>(
|
||||
ChatSettingCollectionName,
|
||||
ChatSettingSchema
|
||||
);
|
@@ -14,6 +14,7 @@ import { MongoDatasetCollectionTags } from './tag/schema';
 import { removeDatasetSyncJobScheduler } from './datasetSync';
 import { mongoSessionRun } from '../../common/mongo/sessionRun';
 import { removeImageByPath } from '../../common/file/image/controller';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 /* ============= dataset ========== */
 /* find all datasetId by top datasetId */
@@ -50,7 +51,7 @@ export async function findDatasetAndAllChildren({
   ]);
 
   if (!dataset) {
-    return Promise.reject('Dataset not found');
+    return Promise.reject(new UserError('Dataset not found'));
   }
 
   return [dataset, ...childDatasets];
@@ -79,7 +80,7 @@ export async function delDatasetRelevantData({
   const teamId = datasets[0].teamId;
 
   if (!teamId) {
-    return Promise.reject('TeamId is required');
+    return Promise.reject(new UserError('TeamId is required'));
   }
 
   const datasetIds = datasets.map((item) => item._id);
@@ -16,6 +16,7 @@ import { text2Chunks } from '../../worker/function';
 import { addLog } from '../../common/system/log';
 import { retryFn } from '@fastgpt/global/common/system/utils';
 import { getFileMaxSize } from '../../common/file/utils';
+import { UserError } from '@fastgpt/global/common/error/utils';
 
 export const readFileRawTextByUrl = async ({
   teamId,
@@ -200,7 +201,7 @@ export const readDatasetSourceRawText = async ({
       rawText: content
     };
   } else if (type === DatasetSourceReadTypeEnum.externalFile) {
-    if (!externalFileId) return Promise.reject('FileId not found');
+    if (!externalFileId) return Promise.reject(new UserError('FileId not found'));
     const rawText = await readFileRawTextByUrl({
       teamId,
       tmbId,
@@ -7,6 +7,10 @@ import { recallFromVectorStore } from '../../../common/vectorDB/controller';
 import { getVectorsByText } from '../../ai/embedding';
 import { getEmbeddingModel, getDefaultRerankModel, getLLMModel } from '../../ai/model';
 import { MongoDatasetData } from '../data/schema';
+import type {
+  DatasetCollectionSchemaType,
+  DatasetDataSchemaType
+} from '@fastgpt/global/core/dataset/type';
 import {
   type DatasetDataTextSchemaType,
   type SearchDataResponseItemType
@@ -27,7 +31,6 @@ import { type ChatItemType } from '@fastgpt/global/core/chat/type';
 import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { datasetSearchQueryExtension } from './utils';
 import type { RerankModelItemType } from '@fastgpt/global/core/ai/model.d';
-import { addLog } from '../../../common/system/log';
 import { formatDatasetDataValue } from '../data/controller';
 
 export type SearchDatasetDataProps = {
@@ -435,214 +438,114 @@ export async function searchDatasetData(
     } catch (error) {}
   };
   const embeddingRecall = async ({
-    query,
+    queries,
     limit,
     forbidCollectionIdList,
     filterCollectionIdList
   }: {
-    query: string;
+    queries: string[];
     limit: number;
     forbidCollectionIdList: string[];
     filterCollectionIdList?: string[];
-  }) => {
+  }): Promise<{
+    embeddingRecallResults: SearchDataResponseItemType[][];
+    tokens: number;
+  }> => {
+    if (limit === 0) {
+      return {
+        embeddingRecallResults: [],
+        tokens: 0
+      };
+    }
+
     const { vectors, tokens } = await getVectorsByText({
       model: getEmbeddingModel(model),
-      input: query,
+      input: queries,
       type: 'query'
     });
 
-    const { results } = await recallFromVectorStore({
-      teamId,
-      datasetIds,
-      vector: vectors[0],
-      limit,
-      forbidCollectionIdList,
-      filterCollectionIdList
-    });
+    const recallResults = await Promise.all(
+      vectors.map(async (vector) => {
+        return await recallFromVectorStore({
+          teamId,
+          datasetIds,
+          vector,
+          limit,
+          forbidCollectionIdList,
+          filterCollectionIdList
+        });
+      })
+    );
 
     // Get data and collections
-    const collectionIdList = Array.from(new Set(results.map((item) => item.collectionId)));
-    const [dataList, collections] = await Promise.all([
+    const collectionIdList = Array.from(
+      new Set(recallResults.map((item) => item.results.map((item) => item.collectionId)).flat())
+    );
+    const indexDataIds = Array.from(
+      new Set(recallResults.map((item) => item.results.map((item) => item.id?.trim())).flat())
+    );
+
+    const [dataMaps, collectionMaps] = await Promise.all([
       MongoDatasetData.find(
         {
           teamId,
           datasetId: { $in: datasetIds },
           collectionId: { $in: collectionIdList },
-          'indexes.dataId': { $in: results.map((item) => item.id?.trim()) }
+          'indexes.dataId': { $in: indexDataIds }
         },
         datasetDataSelectField,
         { ...readFromSecondary }
-      ).lean(),
+      )
+        .lean()
+        .then((res) => {
+          const map = new Map<string, DatasetDataSchemaType>();
+
+          res.forEach((item) => {
+            item.indexes.forEach((index) => {
+              map.set(String(index.dataId), item);
+            });
+          });
+
+          return map;
+        }),
       MongoDatasetCollection.find(
         {
           _id: { $in: collectionIdList }
         },
         datsaetCollectionSelectField,
         { ...readFromSecondary }
-      ).lean()
+      )
+        .lean()
+        .then((res) => {
+          const map = new Map<string, DatasetCollectionSchemaType>();
+
+          res.forEach((item) => {
+            map.set(String(item._id), item);
+          });
+
+          return map;
+        })
     ]);
 
-    const set = new Set<string>();
-    const formatResult = results
-      .map((item, index) => {
-        const collection = collections.find((col) => String(col._id) === String(item.collectionId));
-        if (!collection) {
-          console.log('Collection is not found', item);
-          return;
-        }
-        const data = dataList.find((data) =>
-          data.indexes.some((index) => index.dataId === item.id)
-        );
-        if (!data) {
-          console.log('Data is not found', item);
-          return;
-        }
-
-        const result: SearchDataResponseItemType = {
-          id: String(data._id),
-          updateTime: data.updateTime,
-          ...formatDatasetDataValue({
-            teamId,
-            datasetId: data.datasetId,
-            q: data.q,
-            a: data.a,
-            imageId: data.imageId,
-            imageDescMap: data.imageDescMap
-          }),
-          chunkIndex: data.chunkIndex,
-          datasetId: String(data.datasetId),
-          collectionId: String(data.collectionId),
-          ...getCollectionSourceData(collection),
-          score: [{ type: SearchScoreTypeEnum.embedding, value: item?.score || 0, index }]
-        };
-
-        return result;
-      })
-      .filter((item) => {
-        if (!item) return false;
-        if (set.has(item.id)) return false;
-        set.add(item.id);
-        return true;
-      })
-      .map((item, index) => {
-        if (!item) return;
-        return {
-          ...item,
-          score: item.score.map((item) => ({ ...item, index }))
-        };
-      }) as SearchDataResponseItemType[];
-
-    return {
-      embeddingRecallResults: formatResult,
-      tokens
-    };
-  };
-  const fullTextRecall = async ({
-    query,
-    limit,
-    filterCollectionIdList,
-    forbidCollectionIdList
-  }: {
-    query: string;
-    limit: number;
-    filterCollectionIdList?: string[];
-    forbidCollectionIdList: string[];
-  }): Promise<{
-    fullTextRecallResults: SearchDataResponseItemType[];
-    tokenLen: number;
-  }> => {
-    if (limit === 0) {
-      return {
-        fullTextRecallResults: [],
-        tokenLen: 0
-      };
-    }
-
-    try {
-      const searchResults = (await MongoDatasetDataText.aggregate(
-        [
-          {
-            $match: {
-              teamId: new Types.ObjectId(teamId),
-              $text: { $search: await jiebaSplit({ text: query }) },
-              datasetId: { $in: datasetIds.map((id) => new Types.ObjectId(id)) },
-              ...(filterCollectionIdList
-                ? {
-                    collectionId: {
-                      $in: filterCollectionIdList
-                        .filter((id) => !forbidCollectionIdList.includes(id))
-                        .map((id) => new Types.ObjectId(id))
-                    }
-                  }
-                : forbidCollectionIdList?.length
-                  ? {
-                      collectionId: {
-                        $nin: forbidCollectionIdList.map((id) => new Types.ObjectId(id))
-                      }
-                    }
-                  : {})
-            }
-          },
-          {
-            $sort: {
-              score: { $meta: 'textScore' }
-            }
-          },
-          {
-            $limit: limit
-          },
-          {
-            $project: {
-              _id: 1,
-              collectionId: 1,
-              dataId: 1,
-              score: { $meta: 'textScore' }
-            }
-          }
-        ],
-        {
-          ...readFromSecondary
-        }
-      )) as (DatasetDataTextSchemaType & { score: number })[];
-
-      // Get data and collections
-      const [dataList, collections] = await Promise.all([
-        MongoDatasetData.find(
-          {
-            _id: { $in: searchResults.map((item) => item.dataId) }
-          },
-          datasetDataSelectField,
-          { ...readFromSecondary }
-        ).lean(),
-        MongoDatasetCollection.find(
-          {
-            _id: { $in: searchResults.map((item) => item.collectionId) }
-          },
-          datsaetCollectionSelectField,
-          { ...readFromSecondary }
-        ).lean()
-      ]);
-
-      return {
-        fullTextRecallResults: searchResults
+    const embeddingRecallResults = recallResults.map((item) => {
+      const set = new Set<string>();
+      return (
+        item.results
          .map((item, index) => {
-            const collection = collections.find(
-              (col) => String(col._id) === String(item.collectionId)
-            );
+            const collection = collectionMaps.get(String(item.collectionId));
            if (!collection) {
              console.log('Collection is not found', item);
              return;
            }
-            const data = dataList.find((data) => String(data._id) === String(item.dataId));
+
+            const data = dataMaps.get(String(item.id));
            if (!data) {
              console.log('Data is not found', item);
              return;
            }
 
-            return {
+            const result: SearchDataResponseItemType = {
              id: String(data._id),
              datasetId: String(data.datasetId),
              collectionId: String(data.collectionId),
              updateTime: data.updateTime,
              ...formatDatasetDataValue({
                teamId,
@@ -653,37 +556,204 @@ export async function searchDatasetData(
                imageDescMap: data.imageDescMap
              }),
              chunkIndex: data.chunkIndex,
              indexes: data.indexes,
-              datasetId: String(data.datasetId),
-              collectionId: String(data.collectionId),
              ...getCollectionSourceData(collection),
-              score: [
-                {
-                  type: SearchScoreTypeEnum.fullText,
-                  value: item.score || 0,
-                  index
-                }
-              ]
+              score: [{ type: SearchScoreTypeEnum.embedding, value: item?.score || 0, index }]
            };
 
+            return result;
          })
+          // Multiple vectors can map to one data record; within each recall
+          // route, keep a single copy of the record at its highest rank
          .filter((item) => {
            if (!item) return false;
            if (set.has(item.id)) return false;
            set.add(item.id);
            return true;
          })
          .map((item, index) => {
-            if (!item) return;
-            return {
-              ...item,
-              score: item.score.map((item) => ({ ...item, index }))
+            return {
+              ...item!,
+              score: item!.score.map((item) => ({ ...item, index }))
            };
-          }) as SearchDataResponseItemType[],
-        tokenLen: 0
-      };
-    } catch (error) {
-      addLog.error('Full text search error', error);
+          }) as SearchDataResponseItemType[]
+      );
+    });
+
+    return {
+      embeddingRecallResults,
+      tokens
+    };
+  };
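The translated comment above ("multiple vectors can map to one data record...") is the heart of the multi-query change: each recall route is deduplicated independently, keeping only the first, highest-ranked hit per data id, then the rank index is rewritten so it is contiguous again. A minimal sketch of that dedupe-and-reindex pass, with the result type simplified:

```ts
type Scored = { id: string; score: { value: number; index: number }[] };

// Keep the first (highest-ranked) occurrence of each id, then rewrite the
// rank index so it stays contiguous after filtering.
function dedupeByRank(results: (Scored | undefined)[]): Scored[] {
  const seen = new Set<string>();
  return results
    .filter((item): item is Scored => {
      if (!item) return false;
      if (seen.has(item.id)) return false;
      seen.add(item.id);
      return true;
    })
    .map((item, index) => ({
      ...item,
      score: item.score.map((s) => ({ ...s, index }))
    }));
}
```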
+  const fullTextRecall = async ({
+    queries,
+    limit,
+    filterCollectionIdList,
+    forbidCollectionIdList
+  }: {
+    queries: string[];
+    limit: number;
+    filterCollectionIdList?: string[];
+    forbidCollectionIdList: string[];
+  }): Promise<{
+    fullTextRecallResults: SearchDataResponseItemType[][];
+  }> => {
     if (limit === 0) {
       return {
-        fullTextRecallResults: [],
-        tokenLen: 0
+        fullTextRecallResults: []
       };
     }
 
+    const recallResults = await Promise.all(
+      queries.map(async (query) => {
+        return (await MongoDatasetDataText.aggregate(
+          [
+            {
+              $match: {
+                teamId: new Types.ObjectId(teamId),
+                $text: { $search: await jiebaSplit({ text: query }) },
+                datasetId: { $in: datasetIds.map((id) => new Types.ObjectId(id)) },
+                ...(filterCollectionIdList
+                  ? {
+                      collectionId: {
+                        $in: filterCollectionIdList
+                          .filter((id) => !forbidCollectionIdList.includes(id))
+                          .map((id) => new Types.ObjectId(id))
+                      }
+                    }
+                  : forbidCollectionIdList?.length
+                    ? {
+                        collectionId: {
+                          $nin: forbidCollectionIdList.map((id) => new Types.ObjectId(id))
+                        }
+                      }
+                    : {})
+              }
+            },
+            {
+              $sort: {
+                score: { $meta: 'textScore' }
+              }
+            },
+            {
+              $limit: limit
+            },
+            {
+              $project: {
+                _id: 1,
+                collectionId: 1,
+                dataId: 1,
+                score: { $meta: 'textScore' }
+              }
+            }
+          ],
+          {
+            ...readFromSecondary
+          }
+        )) as (DatasetDataTextSchemaType & { score: number })[];
+      })
+    );
+
+    const dataIds = Array.from(
+      new Set(recallResults.map((item) => item.map((item) => item.dataId)).flat())
+    );
+    const collectionIds = Array.from(
+      new Set(recallResults.map((item) => item.map((item) => item.collectionId)).flat())
+    );
 
     // Get data and collections
+    const [dataMaps, collectionMaps] = await Promise.all([
       MongoDatasetData.find(
         {
+          _id: { $in: dataIds }
         },
         datasetDataSelectField,
         { ...readFromSecondary }
+      )
+        .lean()
+        .then((res) => {
+          const map = new Map<string, DatasetDataSchemaType>();
+
+          res.forEach((item) => {
+            map.set(String(item._id), item);
+          });
+
+          return map;
+        }),
       MongoDatasetCollection.find(
         {
+          _id: { $in: collectionIds }
         },
         datsaetCollectionSelectField,
         { ...readFromSecondary }
+      )
+        .lean()
+        .then((res) => {
+          const map = new Map<string, DatasetCollectionSchemaType>();
+
+          res.forEach((item) => {
+            map.set(String(item._id), item);
+          });
+
+          return map;
+        })
+    ]);
+
+    const fullTextRecallResults = recallResults.map((item) => {
+      return item
+        .map((item, index) => {
+          const collection = collectionMaps.get(String(item.collectionId));
+          if (!collection) {
+            console.log('Collection is not found', item);
+            return;
+          }
+
+          const data = dataMaps.get(String(item.dataId));
+          if (!data) {
+            console.log('Data is not found', item);
+            return;
+          }
+
+          return {
+            id: String(data._id),
+            datasetId: String(data.datasetId),
+            collectionId: String(data.collectionId),
+            updateTime: data.updateTime,
+            ...formatDatasetDataValue({
+              teamId,
+              datasetId: data.datasetId,
+              q: data.q,
+              a: data.a,
+              imageId: data.imageId,
+              imageDescMap: data.imageDescMap
+            }),
+            chunkIndex: data.chunkIndex,
+            indexes: data.indexes,
+            ...getCollectionSourceData(collection),
|
||||
score: [
|
||||
{
|
||||
type: SearchScoreTypeEnum.fullText,
|
||||
value: item.score || 0,
|
||||
index
|
||||
}
|
||||
]
|
||||
};
|
||||
})
|
||||
.filter((item) => {
|
||||
if (!item) return false;
|
||||
return true;
|
||||
})
|
||||
.map((item, index) => {
|
||||
return {
|
||||
...item,
|
||||
score: item!.score.map((item) => ({ ...item, index }))
|
||||
};
|
||||
}) as SearchDataResponseItemType[];
|
||||
});
|
||||
|
||||
return {
|
||||
fullTextRecallResults
|
||||
};
|
||||
};
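The recall above leans on MongoDB's text index: `$text: { $search: ... }` matches against the indexed tokens (here produced by `jiebaSplit`, which segments Chinese text into words before indexing), and `{ $meta: 'textScore' }` exposes the relevance score for sorting and projection. A stripped-down sketch of the same query shape; the database, collection, and index names here are illustrative, not the real FastGPT schema:

import { MongoClient } from 'mongodb';

// Assumes a text index exists, e.g.
// db.texts.createIndex({ fullTextToken: 'text' })
const searchByText = async (client: MongoClient, keywords: string, limit = 10) => {
  return client
    .db('fastgpt')
    .collection('texts')
    .aggregate([
      { $match: { $text: { $search: keywords } } },
      { $sort: { score: { $meta: 'textScore' } } },
      { $limit: limit },
      { $project: { _id: 1, dataId: 1, score: { $meta: 'textScore' } } }
    ])
    .toArray();
};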
const multiQueryRecall = async ({
  embeddingLimit,
@@ -692,50 +762,36 @@ export async function searchDatasetData(
  embeddingLimit: number;
  fullTextLimit: number;
}) => {
  const [{ forbidCollectionIdList }, filterCollectionIdList] = await Promise.all([
    getForbidData(),
    filterCollectionByMetadata()
  ]);

  const [{ tokens, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
    embeddingRecall({
      queries,
      limit: embeddingLimit,
      forbidCollectionIdList,
      filterCollectionIdList
    }),
    fullTextRecall({
      queries,
      limit: fullTextLimit,
      filterCollectionIdList,
      forbidCollectionIdList
    })
  ]);

  // rrf concat
  const rrfEmbRecall = datasetSearchResultConcat(
    embeddingRecallResults.map((list) => ({ k: 60, list }))
  ).slice(0, embeddingLimit);
  const rrfFTRecall = datasetSearchResultConcat(
    fullTextRecallResults.map((list) => ({ k: 60, list }))
  ).slice(0, fullTextLimit);

  return {
    tokens,
    embeddingRecallResults: rrfEmbRecall,
    fullTextRecallResults: rrfFTRecall
  };
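`datasetSearchResultConcat` merges the per-query recall lists; judging by the `// rrf concat` comment and the `k: 60` argument, it follows the usual reciprocal rank fusion scheme, where each list contributes 1 / (k + rank) per item and items are re-sorted by the summed score. A hedged sketch of that idea, not the actual FastGPT implementation:

type Ranked = { id: string };

// Reciprocal Rank Fusion: score(d) = sum over lists of 1 / (k + rank(d)),
// with rank starting at 1. k = 60 matches the value passed above.
const rrfConcat = <T extends Ranked>(lists: { k: number; list: T[] }[]): T[] => {
  const scores = new Map<string, { item: T; score: number }>();

  lists.forEach(({ k, list }) => {
    list.forEach((item, i) => {
      const inc = 1 / (k + i + 1);
      const prev = scores.get(item.id);
      if (prev) prev.score += inc;
      else scores.set(item.id, { item, score: inc });
    });
  });

  return [...scores.values()].sort((a, b) => b.score - a.score).map((v) => v.item);
};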
@@ -53,7 +53,7 @@ export async function pushDataListToTrainingQueue({
  const { model, maxToken, weight } = await (async () => {
    if (mode === TrainingModeEnum.chunk) {
      return {
        maxToken: Infinity,
        model: vectorModelData.model,
        weight: vectorModelData.weight
      };
@@ -21,6 +21,7 @@ import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { getAppVersionById } from '../../../app/version/controller';
import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
import { getUserChatInfoAndAuthTeamPoints } from '../../../../support/permission/auth/team';
import { getRunningUserInfoByTmbId } from '../../../../support/user/team/utils';

type Props = ModuleDispatchProps<{
  [NodeInputKeyEnum.userChatInput]: string;
@@ -147,6 +148,7 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
      tmbId: String(appData.tmbId),
      isChildApp: true
    },
    runningUserInfo: await getRunningUserInfoByTmbId(appData.tmbId),
    runtimeNodes,
    runtimeEdges,
    histories: chatHistories,
@@ -90,6 +90,10 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
      systemVar: {
        user: {
          id: variables.userId,
          username: runningUserInfo.username,
          contact: runningUserInfo.contact,
          membername: runningUserInfo.memberName,
          teamName: runningUserInfo.teamName,
          teamId: runningUserInfo.teamId,
          name: runningUserInfo.tmbId
        },
@@ -150,7 +154,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
    if (params.system_input_config?.type !== SystemToolInputTypeEnum.system) {
      return 0;
    }
    return (tool.systemKeyCost ?? 0) + (tool.currentCost ?? 0);
  })();

  pushTrack.runSystemTool({
@@ -152,7 +152,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
  } = data;
  const startTime = Date.now();

  await rewriteRuntimeWorkFlow({ nodes: runtimeNodes, edges: runtimeEdges, lang: data.lang });

  // Initialize the dispatch depth and auto-increment it to avoid infinite nesting
  if (!props.workflowDispatchDeep) {
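The depth bookkeeping that comment refers to is the usual guard for recursive workflow dispatch: seed a counter on the first call, bump it on every nested call, and refuse to go past a cap. Roughly, with illustrative names and an illustrative cap:

const MAX_DISPATCH_DEEP = 20; // illustrative cap, not the real constant

const guardDispatchDeep = (props: { workflowDispatchDeep?: number }) => {
  // First entry: initialize to 1; nested entry: increment.
  props.workflowDispatchDeep = (props.workflowDispatchDeep ?? 0) + 1;

  if (props.workflowDispatchDeep > MAX_DISPATCH_DEEP) {
    throw new Error('Workflow nesting is too deep');
  }
};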
@@ -20,6 +20,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { MongoApp } from '../../../core/app/schema';
import { getMCPChildren } from '../../../core/app/mcp';
import { getSystemToolRunTimeNodeFromSystemToolset } from '../utils';
import type { localeType } from '@fastgpt/global/common/i18n/type';

export const getWorkflowResponseWrite = ({
  res,
@@ -161,10 +162,12 @@ export const formatHttpError = (error: any) => {
 */
export const rewriteRuntimeWorkFlow = async ({
  nodes,
  edges,
  lang
}: {
  nodes: RuntimeNodeItemType[];
  edges: RuntimeEdgeItemType[];
  lang?: localeType;
}) => {
  const toolSetNodes = nodes.filter((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet);

@@ -195,7 +198,8 @@ export const rewriteRuntimeWorkFlow = async ({
    // systemTool
    if (systemToolId) {
      const children = await getSystemToolRunTimeNodeFromSystemToolset({
        toolSetNode,
        lang
      });
      children.forEach((node) => {
        nodes.push(node);
@@ -6,6 +6,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { parseI18nString } from '@fastgpt/global/common/i18n/utils';
import type { localeType } from '@fastgpt/global/common/i18n/type';

/* filter search result */
export const filterSearchResultsByMaxChars = async (
@@ -31,9 +32,11 @@ export const filterSearchResultsByMaxChars = async (
};

export async function getSystemToolRunTimeNodeFromSystemToolset({
  toolSetNode,
  lang = 'en'
}: {
  toolSetNode: RuntimeNodeItemType;
  lang?: localeType;
}): Promise<RuntimeNodeItemType[]> {
  const systemToolId = toolSetNode.toolConfig?.systemToolSet?.toolId!;

@@ -41,13 +44,14 @@ export async function getSystemToolRunTimeNodeFromSystemToolset({
    (item) => item.key === NodeInputKeyEnum.systemInputConfig
  );
  const tools = await getSystemTools();
  const children = tools.filter(
    (item) => item.parentId === systemToolId && item.isActive !== false
  );
  const nodes = await Promise.all(
    children.map(async (child) => {
      const toolListItem = toolSetNode.toolConfig?.systemToolSet?.toolList.find(
        (item) => item.toolId === child.id
      );

      const tool = await getSystemPluginByIdAndVersionId(child.id);

@@ -63,8 +67,8 @@ export async function getSystemToolRunTimeNodeFromSystemToolset({
        ...tool,
        inputs,
        outputs: tool.outputs ?? [],
        name: toolListItem?.name || parseI18nString(tool.name, lang),
        intro: toolListItem?.description || parseI18nString(tool.intro, lang),
        flowNodeType: FlowNodeTypeEnum.tool,
        nodeId: getNanoid(),
        toolConfig: {
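With the `lang` parameter threaded through, tool names and intros now resolve in the caller's locale, and `toolListItem?.name || ...` falls back to the localized default when the toolset carries no override. If `parseI18nString` behaves like a typical i18n helper, its shape is roughly the following; this is an assumption for illustration, not the actual FastGPT helper:

type I18nString = string | Record<string, string>;

// Sketch: return plain strings as-is, else pick the requested locale,
// else fall back to English, else the first available translation.
const parseI18nStringSketch = (value: I18nString, lang: string): string => {
  if (typeof value === 'string') return value;
  return value[lang] ?? value['en'] ?? Object.values(value)[0] ?? '';
};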
|
@@ -3,7 +3,7 @@
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"@fastgpt-sdk/plugin": "^0.1.7",
|
||||
"@fastgpt-sdk/plugin": "^0.1.8",
|
||||
"@fastgpt/global": "workspace:*",
|
||||
"@modelcontextprotocol/sdk": "^1.12.1",
|
||||
"@node-rs/jieba": "2.0.1",
|
||||
@@ -15,7 +15,7 @@
|
||||
"@opentelemetry/winston-transport": "^0.14.0",
|
||||
"@vercel/otel": "^1.13.0",
|
||||
"@xmldom/xmldom": "^0.8.10",
|
||||
"@zilliz/milvus2-sdk-node": "2.4.2",
|
||||
"@zilliz/milvus2-sdk-node": "2.4.10",
|
||||
"axios": "^1.8.2",
|
||||
"bullmq": "^5.52.2",
|
||||
"chalk": "^5.3.0",
|
||||
|
@@ -2,17 +2,22 @@
|
||||
import { MongoApp } from '../../../core/app/schema';
|
||||
import { type AppDetailType } from '@fastgpt/global/core/app/type.d';
|
||||
import { parseHeaderCert } from '../controller';
|
||||
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
|
||||
import {
|
||||
PerResourceTypeEnum,
|
||||
ReadPermissionVal,
|
||||
ReadRoleVal
|
||||
} from '@fastgpt/global/support/permission/constant';
|
||||
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
|
||||
import { getTmbInfoByTmbId } from '../../user/team/controller';
|
||||
import { getResourcePermission } from '../controller';
|
||||
import { AppPermission } from '@fastgpt/global/support/permission/app/controller';
|
||||
import { type PermissionValueType } from '@fastgpt/global/support/permission/type';
|
||||
import { AppFolderTypeList } from '@fastgpt/global/core/app/constants';
|
||||
import { AppFolderTypeList, AppTypeEnum } from '@fastgpt/global/core/app/constants';
|
||||
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
|
||||
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
|
||||
import { type AuthModeType, type AuthResponseType } from '../type';
|
||||
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
|
||||
import { AppReadChatLogPerVal } from '@fastgpt/global/support/permission/app/constant';
|
||||
|
||||
export const authPluginByTmbId = async ({
|
||||
tmbId,
|
||||
@@ -68,6 +73,21 @@ export const authAppByTmbId = async ({
|
||||
return Promise.reject(AppErrEnum.unAuthApp);
|
||||
}
|
||||
|
||||
if (app.type === AppTypeEnum.hidden) {
|
||||
if (per === AppReadChatLogPerVal) {
|
||||
if (!tmbPer.hasManagePer) {
|
||||
return Promise.reject(AppErrEnum.unAuthApp);
|
||||
}
|
||||
} else if (per !== ReadPermissionVal) {
|
||||
return Promise.reject(AppErrEnum.unAuthApp);
|
||||
}
|
||||
|
||||
return {
|
||||
...app,
|
||||
permission: new AppPermission({ isOwner: false, role: ReadRoleVal })
|
||||
};
|
||||
}
|
||||
|
||||
const isOwner = tmbPer.isOwner || String(app.tmbId) === String(tmbId);
|
||||
|
||||
const { Per } = await (async () => {
|
||||
@@ -134,7 +154,7 @@ export const authApp = async ({
|
||||
appId: ParentIdType;
|
||||
per: PermissionValueType;
|
||||
}): Promise<
|
||||
AuthResponseType & {
|
||||
AuthResponseType<AppPermission> & {
|
||||
app: AppDetailType;
|
||||
}
|
||||
> => {
|
||||
|
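The net effect of the new branch: hidden apps are readable by anyone on the team, reading their chat logs additionally requires manage permission, and every other permission request is refused; the returned permission is always forced down to a read role. A hedged usage sketch, assuming the `{ tmbId, appId, per }` call shape used elsewhere in this file:

// Reading chat logs of a hidden app: only passes when the member
// has manage permission on the team side.
const app = await authAppByTmbId({
  tmbId,
  appId,
  per: AppReadChatLogPerVal
});
// For hidden apps, app.permission is now a read-only AppPermission.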
packages/service/support/user/team/utils.ts (new file, +35)
@@ -0,0 +1,35 @@
import { MongoTeamMember } from '../../user/team/teamMemberSchema';
import { type UserModelSchema } from '@fastgpt/global/support/user/type';
import { type TeamSchema } from '@fastgpt/global/support/user/team/type';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';

// TODO: optimize the database access
export async function getRunningUserInfoByTmbId(tmbId: string) {
  if (tmbId) {
    const tmb = await MongoTeamMember.findById(tmbId, 'teamId name userId') // team_members name is the user's name
      .populate<{ team: TeamSchema; user: UserModelSchema }>([
        {
          path: 'team',
          select: 'name'
        },
        {
          path: 'user',
          select: 'username contact'
        }
      ])
      .lean();

    if (!tmb) return Promise.reject(TeamErrEnum.notUser);

    return {
      username: tmb.user.username,
      teamName: tmb.team.name,
      memberName: tmb.name,
      contact: tmb.user.contact || '',
      teamId: tmb.teamId,
      tmbId: tmb._id
    };
  }

  return Promise.reject(TeamErrEnum.notUser);
}
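This helper backs both the `runningUserInfo` passed into child app runs and the `systemVar.user` block above: a single `findById` plus `populate` resolves the member, their team, and their account in one round trip. Usage, as wired up in the dispatch code earlier in this commit:

const runningUserInfo = await getRunningUserInfoByTmbId(appData.tmbId);
// -> { username, teamName, memberName, contact, teamId, tmbId }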