* move file

* perf: dataset file manage

* v441 description

* fix: qa csv update file

* feat: rename file

* frontend show system-version
Archer committed 2023-09-13 17:00:17 +08:00 (committed by GitHub)
commit a19afca148, parent be3b680bc6
53 changed files with 570 additions and 301 deletions

View File

@@ -1,7 +1,7 @@
{
"name": "fastgpt",
"version": "3.7",
"private": true,
"version": "4.2.1",
"private": false,
"scripts": {
"dev": "next dev",
"build": "next build",

View File

@@ -4,6 +4,7 @@
"Confirm": "Yes",
"Create New": "Create",
"Dataset": "Dataset",
"Export": "Export",
"Folder": "Folder",
"Move": "Move",
"Name": "Name",
@@ -215,6 +216,9 @@
"Response Detail": "Detail",
"Response Detail tips": "Whether detailed data such as references and full context need to be returned"
},
"system": {
"Help Document": "Document"
},
"user": {
"Account": "Account",
"Amount of earnings": "Earnings",

View File

@@ -4,6 +4,7 @@
"Confirm": "确认",
"Create New": "新建",
"Dataset": "知识库",
"Export": "导出",
"Folder": "文件夹",
"Move": "移动",
"Name": "名称",
@@ -215,6 +216,9 @@
"Response Detail": "返回详情",
"Response Detail tips": "是否需要返回引用、完整上下文等详细数据"
},
"system": {
"Help Document": "帮助文档"
},
"user": {
"Account": "账号",
"Amount of earnings": "收益(¥)",

client/src/api/core/dataset/file.d.ts (new file)
View File

@@ -0,0 +1,8 @@
import { RequestPaging } from '../../../types/index';
export type GetFileListProps = RequestPaging & {
kbId: string;
searchText: string;
};
export type UpdateFileProps = { id: string; name?: string; datasetUsed?: boolean };

View File

@@ -0,0 +1,15 @@
import { GET, POST, PUT, DELETE } from '@/api/request';
import type { FileInfo, KbFileItemType } from '@/types/plugin';
import type { GetFileListProps, UpdateFileProps } from './file.d';
export const getDatasetFiles = (data: GetFileListProps) =>
POST<KbFileItemType[]>(`/core/dataset/file/list`, data);
export const delDatasetFileById = (params: { fileId: string; kbId: string }) =>
DELETE(`/core/dataset/file/delById`, params);
export const getFileInfoById = (fileId: string) =>
GET<FileInfo>(`/core/dataset/file/detail`, { fileId });
export const delDatasetEmptyFiles = (kbId: string) =>
DELETE(`/core/dataset/file/delEmptyFiles`, { kbId });
export const updateDatasetFile = (data: UpdateFileProps) => PUT(`/core/dataset/file/update`, data);
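
Below is a minimal usage sketch of the new dataset file client (not part of the commit): the kbId/fileId values are placeholders, and it assumes `RequestPaging` carries `pageNum`/`pageSize` the way the list handler later in this diff reads them.

```typescript
import {
  getDatasetFiles,
  updateDatasetFile,
  delDatasetFileById,
  delDatasetEmptyFiles
} from '@/api/core/dataset/file';

// hypothetical ids, for illustration only
const kbId = '64f0000000000000000000aa';
const fileId = '64f0000000000000000000bb';

async function datasetFileExample() {
  // page through one dataset's files (searchText may be empty)
  await getDatasetFiles({ kbId, searchText: '', pageNum: 1, pageSize: 20 });

  // rename a file, or mark it as used by the dataset after an import
  await updateDatasetFile({ id: fileId, name: 'renamed.md' });
  await updateDatasetFile({ id: fileId, datasetUsed: true });

  // delete one file (the endpoint also removes its pg chunks), then clean unused uploads
  await delDatasetFileById({ fileId, kbId });
  await delDatasetEmptyFiles(kbId);
}
```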

View File

@@ -1,12 +1,5 @@
import { GET, POST, PUT, DELETE } from '../request';
import type {
DatasetItemType,
FileInfo,
KbFileItemType,
KbItemType,
KbListItemType,
KbPathItemType
} from '@/types/plugin';
import type { DatasetItemType, KbItemType, KbListItemType, KbPathItemType } from '@/types/plugin';
import { TrainingModeEnum } from '@/constants/plugin';
import {
Props as PushDataProps,
@@ -17,12 +10,7 @@ import {
Response as SearchTestResponse
} from '@/pages/api/openapi/kb/searchTest';
import { Props as UpdateDataProps } from '@/pages/api/openapi/kb/updateData';
import type {
KbUpdateParams,
CreateKbParams,
GetKbDataListProps,
GetFileListProps
} from '../request/kb';
import type { KbUpdateParams, CreateKbParams, GetKbDataListProps } from '../request/kb';
import { QuoteItemType } from '@/types/chat';
import { KbTypeEnum } from '@/constants/kb';
@@ -42,16 +30,6 @@ export const putKbById = (data: KbUpdateParams) => PUT(`/plugins/kb/update`, dat
export const delKbById = (id: string) => DELETE(`/plugins/kb/delete?id=${id}`);
/* kb file */
export const getKbFiles = (data: GetFileListProps) =>
POST<KbFileItemType[]>(`/plugins/kb/file/list`, data);
export const deleteKbFileById = (params: { fileId: string; kbId: string }) =>
DELETE(`/plugins/kb/file/delFileByFileId`, params);
export const getFileInfoById = (fileId: string) =>
GET<FileInfo>(`/plugins/kb/file/getFileInfo`, { fileId });
export const delEmptyFiles = (kbId: string) =>
DELETE(`/plugins/kb/file/deleteEmptyFiles`, { kbId });
/* kb data */
export const getKbDataList = (data: GetKbDataListProps) =>
POST(`/plugins/kb/data/getDataList`, data);
@@ -59,7 +37,7 @@ export const getKbDataList = (data: GetKbDataListProps) =>
/**
* 获取导出数据(不分页)
*/
export const getExportDataList = (data: { kbId: string; fileId: string }) =>
export const getExportDataList = (data: { kbId: string }) =>
GET<[string, string, string][]>(`/plugins/kb/data/exportModelData`, data, {
timeout: 600000
});

View File

@@ -17,11 +17,6 @@ export type CreateKbParams = {
type: `${KbTypeEnum}`;
};
export type GetFileListProps = RequestPaging & {
kbId: string;
searchText: string;
};
export type GetKbDataListProps = RequestPaging & {
kbId: string;
searchText: string;

View File

@@ -0,0 +1,18 @@
import { GET, POST } from '../request';
import { AxiosProgressEvent } from 'axios';
export const uploadImg = (base64Img: string) => POST<string>('/system/uploadImage', { base64Img });
export const postUploadFiles = (
data: FormData,
onUploadProgress: (progressEvent: AxiosProgressEvent) => void
) =>
POST<string[]>('/support/file/upload', data, {
onUploadProgress,
headers: {
'Content-Type': 'multipart/form-data; charset=utf-8'
}
});
export const getFileViewUrl = (fileId: string) => GET<string>('/support/file/readUrl', { fileId });

View File

@@ -1,20 +1,4 @@
import { GET, POST, PUT } from './request';
import type { InitDateResponse } from '@/pages/api/system/getInitData';
import { AxiosProgressEvent } from 'axios';
export const getInitData = () => GET<InitDateResponse>('/system/getInitData');
export const uploadImg = (base64Img: string) => POST<string>('/system/uploadImage', { base64Img });
export const postUploadFiles = (
data: FormData,
onUploadProgress: (progressEvent: AxiosProgressEvent) => void
) =>
POST<string[]>('/plugins/file/upload', data, {
onUploadProgress,
headers: {
'Content-Type': 'multipart/form-data; charset=utf-8'
}
});
export const getFileViewUrl = (fileId: string) => GET<string>('/plugins/file/readUrl', { fileId });

View File

@@ -167,20 +167,7 @@ const Navbar = ({ unread }: { unread: number }) => {
</Link>
</Box>
)}
{feConfigs?.show_doc && (
<MyTooltip label={t('home.Docs')} placement={'right-end'}>
<Box
{...itemStyles}
mb={0}
color={'#9096a5'}
onClick={() => {
window.open(`https://doc.fastgpt.run/docs/intro`);
}}
>
<MyIcon name={'courseLight'} width={'26px'} height={'26px'} />
</Box>
</MyTooltip>
)}
<Language {...itemStyles} />
{feConfigs?.show_git && (
<MyTooltip label={`Git Star: ${gitStar}`} placement={'right-end'}>

View File

@@ -8,7 +8,7 @@ interface Props extends InputProps {
const MyInput = ({ leftIcon, ...props }: Props) => {
return (
<Flex position={'relative'} alignItems={'center'}>
<Input w={'100%'} pl={leftIcon ? '30px' : 3} {...props} />
<Input w={'100%'} pl={leftIcon ? '30px !important' : 3} {...props} />
{leftIcon && (
<Flex
alignItems={'center'}

View File

@@ -7,4 +7,4 @@ export const TrainingTypeMap = {
[TrainingModeEnum.index]: 'index'
};
export const PgTrainingTableName = 'modeldata';
export const PgDatasetTableName = 'modeldata';

View File

@@ -2,7 +2,7 @@ import React, { useCallback, useRef } from 'react';
import { ModalFooter, ModalBody, Input, useDisclosure, Button } from '@chakra-ui/react';
import MyModal from '@/components/MyModal';
export const useEditInfo = ({
export const useEditTitle = ({
title,
placeholder = ''
}: {

View File

@@ -9,7 +9,7 @@ import { useQuery } from '@tanstack/react-query';
import dynamic from 'next/dynamic';
import { useSelectFile } from '@/hooks/useSelectFile';
import { compressImg } from '@/utils/file';
import { feConfigs } from '@/store/static';
import { feConfigs, systemVersion } from '@/store/static';
import { useTranslation } from 'next-i18next';
import { timezoneList } from '@/utils/user';
import Loading from '@/components/Loading';
@@ -172,20 +172,49 @@ const UserInfo = () => {
{t('user.Change')}
</Button>
</Flex>
{feConfigs?.show_userDetail && (
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'}>{t('user.Balance')}:&nbsp;</Box>
<Box flex={1}>
<strong>{userInfo?.balance.toFixed(3)}</strong>
</Box>
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
</Flex>
</Box>
)}
{feConfigs?.show_doc && (
<>
<Flex
mt={4}
w={['85%', '300px']}
py={3}
px={6}
border={theme.borders.sm}
borderWidth={'1.5px'}
borderRadius={'md'}
alignItems={'center'}
cursor={'pointer'}
userSelect={'none'}
onClick={() => {
window.open(`https://doc.fastgpt.run/docs/intro`);
}}
>
<MyIcon name={'courseLight'} w={'18px'} />
<Box ml={2} flex={1}>
{t('system.Help Document')}
</Box>
<Box w={'8px'} h={'8px'} borderRadius={'50%'} bg={'#67c13b'} />
<Box fontSize={'md'} ml={2}>
V{systemVersion}
</Box>
</Flex>
</>
)}
{feConfigs?.show_userDetail && (
<>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'}>{t('user.Balance')}:&nbsp;</Box>
<Box flex={1}>
<strong>{userInfo?.balance.toFixed(3)}</strong>
</Box>
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
</Flex>
</Box>
<Divider my={3} />
<MyTooltip label={'点击配置账号'}>

View File

@@ -3,7 +3,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -12,7 +12,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { rowCount } = await PgClient.query(`SELECT 1
FROM information_schema.columns
WHERE table_schema = 'public'
AND table_name = '${PgTrainingTableName}'
AND table_name = '${PgDatasetTableName}'
AND column_name = 'file_id'`);
if (rowCount > 0) {
@@ -23,7 +23,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
jsonRes(res, {
data: await PgClient.query(
`ALTER TABLE ${PgTrainingTableName} ADD COLUMN file_id VARCHAR(100)`
`ALTER TABLE ${PgDatasetTableName} ADD COLUMN file_id VARCHAR(100)`
)
});
} catch (error) {

View File

@@ -5,7 +5,7 @@ import { authUser } from '@/service/utils/auth';
import { connectToDatabase, KB } from '@/service/mongo';
import { KbTypeEnum } from '@/constants/kb';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -24,7 +24,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
);
const response = await PgClient.update(PgTrainingTableName, {
const response = await PgClient.update(PgDatasetTableName, {
where: [['file_id', 'undefined']],
values: [{ key: 'file_id', value: '' }]
});

View File

@@ -0,0 +1,35 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase } from '@/service/mongo';
import mongoose from 'mongoose';
import { PgClient } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authUser({ req, authRoot: true });
const data = await mongoose.connection.db
.collection('dataset.files')
.updateMany({}, { $set: { 'metadata.datasetUsed': true } });
// update pg data
const pg = await PgClient.query(`UPDATE ${PgDatasetTableName}
SET file_id = ''
WHERE (file_id = 'undefined' OR LENGTH(file_id) < 20) AND file_id != '';`);
jsonRes(res, {
data: {
data,
pg
}
});
} catch (error) {
jsonRes(res, {
code: 500,
error
});
}
}

View File

@@ -4,7 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { Types } from 'mongoose';
import { OtherFileId } from '@/constants/kb';
@@ -22,7 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { userId } = await authUser({ req, authToken: true });
if (fileId === OtherFileId) {
await PgClient.delete(PgTrainingTableName, {
await PgClient.delete(PgDatasetTableName, {
where: [
['user_id', userId],
'AND',
@@ -37,7 +37,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await gridFs.findAndAuthFile(fileId);
// delete all pg data
await PgClient.delete(PgTrainingTableName, {
await PgClient.delete(PgDatasetTableName, {
where: [['user_id', userId], 'AND', ['kb_id', kbId], 'AND', ['file_id', fileId]]
});

View File

@@ -0,0 +1,31 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { kbId } = req.query as { kbId: string };
// 凭证校验
const { userId } = await authUser({ req, authToken: true });
const gridFs = new GridFSStorage('dataset', userId);
const collection = gridFs.Collection();
const files = await collection.deleteMany({
uploadDate: { $lte: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) },
['metadata.kbId']: kbId,
['metadata.userId']: userId,
['metadata.datasetUsed']: { $ne: true }
});
jsonRes(res, {
data: files
});
} catch (err) {
jsonRes(res);
}
}

View File

@@ -4,9 +4,8 @@ import { connectToDatabase, TrainingData } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { FileStatusEnum, OtherFileId } from '@/constants/kb';
import mongoose from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -16,28 +15,37 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
pageNum = 1,
pageSize = 10,
kbId,
searchText
searchText = ''
} = req.body as { pageNum: number; pageSize: number; kbId: string; searchText: string };
searchText = searchText.replace(/'/g, '');
searchText = searchText?.replace(/'/g, '');
// 凭证校验
const { userId } = await authUser({ req, authToken: true });
// find files
const gridFs = new GridFSStorage('dataset', userId);
const bucket = gridFs.GridFSBucket();
const collection = gridFs.Collection();
const mongoWhere = {
['metadata.kbId']: kbId,
['metadata.userId']: userId,
['metadata.datasetUsed']: true,
...(searchText && { filename: { $regex: searchText } })
};
const [files, total] = await Promise.all([
bucket
.find(mongoWhere)
.sort({ _id: -1 })
collection
.find(mongoWhere, {
projection: {
_id: 1,
filename: 1,
uploadDate: 1,
length: 1
}
})
.skip((pageNum - 1) * pageSize)
.limit(pageSize)
.toArray(),
mongoose.connection.db.collection('dataset.files').countDocuments(mongoWhere)
collection.countDocuments(mongoWhere)
]);
async function GetOtherData() {
@@ -49,7 +57,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
status: (await TrainingData.findOne({ userId, kbId, file_id: '' }))
? FileStatusEnum.embedding
: FileStatusEnum.ready,
chunkLength: await PgClient.count(PgTrainingTableName, {
chunkLength: await PgClient.count(PgDatasetTableName, {
fields: ['id'],
where: [
['user_id', userId],
@@ -72,7 +80,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
status: (await TrainingData.findOne({ userId, kbId, file_id: file._id }))
? FileStatusEnum.embedding
: FileStatusEnum.ready,
chunkLength: await PgClient.count(PgTrainingTableName, {
chunkLength: await PgClient.count(PgDatasetTableName, {
fields: ['id'],
where: [
['user_id', userId],
@@ -90,7 +98,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
data: {
pageNum,
pageSize,
data: data.flat().filter((item) => item.chunkLength > 0),
data: data.flat(),
total
}
});

View File

@@ -0,0 +1,66 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
import { UpdateFileProps } from '@/api/core/dataset/file.d';
import { Types } from 'mongoose';
import { PgClient } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id, name, datasetUsed } = req.body as UpdateFileProps;
const { userId } = await authUser({ req, authToken: true });
const gridFs = new GridFSStorage('dataset', userId);
const collection = gridFs.Collection();
await collection.findOneAndUpdate(
{
_id: new Types.ObjectId(id)
},
{
$set: {
...(name && { filename: name }),
...(datasetUsed && { ['metadata.datasetUsed']: datasetUsed })
}
}
);
// data source
updateDatasetSource({
fileId: id,
userId,
name
});
jsonRes(res, {});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
async function updateDatasetSource(data: { fileId: string; userId: string; name?: string }) {
const { fileId, userId, name } = data;
if (!fileId || !name || !userId) return;
try {
await PgClient.update(PgDatasetTableName, {
where: [['user_id', userId], 'AND', ['file_id', fileId]],
values: [
{
key: 'source',
value: name
}
]
});
} catch (error) {
setTimeout(() => {
updateDatasetSource(data);
}, 2000);
}
}

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -18,7 +18,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
// 凭证校验
const { userId } = await authUser({ req });
await PgClient.delete(PgTrainingTableName, {
await PgClient.delete(PgDatasetTableName, {
where: [['user_id', userId], 'AND', ['id', dataId]]
});

View File

@@ -4,7 +4,7 @@ import { connectToDatabase, TrainingData, KB } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { authKb } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { PgTrainingTableName, TrainingModeEnum } from '@/constants/plugin';
import { PgDatasetTableName, TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
import { PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
@@ -136,7 +136,7 @@ export async function pushDataToKb({
try {
const { rows } = await PgClient.query(`
SELECT COUNT(*) > 0 AS exists
FROM ${PgTrainingTableName}
FROM ${PgDatasetTableName}
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') AND user_id='${userId}' AND kb_id='${kbId}'
`);
const exists = rows[0]?.exists || false;

View File

@@ -5,7 +5,7 @@ import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { getVector } from '../plugin/vector';
import type { KbTestItemType } from '@/types/plugin';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { KB } from '@/service/mongo';
export type Props = {
@@ -43,7 +43,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
select id, q, a, source, file_id, (vector <#> '[${
vectors[0]
}]') * -1 AS score from ${PgTrainingTableName} where kb_id='${kbId}' AND user_id='${userId}' order by vector <#> '[${
}]') * -1 AS score from ${PgDatasetTableName} where kb_id='${kbId}' AND user_id='${userId}' order by vector <#> '[${
vectors[0]
}]' limit 12;
COMMIT;`

View File

@@ -5,7 +5,7 @@ import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import { KB, connectToDatabase } from '@/service/mongo';
import { getVector } from '../plugin/vector';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
export type Props = {
dataId: string;
@@ -47,7 +47,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
})();
// 更新 pg 内容.仅修改a不需要更新向量。
await PgClient.update(PgTrainingTableName, {
await PgClient.update(PgDatasetTableName, {
where: [['id', dataId], 'AND', ['user_id', userId]],
values: [
{ key: 'a', value: a.replace(/'/g, '"') },

View File

@@ -3,14 +3,13 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, User } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { OtherFileId } from '@/constants/kb';
import { PgDatasetTableName } from '@/constants/plugin';
import { findAllChildrenIds } from '../delete';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
let { kbId, fileId } = req.query as {
let { kbId } = req.query as {
kbId: string;
fileId: string;
};
if (!kbId) {
@@ -22,6 +21,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 凭证校验
const { userId } = await authUser({ req, authToken: true });
const exportIds = [kbId, ...(await findAllChildrenIds(kbId))];
console.log(exportIds);
const thirtyMinutesAgo = new Date(
Date.now() - (global.feConfigs?.limit?.exportLimitMinutes || 0) * 60 * 1000
);
@@ -43,10 +45,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
throw new Error(`上次导出未到 ${minutes},每 ${minutes}仅可导出一次。`);
}
const where: any = [['kb_id', kbId], 'AND', ['user_id', userId]];
const where: any = [
['user_id', userId],
'AND',
`kb_id IN (${exportIds.map((id) => `'${id}'`).join(',')})`
];
// 从 pg 中获取所有数据
const pgData = await PgClient.select<{ q: string; a: string; source: string }>(
PgTrainingTableName,
PgDatasetTableName,
{
where,
fields: ['q', 'a', 'source'],

View File

@@ -4,7 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import type { KbDataItemType } from '@/types/plugin';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
export type Response = {
id: string;
@@ -29,7 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const where: any = [['user_id', userId], 'AND', ['id', dataId]];
const searchRes = await PgClient.select<KbDataItemType>(PgTrainingTableName, {
const searchRes = await PgClient.select<KbDataItemType>(PgDatasetTableName, {
fields: ['kb_id', 'id', 'q', 'a', 'source', 'file_id'],
where,
limit: 1

View File

@@ -4,7 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import type { KbDataItemType } from '@/types/plugin';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { OtherFileId } from '@/constants/kb';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -50,14 +50,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
];
const [searchRes, total] = await Promise.all([
PgClient.select<KbDataItemType>(PgTrainingTableName, {
PgClient.select<KbDataItemType>(PgDatasetTableName, {
fields: ['id', 'q', 'a', 'source', 'file_id'],
where,
order: [{ field: 'id', mode: 'DESC' }],
limit: pageSize,
offset: pageSize * (pageNum - 1)
}),
PgClient.count(PgTrainingTableName, {
PgClient.count(PgDatasetTableName, {
fields: ['id'],
where
})

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, KB } from '@/service/mongo';
import { authKb, authUser } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { insertKbItem, PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { getVectorModel } from '@/service/utils/data';
@@ -45,7 +45,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const { rows: existsRows } = await PgClient.query(`
SELECT COUNT(*) > 0 AS exists
FROM ${PgTrainingTableName}
FROM ${PgDatasetTableName}
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') AND user_id='${userId}' AND kb_id='${kbId}'
`);
const exists = existsRows[0]?.exists || false;

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, KB, App, TrainingData } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { GridFSStorage } from '@/service/lib/gridfs';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -29,7 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
// delete all pg data
await PgClient.delete(PgTrainingTableName, {
await PgClient.delete(PgDatasetTableName, {
where: [
['user_id', userId],
'AND',
@@ -56,7 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
}
async function findAllChildrenIds(id: string) {
export async function findAllChildrenIds(id: string) {
// find children
const children = await KB.find({ parentId: id });

View File

@@ -1,59 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
import { PgClient } from '@/service/pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { Types } from 'mongoose';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { kbId } = req.query as { kbId: string };
// 凭证校验
const { userId } = await authUser({ req, authToken: true });
const gridFs = new GridFSStorage('dataset', userId);
const bucket = gridFs.GridFSBucket();
const files = await bucket
// 1 hours expired
.find({
uploadDate: { $lte: new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) },
['metadata.kbId']: kbId,
['metadata.userId']: userId
})
.sort({ _id: -1 })
.toArray();
const data = await Promise.all(
files.map(async (file) => {
return {
id: file._id,
chunkLength: await PgClient.count(PgTrainingTableName, {
fields: ['id'],
where: [
['user_id', userId],
'AND',
['kb_id', kbId],
'AND',
['file_id', String(file._id)]
]
})
};
})
);
await Promise.all(
data
.filter((item) => item.chunkLength === 0)
.map((file) => bucket.delete(new Types.ObjectId(file.id)))
);
jsonRes(res);
} catch (err) {
jsonRes(res);
}
}

View File

@@ -23,7 +23,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
jsonRes(res, {
data: `/api/plugins/file/read?token=${token}`
data: `/api/support/file/read?token=${token}`
});
} catch (error) {
jsonRes(res, {

View File

@@ -13,6 +13,7 @@ export type InitDateResponse = {
qaModel: QAModelItemType;
vectorModels: VectorModelItemType[];
feConfigs: FeConfigsType;
systemVersion: string;
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -24,7 +25,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
feConfigs: global.feConfigs,
chatModels: global.chatModels,
qaModel: global.qaModel,
vectorModels: global.vectorModels
vectorModels: global.vectorModels,
systemVersion: process.env.npm_package_version || '0.0.0'
}
});
}

View File

@@ -11,7 +11,7 @@ import {
IconButton
} from '@chakra-ui/react';
import { useGlobalStore } from '@/store/global';
import { useEditInfo } from '@/hooks/useEditInfo';
import { useEditTitle } from '@/hooks/useEditTitle';
import { useRouter } from 'next/router';
import Avatar from '@/components/Avatar';
import MyTooltip from '@/components/MyTooltip';
@@ -70,7 +70,7 @@ const ChatHistorySlider = ({
const isShare = useMemo(() => !appId || !userInfo, [appId, userInfo]);
// custom title edit
const { onOpenModal, EditModal: EditTitleModal } = useEditInfo({
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
title: '自定义历史记录标题',
placeholder: '如果设置为空,会自动跟随聊天记录。'
});

View File

@@ -1,19 +1,12 @@
import React, { useCallback, useState, useRef, useMemo } from 'react';
import { Box, Card, IconButton, Flex, Button, Grid, Image } from '@chakra-ui/react';
import { Box, Card, IconButton, Flex, Grid, Image } from '@chakra-ui/react';
import type { KbDataItemType } from '@/types/plugin';
import { usePagination } from '@/hooks/usePagination';
import {
getKbDataList,
getExportDataList,
delOneKbDataByDataId,
getTrainingData,
getFileInfoById
} from '@/api/plugins/kb';
import { getKbDataList, delOneKbDataByDataId, getTrainingData } from '@/api/plugins/kb';
import { getFileInfoById } from '@/api/core/dataset/file';
import { DeleteIcon, RepeatIcon } from '@chakra-ui/icons';
import { fileDownload } from '@/utils/file';
import { useMutation, useQuery } from '@tanstack/react-query';
import { useQuery } from '@tanstack/react-query';
import { useToast } from '@/hooks/useToast';
import Papa from 'papaparse';
import InputModal, { FormData as InputDataType } from './InputDataModal';
import { debounce } from 'lodash';
import { getErrText } from '@/utils/tools';
@@ -24,8 +17,6 @@ import MyIcon from '@/components/Icon';
import MyTooltip from '@/components/MyTooltip';
import MyInput from '@/components/MyInput';
import { fileImgs } from '@/constants/common';
import { useRequest } from '@/hooks/useRequest';
import { feConfigs } from '@/store/static';
const DataCard = ({ kbId }: { kbId: string }) => {
const BoxRef = useRef<HTMLDivElement>(null);
@@ -82,31 +73,27 @@ const DataCard = ({ kbId }: { kbId: string }) => {
[fileInfo?.filename]
);
// get al data and export csv
const { mutate: onclickExport, isLoading: isLoadingExport = false } = useRequest({
mutationFn: () => getExportDataList({ kbId, fileId }),
onSuccess(res) {
const text = Papa.unparse({
fields: ['question', 'answer', 'source'],
data: res
});
const filenameSplit = fileInfo?.filename?.split('.') || [];
const filename = filenameSplit?.length <= 1 ? 'data' : filenameSplit.slice(0, -1).join('.');
fileDownload({
text,
type: 'text/csv',
filename
});
},
successToast: `导出成功,下次导出需要 ${feConfigs?.limit?.exportLimitMinutes} 分钟后`,
errorToast: '导出异常'
});
return (
<Box ref={BoxRef} position={'relative'} px={5} py={[1, 5]} h={'100%'} overflow={'overlay'}>
<Flex alignItems={'center'}>
<IconButton
mr={3}
icon={<MyIcon name={'backFill'} w={'18px'} color={'myBlue.600'} />}
bg={'white'}
boxShadow={'1px 1px 9px rgba(0,0,0,0.15)'}
h={'28px'}
size={'sm'}
borderRadius={'50%'}
aria-label={''}
onClick={() =>
router.replace({
query: {
kbId,
currentTab: 'dataset'
}
})
}
/>
<Flex
className="textEllipsis"
flex={'1 0 0'}
@@ -117,18 +104,6 @@ const DataCard = ({ kbId }: { kbId: string }) => {
<Image src={fileIcon || '/imgs/files/file.svg'} w={'16px'} mr={2} alt={''} />
{t(fileInfo?.filename || 'Filename')}
</Flex>
<Button
mr={2}
size={['sm', 'md']}
variant={'base'}
borderColor={'myBlue.600'}
color={'myBlue.600'}
isLoading={isLoadingExport || isLoading}
title={`${feConfigs} 分钟能导出 1 次`}
onClick={onclickExport}
>
{t('dataset.Export')}
</Button>
<Box>
<MyTooltip label={'刷新'}>
<IconButton

View File

@@ -9,9 +9,14 @@ import {
Th,
Td,
Tbody,
Image
Image,
MenuButton,
Menu,
MenuList,
MenuItem
} from '@chakra-ui/react';
import { getKbFiles, deleteKbFileById, getTrainingData } from '@/api/plugins/kb';
import { getTrainingData } from '@/api/plugins/kb';
import { getDatasetFiles, delDatasetFileById, updateDatasetFile } from '@/api/core/dataset/file';
import { useQuery } from '@tanstack/react-query';
import { debounce } from 'lodash';
import { formatFileSize } from '@/utils/tools';
@@ -23,11 +28,13 @@ import dayjs from 'dayjs';
import { fileImgs } from '@/constants/common';
import { useRequest } from '@/hooks/useRequest';
import { useLoading } from '@/hooks/useLoading';
import { FileStatusEnum } from '@/constants/kb';
import { FileStatusEnum, OtherFileId } from '@/constants/kb';
import { useRouter } from 'next/router';
import { usePagination } from '@/hooks/usePagination';
import { KbFileItemType } from '@/types/plugin';
import { useGlobalStore } from '@/store/global';
import MyMenu from '@/components/MyMenu';
import { useEditTitle } from '@/hooks/useEditTitle';
const FileCard = ({ kbId }: { kbId: string }) => {
const BoxRef = useRef<HTMLDivElement>(null);
@@ -45,13 +52,13 @@ const FileCard = ({ kbId }: { kbId: string }) => {
data: files,
Pagination,
total,
isLoading,
getData,
isLoading,
pageNum,
pageSize
} = usePagination<KbFileItemType>({
api: getKbFiles,
pageSize: 40,
api: getDatasetFiles,
pageSize: 20,
params: {
kbId,
searchText
@@ -63,6 +70,7 @@ const FileCard = ({ kbId }: { kbId: string }) => {
}
});
// change search
const debounceRefetch = useCallback(
debounce(() => {
getData(1);
@@ -71,6 +79,7 @@ const FileCard = ({ kbId }: { kbId: string }) => {
[]
);
// add file icon
const formatFiles = useMemo(
() =>
files.map((file) => ({
@@ -79,15 +88,11 @@ const FileCard = ({ kbId }: { kbId: string }) => {
})),
[files]
);
const totalDataLength = useMemo(
() => files.reduce((sum, item) => sum + item.chunkLength, 0),
[files]
);
const { mutate: onDeleteFile } = useRequest({
mutationFn: (fileId: string) => {
setLoading(true);
return deleteKbFileById({
return delDatasetFileById({
fileId,
kbId
});
@@ -101,6 +106,24 @@ const FileCard = ({ kbId }: { kbId: string }) => {
successToast: t('common.Delete Success'),
errorToast: t('common.Delete Failed')
});
const { mutate: onUpdateFilename } = useRequest({
mutationFn: (data: { id: string; name: string }) => {
setLoading(true);
return updateDatasetFile(data);
},
onSuccess() {
getData(pageNum);
},
onSettled() {
setLoading(false);
},
successToast: t('common.Delete Success'),
errorToast: t('common.Delete Failed')
});
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
const statusMap = {
[FileStatusEnum.embedding]: {
@@ -121,17 +144,21 @@ const FileCard = ({ kbId }: { kbId: string }) => {
}
});
useQuery(['refetchTrainingData'], refetchTrainingData, {
refetchInterval: 8000,
enabled: qaListLen > 0 || vectorListLen > 0
});
useQuery(
['refetchTrainingData', kbId],
() => Promise.all([refetchTrainingData(), getData(pageNum)]),
{
refetchInterval: 8000,
enabled: qaListLen > 0 || vectorListLen > 0
}
);
return (
<Box ref={BoxRef} position={'relative'} py={[1, 5]} h={'100%'} overflow={'overlay'}>
<Flex justifyContent={'space-between'} px={5}>
<Box ref={BoxRef} py={[1, 5]} h={'100%'} overflow={'overlay'}>
<Flex justifyContent={'space-between'} px={[2, 5]}>
<Box>
<Box fontWeight={'bold'} fontSize={'lg'} mr={2}>
{t('kb.Files', { total: files.length })}
<Box fontWeight={'bold'} fontSize={['md', 'lg']} mr={2}>
{t('kb.Files', { total })}
</Box>
<Box as={'span'} fontSize={'sm'}>
{(qaListLen > 0 || vectorListLen > 0) && (
@@ -149,7 +176,8 @@ const FileCard = ({ kbId }: { kbId: string }) => {
leftIcon={
<MyIcon name="searchLight" position={'absolute'} w={'14px'} color={'myGray.500'} />
}
w={['100%', '200px']}
w={['100%', '250px']}
size={['sm', 'md']}
placeholder={t('common.Search') || ''}
value={searchText}
onChange={(e) => {
@@ -169,14 +197,12 @@ const FileCard = ({ kbId }: { kbId: string }) => {
/>
</Flex>
</Flex>
<TableContainer mt={[0, 3]}>
<TableContainer mt={[0, 3]} position={'relative'} minH={'70vh'}>
<Table variant={'simple'} fontSize={'sm'}>
<Thead>
<Tr>
<Th>{t('kb.Filename')}</Th>
<Th>
{t('kb.Chunk Length')}({totalDataLength})
</Th>
<Th>{t('kb.Chunk Length')}</Th>
<Th>{t('kb.Upload Time')}</Th>
<Th>{t('kb.File Size')}</Th>
<Th>{t('common.Status')}</Th>
@@ -213,45 +239,103 @@ const FileCard = ({ kbId }: { kbId: string }) => {
</Td>
<Td>{dayjs(file.uploadTime).format('YYYY/MM/DD HH:mm')}</Td>
<Td>{formatFileSize(file.size)}</Td>
<Td
display={'flex'}
alignItems={'center'}
_before={{
content: '""',
w: '10px',
h: '10px',
mr: 2,
borderRadius: 'lg',
bg: statusMap[file.status].color
}}
>
{statusMap[file.status].text}
<Td>
<Flex
alignItems={'center'}
_before={{
content: '""',
w: '10px',
h: '10px',
mr: 2,
borderRadius: 'lg',
bg: statusMap[file.status].color
}}
>
{statusMap[file.status].text}
</Flex>
</Td>
<Td onClick={(e) => e.stopPropagation()}>
<MyIcon
name={'delete'}
w={'14px'}
_hover={{ color: 'red.600' }}
onClick={() =>
openConfirm(() => {
onDeleteFile(file.id);
})()
<MyMenu
width={100}
Button={
<MenuButton
w={'22px'}
h={'22px'}
borderRadius={'md'}
_hover={{
color: 'myBlue.600',
'& .icon': {
bg: 'myGray.100'
}
}}
>
<MyIcon
className="icon"
name={'more'}
h={'16px'}
w={'16px'}
px={1}
py={1}
borderRadius={'md'}
cursor={'pointer'}
/>
</MenuButton>
}
menuList={[
...(file.id !== OtherFileId
? [
{
child: (
<Flex alignItems={'center'}>
<MyIcon name={'edit'} w={'14px'} mr={2} />
{t('Rename')}
</Flex>
),
onClick: () =>
onOpenModal({
defaultVal: file.filename,
onSuccess: (newName) => {
onUpdateFilename({
id: file.id,
name: newName
});
}
})
}
]
: []),
{
child: (
<Flex alignItems={'center'}>
<MyIcon
mr={1}
name={'delete'}
w={'14px'}
_hover={{ color: 'red.600' }}
/>
<Box>{t('common.Delete')}</Box>
</Flex>
),
onClick: openConfirm(() => {
onDeleteFile(file.id);
})
}
]}
/>
</Td>
</Tr>
))}
</Tbody>
</Table>
<Loading loading={isLoading && files.length === 0} fixed={false} />
{total > pageSize && (
<Flex mt={2} justifyContent={'center'}>
<Pagination />
</Flex>
)}
</TableContainer>
{total > pageSize && (
<Flex mt={2} justifyContent={'center'}>
<Pagination />
</Flex>
)}
<ConfirmModal />
<Loading loading={isLoading} />
<EditTitleModal />
</Box>
);
};

View File

@@ -27,6 +27,7 @@ import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { TrainingModeEnum } from '@/constants/plugin';
import FileSelect, { type FileItemType } from './FileSelect';
import { useDatasetStore } from '@/store/dataset';
import { updateDatasetFile } from '@/api/core/dataset/file';
const fileExtension = '.txt, .doc, .docx, .pdf, .md';
@@ -64,6 +65,16 @@ const ChunkImport = ({ kbId }: { kbId: string }) => {
mutationFn: async () => {
const chunks = files.map((file) => file.chunks).flat();
// mark the file is used
await Promise.all(
files.map((file) =>
updateDatasetFile({
id: file.id,
datasetUsed: true
})
)
);
// subsection import
let success = 0;
const step = 300;

View File

@@ -11,6 +11,7 @@ import { TrainingModeEnum } from '@/constants/plugin';
import FileSelect, { type FileItemType } from './FileSelect';
import { useRouter } from 'next/router';
import { useDatasetStore } from '@/store/dataset';
import { updateDatasetFile } from '@/api/core/dataset/file';
const fileExtension = '.csv';
@@ -37,6 +38,16 @@ const CsvImport = ({ kbId }: { kbId: string }) => {
const { mutate: onclickUpload, isLoading: uploading } = useMutation({
mutationFn: async () => {
// mark the file is used
await Promise.all(
files.map((file) =>
updateDatasetFile({
id: file.id,
datasetUsed: true
})
)
);
const chunks = files
.map((file) => file.chunks)
.flat()

View File

@@ -152,7 +152,7 @@ const FileSelect = ({
throw new Error('csv 文件格式有误,请确保 question 和 answer 两列');
}
const fileItem: FileItemType = {
id: nanoid(),
id: filesId[0],
filename: file.name,
icon,
tokens: 0,

View File

@@ -16,6 +16,7 @@ import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { TrainingModeEnum } from '@/constants/plugin';
import FileSelect, { type FileItemType } from './FileSelect';
import { useRouter } from 'next/router';
import { updateDatasetFile } from '@/api/core/dataset/file';
const fileExtension = '.txt, .doc, .docx, .pdf, .md';
@@ -55,6 +56,16 @@ const QAImport = ({ kbId }: { kbId: string }) => {
mutationFn: async () => {
const chunks = files.map((file) => file.chunks).flat();
// mark the file is used
await Promise.all(
files.map((file) =>
updateDatasetFile({
id: file.id,
datasetUsed: true
})
)
);
// subsection import
let success = 0;
const step = 200;

View File

@@ -2,7 +2,7 @@ import React, { useState, useCallback } from 'react';
import { Box, Flex, Button, Textarea, IconButton, BoxProps } from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { insertData2Kb, putKbDataById, delOneKbDataByDataId } from '@/api/plugins/kb';
import { getFileViewUrl } from '@/api/system';
import { getFileViewUrl } from '@/api/support/file';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
import MyIcon from '@/components/Icon';

View File

@@ -17,7 +17,8 @@ import Avatar from '@/components/Avatar';
import Info from './components/Info';
import { serviceSideProps } from '@/utils/i18n';
import { useTranslation } from 'react-i18next';
import { delEmptyFiles, getTrainingQueueLen } from '@/api/plugins/kb';
import { getTrainingQueueLen } from '@/api/plugins/kb';
import { delDatasetEmptyFiles } from '@/api/core/dataset/file';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { feConfigs } from '@/store/static';
@@ -96,7 +97,7 @@ const Detail = ({ kbId, currentTab }: { kbId: string; currentTab: `${TabEnum}` }
useEffect(() => {
return () => {
try {
delEmptyFiles(kbId);
delDatasetEmptyFiles(kbId);
} catch (error) {}
};
}, [kbId]);

View File

@@ -15,7 +15,7 @@ import PageContainer from '@/components/PageContainer';
import { useConfirm } from '@/hooks/useConfirm';
import { AddIcon } from '@chakra-ui/icons';
import { useQuery } from '@tanstack/react-query';
import { delKbById, getKbPaths, putKbById } from '@/api/plugins/kb';
import { delKbById, getExportDataList, getKbPaths, putKbById } from '@/api/plugins/kb';
import { useTranslation } from 'react-i18next';
import Avatar from '@/components/Avatar';
import MyIcon from '@/components/Icon';
@@ -26,7 +26,10 @@ import Tag from '@/components/Tag';
import MyMenu from '@/components/MyMenu';
import { useRequest } from '@/hooks/useRequest';
import { useGlobalStore } from '@/store/global';
import { useEditInfo } from '@/hooks/useEditInfo';
import { useEditTitle } from '@/hooks/useEditTitle';
import Papa from 'papaparse';
import { fileDownload } from '@/utils/file';
import { feConfigs } from '@/store/static';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const EditFolderModal = dynamic(() => import('./component/EditFolderModal'), { ssr: false });
@@ -49,7 +52,7 @@ const Kb = () => {
content: ''
});
const { myKbList, loadKbList, setKbList, updateDataset } = useDatasetStore();
const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditInfo({
const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
const [moveDataId, setMoveDataId] = useState<string>();
@@ -83,7 +86,32 @@ const Kb = () => {
errorToast: t('kb.Delete Dataset Error')
});
const { data, refetch } = useQuery(['loadKbList', parentId], () => {
// export dataset to csv
const { mutate: onclickExport } = useRequest({
mutationFn: (kbId: string) => {
setLoading(true);
return getExportDataList({ kbId });
},
onSuccess(res) {
const text = Papa.unparse({
fields: ['question', 'answer', 'source'],
data: res
});
fileDownload({
text,
type: 'text/csv',
filename: 'dataset.csv'
});
},
onSettled() {
setLoading(false);
},
successToast: `导出成功,下次导出需要 ${feConfigs?.limit?.exportLimitMinutes} 分钟后`,
errorToast: '导出异常'
});
const { data, refetch } = useQuery(['loadDataset', parentId], () => {
return Promise.all([loadKbList(parentId), getKbPaths(parentId)]);
});
@@ -318,6 +346,15 @@ const Kb = () => {
),
onClick: () => setMoveDataId(kb._id)
},
{
child: (
<Flex alignItems={'center'}>
<MyIcon name={'export'} w={'14px'} mr={2} />
{t('Export')}
</Flex>
),
onClick: () => onclickExport(kb._id)
},
{
child: (
<Flex alignItems={'center'}>

View File

@@ -17,6 +17,9 @@ export class GridFSStorage {
this.bucket = bucket;
this.uid = String(uid);
}
Collection() {
return mongoose.connection.db.collection(`${this.bucket}.files`);
}
GridFSBucket() {
return new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
bucketName: this.bucket

View File

@@ -5,7 +5,7 @@ import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice } from '@/service/events/pushBill';
import type { SelectedKbType } from '@/types/plugin';
import type { QuoteItemType } from '@/types/chat';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
type KBSearchProps = {
kbList: SelectedKbType;
@@ -42,7 +42,7 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
const res: any = await PgClient.query(
`BEGIN;
SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
select kb_id,id,q,a,source,file_id from ${PgTrainingTableName} where kb_id IN (${kbList
select kb_id,id,q,a,source,file_id from ${PgDatasetTableName} where kb_id IN (${kbList
.map((item) => `'${item.kbId}'`)
.join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
vectors[0]

View File

@@ -1,6 +1,6 @@
import { Pool } from 'pg';
import type { QueryResultRow } from 'pg';
import { PgTrainingTableName } from '@/constants/plugin';
import { PgDatasetTableName } from '@/constants/plugin';
import { addLog } from './utils/tools';
import { DatasetItemType } from '@/types/plugin';
@@ -174,7 +174,7 @@ export const insertKbItem = ({
vector: number[];
})[];
}) => {
return PgClient.insert(PgTrainingTableName, {
return PgClient.insert(PgDatasetTableName, {
values: data.map((item) => [
{ key: 'user_id', value: userId },
{ key: 'kb_id', value: kbId },
@@ -192,7 +192,7 @@ export async function initPg() {
await connectPg();
await PgClient.query(`
CREATE EXTENSION IF NOT EXISTS vector;
CREATE TABLE IF NOT EXISTS ${PgTrainingTableName} (
CREATE TABLE IF NOT EXISTS ${PgDatasetTableName} (
id BIGSERIAL PRIMARY KEY,
vector VECTOR(1536) NOT NULL,
user_id VARCHAR(50) NOT NULL,
@@ -202,9 +202,9 @@ export async function initPg() {
q TEXT NOT NULL,
a TEXT
);
CREATE INDEX IF NOT EXISTS modelData_userId_index ON ${PgTrainingTableName} USING HASH (user_id);
CREATE INDEX IF NOT EXISTS modelData_kbId_index ON ${PgTrainingTableName} USING HASH (kb_id);
CREATE INDEX IF NOT EXISTS idx_model_data_md5_q_a_user_id_kb_id ON ${PgTrainingTableName} (md5(q), md5(a), user_id, kb_id);
CREATE INDEX IF NOT EXISTS modelData_userId_index ON ${PgDatasetTableName} USING HASH (user_id);
CREATE INDEX IF NOT EXISTS modelData_kbId_index ON ${PgDatasetTableName} USING HASH (kb_id);
CREATE INDEX IF NOT EXISTS idx_model_data_md5_q_a_user_id_kb_id ON ${PgDatasetTableName} (md5(q), md5(a), user_id, kb_id);
`);
console.log('init pg successful');
} catch (error) {

View File

@@ -8,6 +8,7 @@ import { getInitData } from '@/api/system';
import { delay } from '@/utils/tools';
import { FeConfigsType } from '@/types';
export let systemVersion = '0.0.0';
export let chatModelList: ChatModelItemType[] = [];
export let qaModel: QAModelItemType = {
model: 'gpt-3.5-turbo-16k',
@@ -28,6 +29,7 @@ export const clientInitData = async (): Promise<InitDateResponse> => {
qaModel = res.qaModel;
vectorModelList = res.vectorModels;
feConfigs = res.feConfigs;
systemVersion = res.systemVersion;
return res;
} catch (error) {

View File

@@ -2,7 +2,7 @@ import mammoth from 'mammoth';
import Papa from 'papaparse';
import { getOpenAiEncMap } from './plugin/openai';
import { getErrText } from './tools';
import { uploadImg, postUploadFiles } from '@/api/system';
import { uploadImg, postUploadFiles } from '@/api/support/file';
/**
* upload file to mongo gridfs

View File

@@ -0,0 +1,23 @@
---
title: 'Upgrade to V4.4.1'
description: 'Guide for upgrading FastGPT from an older version to V4.4.1'
icon: 'upgrade'
draft: false
toc: true
weight: 994
---
## Run the initialization API
Send one HTTP request (remember to include `headers.rootkey`; its value comes from your environment variables):
1. https://xxxxx/api/admin/initv441
```bash
curl --location --request POST 'https://{{host}}/api/admin/initv441' \
--header 'rootkey: {{rootkey}}' \
--header 'Content-Type: application/json'
```
This initializes Mongo's dataset.files collection, marking all existing files as in use, and clears invalid file_id values in pg.
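
For reference, a rough TypeScript equivalent of the curl call above (a sketch only: it assumes Node 18+ with a global fetch, and the host and root-key values are placeholders):

```typescript
// one-off script: call the v4.4.1 init endpoint with the root key from your environment
const host = 'https://xxxxx'; // same placeholder as {{host}} above
const rootkey = process.env.ROOT_KEY || ''; // whichever env var holds your root key

async function initV441() {
  const res = await fetch(`${host}/api/admin/initv441`, {
    method: 'POST',
    headers: {
      rootkey,
      'Content-Type': 'application/json'
    }
  });
  console.log(await res.json());
}

initV441();
```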