mirror of https://github.com/labring/FastGPT.git
synced 2025-07-23 13:03:50 +00:00

4.6.2-alpha (#517)
@@ -20,6 +20,7 @@
     "@fastgpt/service": "workspace:*",
     "@fastgpt/web": "workspace:*",
     "@mozilla/readability": "^0.4.4",
+    "@node-rs/jieba": "^1.7.2",
     "@tanstack/react-query": "^4.24.10",
     "@types/nprogress": "^0.2.0",
     "axios": "^1.5.1",
@@ -261,6 +261,9 @@
     "data": {
       "Edit": "Edit Data",
       "id": "Data ID"
     },
+    "test": {
+      "Test Result": "Results"
+    }
   },
   "module": {
@@ -261,6 +261,9 @@
     "data": {
       "Edit": "编辑数据",
      "id": "数据ID"
     },
+    "test": {
+      "Test Result": "测试结果"
+    }
   },
   "module": {
@@ -342,7 +342,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
         const items = clipboardData.items;
         const files = Array.from(items)
           .map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
-          .filter((item) => item) as File[];
+          .filter(Boolean) as File[];
         onSelectFile(files);
       }
     }}
@@ -40,7 +40,7 @@ const ResponseTags = ({ responseData = [] }: { responseData?: ChatHistoryItemRes
     .filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode)
     .map((item) => item.quoteList)
     .flat()
-    .filter((item) => item) as SearchDataResponseItemType[];
+    .filter(Boolean) as SearchDataResponseItemType[];
   const sourceList = quoteList.reduce(
     (acc: Record<string, SearchDataResponseItemType[]>, cur) => {
       if (!acc[cur.sourceName]) {
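Review note: this commit repeatedly swaps `.filter((item) => item)` for the terser `.filter(Boolean)`. The runtime behavior is identical (both drop falsy entries), but neither form narrows the TypeScript element type, which is why the `as File[]` / `as SearchDataResponseItemType[]` casts survive the change. A minimal sketch:

    // Both forms drop falsy values; neither narrows the static type,
    // so the casts in the hunks above are still required.
    const maybe: (string | undefined)[] = ['a', undefined, 'b'];
    const before = maybe.filter((item) => item) as string[];
    const after = maybe.filter(Boolean) as string[]; // same result: ['a', 'b']

    // A user-defined type guard would remove the need for the cast:
    const narrowed = maybe.filter((item): item is string => Boolean(item));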
@@ -18,13 +18,6 @@ export type DatasetUpdateParams = {
   agentModel?: LLMModelItemType;
 };
 
-export type SearchTestProps = {
-  datasetId: string;
-  text: string;
-  limit?: number;
-  rerank?: boolean;
-};
-
 /* ======= collections =========== */
 export type GetDatasetCollectionsProps = RequestPaging & {
   datasetId: string;
projects/app/src/global/core/dataset/api.d.ts (17 lines changed, vendored)
@@ -1,6 +1,9 @@
 import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
 import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
-import { DatasetDataIndexItemType } from '@fastgpt/global/core/dataset/type';
+import {
+  DatasetDataIndexItemType,
+  SearchDataResponseItemType
+} from '@fastgpt/global/core/dataset/type';
 
 /* ================= dataset ===================== */
 export type CreateDatasetParams = {
@@ -34,3 +37,15 @@ export type UpdateDatasetDataProps = {
     dataId?: string; // pg data id
   })[];
 };
+
+/* -------------- search ---------------- */
+export type SearchTestProps = {
+  datasetId: string;
+  text: string;
+  limit?: number;
+  rerank?: boolean;
+};
+export type SearchTestResponse = {
+  list: SearchDataResponseItemType[];
+  duration: string;
+};
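Review note: SearchTestProps moves here from datasetReq.d.ts (removed in the hunk above), and the search-test endpoint gains a typed envelope: the raw hit array becomes { list, duration }. A hedged usage sketch; the postSearchText wrapper is updated later in this diff, and its import path here is assumed:

    import { postSearchText } from '@/web/core/dataset/api'; // path assumed

    async function runSearchTest(datasetId: string) {
      const res = await postSearchText({ datasetId, text: 'hello', limit: 30, rerank: false });
      console.log(res.duration); // server-formatted, e.g. "0.532s"
      return res.list;           // SearchDataResponseItemType[]
    }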
@@ -93,7 +93,7 @@ function App({ Component, pageProps }: AppProps) {
   return (
     <>
       <Head>
-        <title>{feConfigs?.systemTitle || process.env.SYSTEM_NAME || 'GPT'}</title>
+        <title>{feConfigs?.systemTitle || process.env.SYSTEM_NAME || ''}</title>
         <meta
           name="description"
           content="FastGPT 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!"
@@ -295,7 +295,7 @@ async function initPgData() {
     ]);
   } catch (error) {
     console.log(error);
-    console.log('column exits');
+    console.log('column exists');
   }
 
   const { rows } = await PgClient.query<{ user_id: string }>(`
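Review note: besides the 'exits' → 'exists' typo fix, this catch block treats a duplicate-column error as the normal "already migrated" path. Where the Postgres version allows it, ADD COLUMN IF NOT EXISTS makes the migration idempotent without routing through the error handler. A sketch using the PgClient seen above; the table and column names are illustrative, not the repo's actual migration:

    // Idempotent alternative (PostgreSQL 9.6+); names are illustrative only.
    await PgClient.query(
      `ALTER TABLE modeldata ADD COLUMN IF NOT EXISTS team_id VARCHAR(50)`
    );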
@@ -2,18 +2,9 @@ import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
 import { connectToDatabase } from '@/service/mongo';
 import { delay } from '@/utils/tools';
-import { PgClient } from '@fastgpt/service/common/pg';
-import {
-  DatasetDataIndexTypeEnum,
-  PgDatasetTableName
-} from '@fastgpt/global/core/dataset/constant';
-
 import { authCert } from '@fastgpt/service/support/permission/auth/common';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
-import { getUserDefaultTeam } from '@fastgpt/service/support/user/team/controller';
-import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
-import { defaultQAModels } from '@fastgpt/global/core/ai/model';
-import { MongoApp } from '@fastgpt/service/core/app/schema';
+import { jiebaSplit } from '@/service/core/dataset/utils';
 
 let success = 0;
 /* Move the data in pg into mongo dataset.datas and build the mapping */
@@ -22,6 +13,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const { limit = 50 } = req.body as { limit: number };
   await authCert({ req, authRoot: true });
   await connectToDatabase();
   success = 0;
 
+  console.log(
+    'total',
+    await MongoDatasetData.countDocuments({ fullTextToken: { $exists: false } })
+  );
+
+  await initFullTextToken(limit);
@@ -37,4 +34,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     });
   }
 }
-export async function initFullTextToken(limit = 50) {}
+export async function initFullTextToken(limit = 50): Promise<any> {
+  try {
+    const dataList = await MongoDatasetData.find({ fullTextToken: { $exists: false } }, '_id q a')
+      .limit(limit)
+      .lean();
+    if (dataList.length === 0) return;
+
+    const result = await Promise.allSettled(
+      dataList.map((item) => {
+        const text = item.q + (item.a || '');
+        const tokens = jiebaSplit({ text });
+
+        return MongoDatasetData.findByIdAndUpdate(item._id, {
+          $set: {
+            fullTextToken: tokens
+          }
+        });
+      })
+    );
+
+    success += result.filter((item) => item.status === 'fulfilled').length;
+    console.log(`success: ${success}`);
+    return initFullTextToken(limit);
+  } catch (error) {
+    await delay(1000);
+    return initFullTextToken(limit);
+  }
+}
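Review note: initFullTextToken backfills fullTextToken for existing rows in fixed-size batches. It fetches up to `limit` documents missing the field, updates them concurrently with Promise.allSettled (one rejection doesn't abort the batch), recurses until the query comes back empty, and on a thrown error waits a second and retries. The same skeleton, condensed; fetchBatch and updateOne are placeholders for the Mongo calls above:

    declare function fetchBatch(limit: number): Promise<unknown[]>;
    declare function updateOne(doc: unknown): Promise<unknown>;

    async function backfill(limit = 50): Promise<void> {
      try {
        const batch = await fetchBatch(limit); // find({ field: { $exists: false } }).limit(limit)
        if (batch.length === 0) return;        // nothing left: migration complete

        const results = await Promise.allSettled(batch.map(updateOne));
        console.log('fulfilled:', results.filter((r) => r.status === 'fulfilled').length);

        return backfill(limit);                // next batch; failed docs get retried later
      } catch (error) {
        await new Promise((r) => setTimeout(r, 1000)); // back off briefly
        return backfill(limit);
      }
    }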
@@ -6,6 +6,7 @@ import type { CreateAppParams } from '@fastgpt/global/core/app/api.d';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
 import { MongoApp } from '@fastgpt/service/core/app/schema';
 import { authUserNotVisitor } from '@fastgpt/service/support/permission/auth/user';
+import { SimpleModeTemplate_FastGPT_Universal } from '@/global/core/app/constants';
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
@@ -39,7 +40,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
     teamId,
     tmbId,
     modules,
-    type
+    type,
+    simpleTemplateId: SimpleModeTemplate_FastGPT_Universal.id
   });
 
   jsonRes(res, {
@@ -41,7 +41,7 @@ function simpleChatTemplate({
     {
       moduleId: 'userChatInput',
       name: '用户问题(对话入口)',
-      logo: '/imgs/module/userChatInput.png',
+      avatar: '/imgs/module/userChatInput.png',
       flowType: 'questionInput',
       position: {
         x: 464.32198615344566,
@@ -73,7 +73,7 @@ function simpleChatTemplate({
     {
       moduleId: 'history',
       name: '聊天记录',
-      logo: '/imgs/module/history.png',
+      avatar: '/imgs/module/history.png',
       flowType: 'historyNode',
       position: {
         x: 452.5466249541586,
@@ -114,7 +114,7 @@ function simpleChatTemplate({
     {
       moduleId: 'chatModule',
       name: 'AI 对话',
-      logo: '/imgs/module/AI.png',
+      avatar: '/imgs/module/AI.png',
       flowType: 'chatNode',
       showStatus: true,
       position: {
@@ -284,7 +284,7 @@ function datasetTemplate({
     {
       moduleId: 'userChatInput',
       name: '用户问题(对话入口)',
-      logo: '/imgs/module/userChatInput.png',
+      avatar: '/imgs/module/userChatInput.png',
       flowType: 'questionInput',
       position: {
         x: 464.32198615344566,
@@ -320,7 +320,7 @@ function datasetTemplate({
     {
       moduleId: 'history',
       name: '聊天记录',
-      logo: '/imgs/module/history.png',
+      avatar: '/imgs/module/history.png',
       flowType: 'historyNode',
       position: {
         x: 452.5466249541586,
@@ -361,7 +361,7 @@ function datasetTemplate({
     {
       moduleId: 'datasetSearch',
       name: '知识库搜索',
-      logo: '/imgs/module/db.png',
+      avatar: '/imgs/module/db.png',
       flowType: 'datasetSearchNode',
       showStatus: true,
       position: {
@@ -454,7 +454,7 @@ function datasetTemplate({
     {
       moduleId: 'chatModule',
       name: 'AI 对话',
-      logo: '/imgs/module/AI.png',
+      avatar: '/imgs/module/AI.png',
       flowType: 'chatNode',
       showStatus: true,
       position: {
@@ -15,6 +15,7 @@ import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controll
 import { authTeamBalance } from '@/service/support/permission/auth/bill';
 import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
 import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
+import { simpleText } from '@fastgpt/global/common/string/tools';
 
 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
@@ -46,8 +47,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   ] = await Promise.all([getCollectionWithDataset(collectionId), authTeamBalance(teamId)]);
 
   // format data
-  const formatQ = q.replace(/\\n/g, '\n').trim().replace(/'/g, '"');
-  const formatA = a?.replace(/\\n/g, '\n').trim().replace(/'/g, '"') || '';
+  const formatQ = simpleText(q);
+  const formatA = simpleText(a);
+  const formatIndexes = indexes?.map((item) => ({
+    ...item,
+    text: simpleText(item.text)
+  }));
 
   // token check
   const token = countPromptTokens(formatQ, 'system');
@@ -72,7 +77,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     q: formatQ,
     a: formatA,
     model: vectorModelData.model,
-    indexes
+    indexes: formatIndexes
   });
 
   pushGenerateVectorBill({
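Review note: the hand-rolled normalization chain (unescaping literal "\n", trimming, swapping quotes) is replaced by the shared simpleText helper, now also applied to every index's text. Its exact body isn't part of this diff; a plausible shape of such a normalizer, stated as an assumption rather than the library's code:

    // Assumed sketch of a simpleText-style cleaner; the real implementation in
    // @fastgpt/global/common/string/tools may differ.
    const simpleTextSketch = (text = '') =>
      text
        .trim()
        .replace(/\\n/g, '\n')          // unescape literal "\n" sequences
        .replace(/\n{3,}/g, '\n\n')     // collapse runs of blank lines
        .replace(/[\x00-\x08]/g, ' ');  // strip control chars that upset embedding APIs

If the helper scrubs control characters like this, it would explain why generateVector (later in this diff) drops its own /[\x00-\x08]/ replacement.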
@@ -13,6 +13,7 @@ import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
 import { getQAModel, getVectorModel } from '@/service/core/ai/model';
 import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
 import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
+import { simpleText } from '@fastgpt/global/common/string/tools';
 
 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
@@ -72,6 +73,21 @@ export async function pushDataToDatasetCollection({
     collectionId
   });
 
+  // format q and a, remove empty char
+  data.forEach((item) => {
+    item.q = simpleText(item.q);
+    item.a = simpleText(item.a);
+
+    item.indexes = item.indexes
+      ?.map((index) => {
+        return {
+          ...index,
+          text: simpleText(index.text)
+        };
+      })
+      .filter(Boolean);
+  });
+
   // filter repeat or equal content
   const set = new Set();
   const filterResult: Record<string, PushDatasetDataChunkProps[]> = {
@@ -1,9 +1,8 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
 import { withNextCors } from '@fastgpt/service/common/middle/cors';
-import type { SearchTestProps } from '@/global/core/api/datasetReq.d';
+import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
 import { connectToDatabase } from '@/service/mongo';
-import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
 import { authTeamBalance } from '@/service/support/permission/auth/bill';
 import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
@@ -22,6 +21,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     throw new Error('缺少参数');
   }
 
+  const start = Date.now();
+
   // auth dataset role
   const { dataset, teamId, tmbId, apikey } = await authDataset({
     req,
@@ -61,8 +62,11 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     });
   }
 
-  jsonRes<SearchDataResponseItemType[]>(res, {
-    data: searchRes
+  jsonRes<SearchTestResponse>(res, {
+    data: {
+      list: searchRes,
+      duration: `${((Date.now() - start) / 1000).toFixed(3)}s`
+    }
   });
 } catch (err) {
   jsonRes(res, {
@@ -1,17 +1,16 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
 import { connectToDatabase } from '@/service/mongo';
-import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
 import { authPluginCrud } from '@fastgpt/service/support/permission/auth/plugin';
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     const { id } = req.query as { id: string };
     await connectToDatabase();
-    await authPluginCrud({ req, authToken: true, id, per: 'r' });
+    const { plugin } = await authPluginCrud({ req, authToken: true, id, per: 'r' });
 
     jsonRes(res, {
-      data: await MongoPlugin.findOne({ id })
+      data: plugin
     });
   } catch (err) {
     jsonRes(res, {
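Review note: authPluginCrud now hands back the plugin it already loaded, so the handler stops issuing a second query; the old MongoPlugin.findOne({ id }) also appears to match a plain id field rather than _id, which returning the authorized document sidesteps. The general pattern, sketched with an assumed signature rather than the repo's exact helper:

    // Permission helper that returns the loaded resource to avoid double fetches.
    async function authResource<T>(
      load: () => Promise<T | null>,
      canRead: (doc: T) => boolean
    ): Promise<T> {
      const doc = await load();
      if (!doc) throw new Error('not found');
      if (!canRead(doc)) throw new Error('forbidden');
      return doc; // caller reuses the document instead of re-querying
    }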
@@ -230,16 +230,13 @@ const Info = (
   placeholder={'标签,使用空格分割。'}
   maxLength={30}
   onChange={(e) => {
-    setValue(
-      'tags',
-      e.target.value.split(' ').filter((item) => item)
-    );
+    setValue('tags', e.target.value.split(' ').filter(Boolean));
     setRefresh(!refresh);
   }}
 />
 <Flex w={'100%'} pl={['90px', '160px']} mt={2}>
   {getValues('tags')
-    .filter((item) => item)
+    .filter(Boolean)
     .map((item, i) => (
       <Tag mr={2} mb={2} key={i} whiteSpace={'nowrap'}>
         {item}
@@ -16,6 +16,7 @@ import { QuestionOutlineIcon } from '@chakra-ui/icons';
 import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import { useTranslation } from 'next-i18next';
 import { feConfigs } from '@/web/common/system/staticData';
+import { SearchTestResponse } from '../../../../global/core/dataset/api';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
 
 const Test = ({ datasetId }: { datasetId: string }) => {
@@ -37,20 +38,21 @@ const Test = ({ datasetId }: { datasetId: string }) => {
   );
 
   const { mutate, isLoading } = useRequest({
-    mutationFn: () => postSearchText({ datasetId, text: inputText.trim(), rerank, limit: 20 }),
-    onSuccess(res: SearchDataResponseItemType[]) {
-      if (!res || res.length === 0) {
+    mutationFn: () => postSearchText({ datasetId, text: inputText.trim(), rerank, limit: 30 }),
+    onSuccess(res: SearchTestResponse) {
+      if (!res || res.list.length === 0) {
         return toast({
           status: 'warning',
           title: t('dataset.test.noResult')
         });
       }
-      const testItem = {
+      const testItem: SearchTestStoreItemType = {
        id: nanoid(),
        datasetId,
        text: inputText.trim(),
        time: new Date(),
-        results: res
+        results: res.list,
+        duration: res.duration
      };
      pushDatasetTestItem(testItem);
      setDatasetTestItem(testItem);
@@ -176,7 +178,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
   <>
     <Flex alignItems={'center'}>
       <Box fontSize={'3xl'} color={'myGray.600'}>
-        测试结果
+        {t('core.dataset.test.Test Result')}
       </Box>
       <MyTooltip
         label={
@@ -185,12 +187,13 @@ const Test = ({ datasetId }: { datasetId: string }) => {
         forceShow
       >
         <QuestionOutlineIcon
-          ml={2}
+          mx={2}
          color={'myGray.600'}
          cursor={'pointer'}
          fontSize={'lg'}
        />
      </MyTooltip>
+      <Box>({datasetTestItem.duration})</Box>
    </Flex>
    <Grid
      mt={1}
@@ -178,7 +178,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
   <Flex mt={2} flexWrap={'wrap'}>
     {getValues('tags')
       .split(' ')
-      .filter((item) => item)
+      .filter(Boolean)
      .map((item, i) => (
        <Tag mr={2} mb={2} key={i} whiteSpace={'nowrap'}>
          {item}
@@ -390,13 +390,11 @@ const Kb = () => {
   </Flex>
   <Box flex={'1 0 0'} overflow={'hidden'} pt={2}>
     <Flex>
-      {dataset.tags
-        .filter((item) => item)
-        .map((tag, i) => (
-          <Tag key={i} mr={2} mb={2}>
-            {tag}
-          </Tag>
-        ))}
+      {dataset.tags.filter(Boolean).map((tag, i) => (
+        <Tag key={i} mr={2} mb={2}>
+          {tag}
+        </Tag>
+      ))}
     </Flex>
   </Box>
   <Flex alignItems={'center'} fontSize={'sm'}>
@@ -34,7 +34,7 @@ const PreviewPlugin = ({
   item: {
     moduleId: 'plugin',
     flowType: FlowNodeTypeEnum.pluginModule,
-    logo: plugin.avatar,
+    avatar: plugin.avatar,
    name: plugin.name,
    intro: plugin.intro,
    ...formatPluginToPreviewModule(plugin._id, modules)
@@ -48,27 +48,32 @@ const Render = ({ pluginId }: Props) => {
     return copyTemplates;
   }, [nodes]);
 
-  const { data } = useQuery(['getOnePlugin', pluginId], () => getOnePlugin(pluginId), {
-    onError: (error) => {
-      toast({
-        status: 'warning',
-        title: getErrText(error, t('plugin.Load Plugin Failed'))
-      });
-      router.replace('/plugin/list');
+  const { data: pluginDetail } = useQuery(
+    ['getOnePlugin', pluginId],
+    () => getOnePlugin(pluginId),
+    {
+      onError: (error) => {
+        toast({
+          status: 'warning',
+          title: getErrText(error, t('plugin.Load Plugin Failed'))
+        });
+        router.replace('/plugin/list');
+      }
     }
-  });
+  );
+  console.log(pluginDetail);
 
   useQuery(['getPlugTemplates'], () => loadPluginTemplates());
   const filterPlugins = useMemo(() => {
     return pluginModuleTemplates.filter((item) => item.id !== pluginId);
   }, [pluginId, pluginModuleTemplates]);
 
-  return data ? (
+  return pluginDetail ? (
     <Flow
       systemTemplates={filterTemplates}
       pluginTemplates={filterPlugins}
-      modules={data?.modules || []}
-      Header={<Header plugin={data} onClose={() => router.back()} />}
+      modules={pluginDetail?.modules || []}
+      Header={<Header plugin={pluginDetail} onClose={() => router.back()} />}
     />
   ) : (
     <Loading />
@@ -24,17 +24,81 @@ import MyModal from '@/components/MyModal';
 import { useTranslation } from 'next-i18next';
 import { useConfirm } from '@/web/common/hooks/useConfirm';
 import MyIcon from '@/components/Icon';
+import { CreateOnePluginParams } from '@fastgpt/global/core/plugin/controller';
 
-export type FormType = {
+export type FormType = CreateOnePluginParams & {
   id?: string;
-  avatar: string;
-  name: string;
-  intro: string;
 };
-export const defaultForm = {
+export const defaultForm: FormType = {
   avatar: '/icon/logo.svg',
   name: '',
-  intro: ''
+  intro: '',
+  modules: [
+    {
+      moduleId: 'w90mfp',
+      name: '定义插件输入',
+      avatar: '/imgs/module/input.png',
+      flowType: 'pluginInput',
+      showStatus: false,
+      position: {
+        x: 616.4226348688949,
+        y: -165.05298493910115
+      },
+      inputs: [
+        {
+          key: 'question',
+          valueType: 'string',
+          type: 'target',
+          label: '用户问题',
+          required: true,
+          edit: true,
+          connected: false
+        }
+      ],
+      outputs: [
+        {
+          key: 'question',
+          valueType: 'string',
+          label: '用户问题',
+          type: 'source',
+          edit: true,
+          targets: []
+        }
+      ]
+    },
+    {
+      moduleId: 'tze1ju',
+      name: '定义插件输出',
+      avatar: '/imgs/module/output.png',
+      flowType: 'pluginOutput',
+      showStatus: false,
+      position: {
+        x: 1607.7142331269126,
+        y: -151.8669210746189
+      },
+      inputs: [
+        {
+          key: 'answer',
+          type: 'target',
+          valueType: 'string',
+          label: '答案',
+          required: true,
+          edit: true,
+          connected: true
+        }
+      ],
+      outputs: [
+        {
+          key: 'answer',
+          valueType: 'string',
+          label: '答案',
+          type: 'source',
+          edit: true,
+          targets: []
+        }
+      ]
+    }
+  ]
 };
 
 const CreateModal = ({
@@ -8,5 +8,5 @@ export const getChatModelNameListByModules = (modules: ModuleItemType[]): string
     const model = item.inputs.find((input) => input.key === 'model')?.value;
     return global.chatModels.find((item) => item.model === model)?.name || '';
   })
-  .filter((item) => item);
+  .filter(Boolean);
 };
@@ -8,6 +8,7 @@ import { deletePgDataById, insertData2Pg, updatePgDataById } from './pg';
 import { Types } from 'mongoose';
 import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/constant';
 import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
+import { jiebaSplit } from '../utils';
 
 /* insert data.
  * 1. create data id
@@ -34,9 +35,6 @@ export async function insertData2Dataset({
     return Promise.reject("teamId and tmbId can't be the same");
   }
 
-  q = q.trim();
-  a = a.trim();
-
   const id = new Types.ObjectId();
   const qaStr = `${q}\n${a}`.trim();
 
@@ -74,6 +72,7 @@ export async function insertData2Dataset({
     collectionId,
     q,
     a,
+    fullTextToken: jiebaSplit({ text: q + a }),
     indexes: indexes.map((item, i) => ({
       ...item,
       dataId: result[i].insertId
@@ -203,6 +202,7 @@ export async function updateData2Dataset({
   // update mongo
   mongoData.q = q || mongoData.q;
   mongoData.a = a ?? mongoData.a;
+  mongoData.fullTextToken = jiebaSplit({ text: mongoData.q + mongoData.a });
   // @ts-ignore
   mongoData.indexes = indexes;
   await mongoData.save();
@@ -1,5 +1,8 @@
 import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
-import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type.d';
+import type {
+  DatasetDataWithCollectionType,
+  SearchDataResponseItemType
+} from '@fastgpt/global/core/dataset/type.d';
 import { PgClient } from '@fastgpt/service/common/pg';
 import { getVectorsByText } from '@/service/core/ai/vector';
 import { delay } from '@/utils/tools';
@@ -8,6 +11,7 @@ import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
 import { POST } from '@fastgpt/service/common/api/plusRequest';
 import { PostReRankResponse } from '@fastgpt/global/core/ai/api';
+import { jiebaSplit } from '../utils';
 
 export async function insertData2Pg({
   mongoDataId,
@@ -125,39 +129,100 @@ export async function deletePgDataById(
   };
 }
 
-// search
-export async function searchDatasetData({
-  text,
-  model,
-  similarity = 0,
-  limit,
-  datasetIds = [],
-  rerank = false
-}: {
+// ------------------ search start ------------------
+type SearchProps = {
   text: string;
   model: string;
   similarity?: number; // min distance
   limit: number;
   datasetIds: string[];
   rerank?: boolean;
-}) {
+};
+export async function searchDatasetData(props: SearchProps) {
+  const { text, similarity = 0, limit, rerank = false } = props;
+
+  const [{ tokenLen, embeddingRecallResults }, { fullTextRecallResults }] = await Promise.all([
+    embeddingRecall({
+      ...props,
+      limit: rerank ? Math.max(50, limit * 3) : limit * 2
+    }),
+    fullTextRecall({
+      ...props,
+      limit: 40
+    })
+  ]);
+
+  // concat recall result
+  let set = new Set<string>();
+  const concatRecallResults = embeddingRecallResults;
+  for (const item of fullTextRecallResults) {
+    if (!set.has(item.id)) {
+      concatRecallResults.push(item);
+      set.add(item.id);
+    }
+  }
+
+  // remove same q and a data
+  set = new Set<string>();
+  const filterSameDataResults = concatRecallResults.filter((item) => {
+    const str = `${item.q}${item.a}`.trim();
+    if (set.has(str)) return false;
+    set.add(str);
+    return true;
+  });
+
+  if (!rerank) {
+    return {
+      searchRes: filterSameDataResults.slice(0, limit),
+      tokenLen
+    };
+  }
+
+  // ReRank result
+  const reRankResults = await reRankSearchResult({
+    query: text,
+    data: filterSameDataResults
+  });
+
+  // similarity filter
+  const filterReRankResults = reRankResults.filter((item) => item.score > similarity);
+
+  // concat rerank and embedding data
+  set = new Set<string>(filterReRankResults.map((item) => item.id));
+  const concatResult = filterReRankResults.concat(
+    filterSameDataResults.filter((item) => {
+      if (set.has(item.id)) return false;
+      set.add(item.id);
+      return true;
+    })
+  );
+
+  return {
+    searchRes: concatResult.slice(0, limit),
+    tokenLen
+  };
+}
+export async function embeddingRecall({
+  text,
+  model,
+  similarity = 0,
+  limit,
+  datasetIds = [],
+  rerank = false
+}: SearchProps) {
   const { vectors, tokenLen } = await getVectorsByText({
     model,
     input: [text]
   });
 
-  const minLimit = global.systemEnv.pluginBaseUrl ? Math.max(50, limit * 4) : limit * 2;
-
   const results: any = await PgClient.query(
     `BEGIN;
     SET LOCAL hnsw.ef_search = ${global.systemEnv.pgHNSWEfSearch || 100};
-    select id, collection_id, data_id, (vector <#> '[${vectors[0]}]') * -1 AS score from ${PgDatasetTableName}
-    where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity}
-    order by score desc limit ${minLimit};
+    select id, collection_id, data_id, (vector <#> '[${vectors[0]}]') * -1 AS score
+    from ${PgDatasetTableName}
+    where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
+    ${rerank ? '' : `AND vector <#> '[${vectors[0]}]' < -${similarity}`}
+    order by score desc limit ${limit};
     COMMIT;`
   );
@@ -212,47 +277,54 @@ export async function searchDatasetData({
     })
     .filter((item) => item !== null) as SearchDataResponseItemType[];
 
-  // remove same q and a data
-  set = new Set<string>();
-  const filterData = formatResult.filter((item) => {
-    const str = `${item.q}${item.a}`.trim();
-    if (set.has(str)) return false;
-    set.add(str);
-    return true;
-  });
-
-  if (!rerank) {
-    return {
-      searchRes: filterData.slice(0, limit),
-      tokenLen
-    };
-  }
-
-  // ReRank result
-  const reRankResult = await reRankSearchResult({
-    query: text,
-    data: filterData
-  });
-
-  // similarity filter
-  const filterReRankResult = reRankResult.filter((item) => item.score > similarity);
-
-  // concat rerank and embedding data
-  set = new Set<string>(filterReRankResult.map((item) => item.id));
-  const concatResult = filterReRankResult.concat(
-    filterData.filter((item) => {
-      if (set.has(item.id)) return false;
-      set.add(item.id);
-      return true;
-    })
-  );
-
   return {
-    searchRes: concatResult.slice(0, limit),
+    embeddingRecallResults: formatResult,
     tokenLen
   };
 }
+export async function fullTextRecall({
+  text,
+  limit,
+  datasetIds = [],
+  rerank = false
+}: SearchProps): Promise<{
+  fullTextRecallResults: SearchDataResponseItemType[];
+  tokenLen: number;
+}> {
+  if (!rerank) {
+    return {
+      fullTextRecallResults: [],
+      tokenLen: 0
+    };
+  }
+
+  const result = (await MongoDatasetData.find(
+    {
+      datasetId: { $in: datasetIds.map((item) => item) },
+      $text: { $search: jiebaSplit({ text }) }
+    },
+    { score: { $meta: 'textScore' } }
+  )
+    .sort({ score: { $meta: 'textScore' } })
+    .limit(limit)
+    .populate('collectionId')
+    .lean()) as DatasetDataWithCollectionType[];
+
+  return {
+    fullTextRecallResults: result.map((item) => ({
+      id: String(item._id),
+      datasetId: String(item.datasetId),
+      collectionId: String(item.collectionId._id),
+      sourceName: item.collectionId.name || '',
+      sourceId: item.collectionId.metadata?.fileId || item.collectionId.metadata?.rawLink,
+      q: item.q,
+      a: item.a,
+      indexes: item.indexes,
+      score: 1
+    })),
+    tokenLen: 0
+  };
+}
 // plus reRank search result
 export async function reRankSearchResult({
   data,
@@ -279,7 +351,7 @@ export async function reRankSearchResult({
       score: item.score ?? target.score
     };
   })
-    .filter((item) => item) as SearchDataResponseItemType[];
+    .filter(Boolean) as SearchDataResponseItemType[];
 
   return mergeResult;
 } catch (error) {
@@ -288,3 +360,4 @@ export async function reRankSearchResult({
     return data;
   }
 }
+// ------------------ search end ------------------
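Review note: the search path is reorganized from one monolithic function into a two-stage hybrid recall. embeddingRecall runs the pgvector inner-product query (the similarity cutoff is skipped when rerank is on, so borderline candidates survive to the reranker); fullTextRecall runs a Mongo $text query over the jieba tokens and is only active in rerank mode. Results are unioned by id, deduplicated by identical q+a content, then optionally reranked and score-filtered. The merge/dedupe core, distilled with minimal types but the same logic as above:

    type Hit = { id: string; q: string; a: string; score: number };

    function mergeRecalls(embedding: Hit[], fullText: Hit[], limit: number): Hit[] {
      // union by id; embedding hits keep priority
      const seenIds = new Set(embedding.map((h) => h.id));
      const union = [...embedding, ...fullText.filter((h) => !seenIds.has(h.id))];

      // drop entries whose q+a text is identical
      const seenText = new Set<string>();
      const deduped = union.filter((h) => {
        const key = `${h.q}${h.a}`.trim();
        if (seenText.has(key)) return false;
        seenText.add(key);
        return true;
      });

      return deduped.slice(0, limit);
    }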
projects/app/src/service/core/dataset/utils.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
+import { cut, extract } from '@node-rs/jieba';
+
+/**
+ * Same value judgment
+ */
+export async function hasSameValue({
+  collectionId,
+  q,
+  a = ''
+}: {
+  collectionId: string;
+  q: string;
+  a?: string;
+}) {
+  const count = await MongoDatasetData.countDocuments({
+    q,
+    a,
+    collectionId
+  });
+
+  if (count > 0) {
+    return Promise.reject('已经存在完全一致的数据');
+  }
+}
+
+export function jiebaSplit({ text }: { text: string }) {
+  const tokens = cut(text, true);
+
+  return tokens
+    .map((item) => item.replace(/[^\u4e00-\u9fa5a-zA-Z0-9\s]/g, '').trim())
+    .filter(Boolean)
+    .join(' ');
+}
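Review note: jiebaSplit makes Chinese text searchable by MongoDB's whitespace-delimited $text engine: cut segments the string, each token is stripped down to CJK/alphanumeric characters, and the tokens are re-joined with spaces. This presumes a text index on fullTextToken in the data schema, which is not shown in this diff. A sketch of both sides:

    import { cut } from '@node-rs/jieba';

    // Write side (as in jiebaSplit above); exact segmentation depends on the dictionary.
    const tokens = cut('FastGPT 支持混合检索', true)
      .map((t) => t.replace(/[^\u4e00-\u9fa5a-zA-Z0-9\s]/g, '').trim())
      .filter(Boolean)
      .join(' ');

    // Query side (as in fullTextRecall above); the index itself is assumed:
    //   DatasetDataSchema.index({ fullTextToken: 'text' });
    // MongoDatasetData.find(
    //   { $text: { $search: tokens } },
    //   { score: { $meta: 'textScore' } }
    // ).sort({ score: { $meta: 'textScore' } });

Also note the new file imports extract from @node-rs/jieba without using it.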
@@ -13,8 +13,15 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
 import { authTeamBalance } from '../support/permission/auth/bill';
 import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
 
-const reduceQueue = () => {
+const reduceQueue = (retry = false) => {
   global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
+  if (global.qaQueueLen === 0 && retry) {
+    setTimeout(() => {
+      generateQA();
+    }, 60000);
+  }
+
+  return global.vectorQueueLen === 0;
 };
 
 export async function generateQA(): Promise<any> {
@@ -32,7 +39,7 @@ export async function generateQA(): Promise<any> {
   const data = await MongoDatasetTraining.findOneAndUpdate(
     {
       mode: TrainingModeEnum.qa,
-      lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
+      lockTime: { $lte: new Date(Date.now() - 6 * 60 * 1000) }
     },
     {
       lockTime: new Date()
@@ -70,12 +77,13 @@ export async function generateQA(): Promise<any> {
   }
 })();
 
-  if (done) {
-    reduceQueue();
-    global.vectorQueueLen <= 0 && console.log(`【QA】Task Done`);
+  if (done || !data) {
+    if (reduceQueue()) {
+      console.log(`【QA】Task Done`);
+    }
     return;
   }
-  if (error || !data) {
+  if (error) {
     reduceQueue();
     return generateQA();
   }
@@ -171,7 +179,7 @@ export async function generateQA(): Promise<any> {
   reduceQueue();
   generateQA();
 } catch (err: any) {
-  reduceQueue();
+  reduceQueue(true);
   // log
   if (err?.response) {
     addLog.info('openai error: 生成QA错误', {
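Review note: reduceQueue gains two responsibilities: it reports whether the queue just drained (so "Task Done" logs exactly once), and, when called from the error path with retry = true, it schedules the generator to restart after 60s so a transient upstream failure doesn't strand locked training data. One caveat visible in the hunk above: the QA version returns global.vectorQueueLen === 0, where global.qaQueueLen would be expected; it reads like a copy-paste slip from the vector queue below. The distilled pattern, with a local counter standing in for the globals:

    let queueLen = 0;

    function reduceQueue(run: () => void, retry = false): boolean {
      queueLen = Math.max(0, queueLen - 1);

      if (queueLen === 0 && retry) {
        setTimeout(run, 60_000); // last worker died on an error: retry in a minute
      }
      return queueLen === 0;     // true => safe to log "task done" exactly once
    }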
@@ -7,8 +7,16 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
 import { authTeamBalance } from '@/service/support/permission/auth/bill';
 import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
 
-const reduceQueue = () => {
+const reduceQueue = (retry = false) => {
   global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
+
+  if (global.vectorQueueLen === 0 && retry) {
+    setTimeout(() => {
+      generateVector();
+    }, 60000);
+  }
+
+  return global.vectorQueueLen === 0;
 };
 
 /* Index generation queue: each import runs as its own worker */
@@ -57,8 +65,8 @@ export async function generateVector(): Promise<any> {
   return {
     data,
     dataItem: {
-      q: data.q.replace(/[\x00-\x08]/g, ' '),
-      a: data.a?.replace(/[\x00-\x08]/g, ' ') || '',
+      q: data.q,
+      a: data.a || '',
       indexes: data.indexes
     }
   };
@@ -70,12 +78,13 @@ export async function generateVector(): Promise<any> {
   }
 })();
 
-  if (done) {
-    reduceQueue();
-    global.vectorQueueLen <= 0 && console.log(`【index】Task done`);
+  if (done || !data) {
+    if (reduceQueue()) {
+      console.log(`【index】Task done`);
+    }
     return;
   }
-  if (error || !data) {
+  if (error) {
     reduceQueue();
     return generateVector();
   }
@@ -108,8 +117,15 @@ export async function generateVector(): Promise<any> {
   }
 
   // create vector and insert
-
   try {
+    // invalid data
+    if (!data.q.trim()) {
+      await MongoDatasetTraining.findByIdAndDelete(data._id);
+      reduceQueue();
+      generateVector();
+      return;
+    }
+
     // insert data to pg
     const { tokenLen } = await insertData2Dataset({
       teamId: data.teamId,
@@ -135,7 +151,7 @@ export async function generateVector(): Promise<any> {
   reduceQueue();
   generateVector();
 } catch (err: any) {
-  reduceQueue();
+  reduceQueue(true);
   // log
   if (err?.response) {
     addLog.info('openai error: 生成向量错误', {
@@ -2,7 +2,7 @@ const decoder = new TextDecoder();
 
 export const parseStreamChunk = (value: BufferSource) => {
   const chunk = decoder.decode(value);
-  const chunkLines = chunk.split('\n\n').filter((item) => item);
+  const chunkLines = chunk.split('\n\n').filter(Boolean);
   const chunkResponse = chunkLines.map((item) => {
     const splitEvent = item.split('\n');
     if (splitEvent.length === 2) {
@@ -3,12 +3,12 @@ import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder
 import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
 import type {
   DatasetUpdateParams,
-  SearchTestProps,
   GetDatasetCollectionsProps,
   GetDatasetDataListProps,
   CreateDatasetCollectionParams,
   UpdateDatasetCollectionParams
 } from '@/global/core/api/datasetReq.d';
+import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
 import type {
   PushDatasetDataProps,
   UpdateDatasetDataProps,
@@ -21,8 +21,6 @@ import type {
   SearchDataResponseItemType
 } from '@fastgpt/global/core/dataset/type';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constant';
 import { getToken } from '@/web/support/user/auth';
 import download from 'downloadjs';
-import type { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
-import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
 import { PagingData } from '@/types';
@@ -53,7 +51,7 @@ export const getCheckExportLimit = (datasetId: string) =>
 
 /* =========== search test ============ */
 export const postSearchText = (data: SearchTestProps) =>
-  POST<SearchDataResponseItemType[]>(`/core/dataset/searchTest`, data);
+  POST<SearchTestResponse>(`/core/dataset/searchTest`, data);
 
 /* ============================= collections ==================================== */
 export const getDatasetCollections = (data: GetDatasetCollectionsProps) =>
@@ -8,6 +8,7 @@ export type SearchTestStoreItemType = {
   datasetId: string;
   text: string;
   time: Date;
+  duration: string;
   results: SearchDataResponseItemType[];
 };