Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 20:37:48 +00:00)
V4.8.16 dev (#3431)
* feat: add feishu & yuque dataset (#3379)
* feat: add feishu & yuque dataset
* fix ts
* fix ts
* move type position
* fix
* fix: merge interface
* fix
* feat: dingtalk sso support (#3408)
* fix: optional sso state
* feat: dingtalk bot
* feat: dingtalk sso login
* chore: move i18n to user namespace
* feat: dingtalk bot integration (#3415)
* feat: dingtalk bot integration
* docs: config dingtalk bot
* feat: SearXNG service (#3413)
* feat: SearXNG service
* added courseUrl
* added the official documentation
* fixed the response returned in error cases
* Tracks (#3420)
* feat: node intro
* feat: add domain track
* dingding sso login
* perf: api dataset code and add doc
* feat: tracks
* feat: searXNG plugins
* fix: ts
* feat: delete node tracks (#3423)
* fix: dingtalk bot GET verification (#3424)
* 4.8.16 test: fix: plugin inputs render; fix: ui offset (#3426)
* fix: ui offset
* perf: dingding talk
* fix: plugin inputs render
* feat: menu all folder (#3429)
* fix: recall code

---------

Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: Jiangween <145003935+Jiangween@users.noreply.github.com>
@@ -7,8 +7,9 @@ import { TextSplitProps, splitText2Chunks } from '@fastgpt/global/common/string/
 import axios from 'axios';
 import { readRawContentByFileBuffer } from '../../common/file/read/utils';
 import { parseFileExtensionFromUrl } from '@fastgpt/global/common/string/tools';
-import { APIFileServer } from '@fastgpt/global/core/dataset/apiDataset';
+import { APIFileServer, FeishuServer, YuqueServer } from '@fastgpt/global/core/dataset/apiDataset';
 import { useApiDatasetRequest } from './apiDataset/api';
+import { POST } from '../../common/api/plusRequest';

 export const readFileRawTextByUrl = async ({
   teamId,
@@ -53,7 +54,9 @@ export const readDatasetSourceRawText = async ({
   isQAImport,
   selector,
   externalFileId,
-  apiServer
+  apiServer,
+  feishuServer,
+  yuqueServer
 }: {
   teamId: string;
   type: DatasetSourceReadTypeEnum;
@@ -63,6 +66,8 @@ export const readDatasetSourceRawText = async ({
   selector?: string; // link selector
   externalFileId?: string; // external file dataset
   apiServer?: APIFileServer; // api dataset
+  feishuServer?: FeishuServer; // feishu dataset
+  yuqueServer?: YuqueServer; // yuque dataset
 }): Promise<string> => {
   if (type === DatasetSourceReadTypeEnum.fileLocal) {
     const { rawText } = await readFileContentFromMongo({
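The FeishuServer and YuqueServer types are imported from '@fastgpt/global/core/dataset/apiDataset', but their field definitions are not part of this diff. For orientation only, a plausible sketch of their shapes is given below; the field names are assumptions, not taken from this commit:

// Hypothetical sketch only; the authoritative definitions live in
// @fastgpt/global/core/dataset/apiDataset and may differ.
type FeishuServer = {
  appId: string; // Feishu app credential (assumed)
  appSecret: string; // Feishu app secret (assumed)
  folderToken: string; // folder whose documents are imported (assumed)
};

type YuqueServer = {
  userId: string; // Yuque account id (assumed)
  token: string; // Yuque access token (assumed)
};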
@@ -88,28 +93,45 @@ export const readDatasetSourceRawText = async ({
     });
     return rawText;
   } else if (type === DatasetSourceReadTypeEnum.apiFile) {
-    if (!apiServer) return Promise.reject('apiServer not found');
     const rawText = await readApiServerFileContent({
       apiServer,
+      feishuServer,
+      yuqueServer,
       apiFileId: sourceId,
       teamId
     });
     return rawText;
   }

   return '';
 };

 export const readApiServerFileContent = async ({
   apiServer,
+  feishuServer,
+  yuqueServer,
   apiFileId,
   teamId
 }: {
-  apiServer: APIFileServer;
+  apiServer?: APIFileServer;
+  feishuServer?: FeishuServer;
+  yuqueServer?: YuqueServer;
   apiFileId: string;
   teamId: string;
 }) => {
-  return useApiDatasetRequest({ apiServer }).getFileContent({ teamId, apiFileId });
+  if (apiServer) {
+    return useApiDatasetRequest({ apiServer }).getFileContent({ teamId, apiFileId });
+  }
+
+  if (feishuServer || yuqueServer) {
+    return POST<string>(`/core/dataset/systemApiDataset`, {
+      type: 'content',
+      feishuServer,
+      yuqueServer,
+      apiFileId
+    });
+  }
+
+  return Promise.reject('No apiServer or feishuServer or yuqueServer');
 };

 export const rawText2Chunks = ({