4.7.1 production (#1173)
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
@@ -1,5 +1,5 @@
 ---
-title: 'V4.7.1 (In Progress)'
+title: 'V4.7.1 (Initialization Required)'
 description: 'FastGPT V4.7.1 Release Notes'
 icon: 'upgrade'
 draft: false
@@ -32,8 +32,10 @@ curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
 3. New - Laf cloud function integration: cloud functions from a Laf account can be used as HTTP modules.
 4. New - Scheduled timer that cleans up stale data. (It cleans in small batches covering only the last n hours, so keep the service running continuously; if it has been down for a long time, call the clearInvalidData endpoint for a full cleanup.)
 5. New (commercial edition) - System notifications configurable from the admin console.
-6. Change - CSV import template: header validation removed; the first two columns are read automatically.
-7. Fix - Incorrect data type validation on tool-call module connections.
-8. Fix - Data destructuring failure when entering a custom index.
-9. Fix - rerank model data format.
-10. Fix - Question-completion history bug.
+6. Optimization - Support dataset export in IP mode.
+7. Change - CSV import template: header validation removed; the first two columns are read automatically.
+8. Fix - Incorrect data type validation on tool-call module connections.
+9. Fix - Data destructuring failure when entering a custom index.
+10. Fix - rerank model data format.
+11. Fix - Question-completion history bug.
+12. Fix - Slow loading of the share page in some cases (the database connection was not triggered during SSR).
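Item 4 above falls back to the clearInvalidData admin endpoint shown in the hunk header for full cleanups. A minimal TypeScript sketch of triggering it (the host value is a placeholder, and any auth header your deployment requires is an assumption):

// Uses the global fetch available on Node 18+.
const host = 'https://your-fastgpt-host'; // placeholder

async function clearInvalidData() {
  // Full cleanup, as described in changelog item 4; add auth headers if your admin API requires them.
  const res = await fetch(`${host}/api/admin/clearInvalidData`, { method: 'POST' });
  if (!res.ok) throw new Error(`cleanup failed: ${res.status}`);
}

clearInvalidData().catch(console.error);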
@@ -10,6 +10,6 @@ export const formatFileSize = (bytes: number): string => {
   return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
 };
 
-export const detectFileEncoding = (buffers: string | Buffer) => {
-  return (detect(buffers)?.encoding || 'utf-8') as BufferEncoding;
+export const detectFileEncoding = (buffer: Buffer) => {
+  return detect(buffer.slice(0, 200))?.encoding?.toLocaleLowerCase();
 };
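detectFileEncoding now samples only the first 200 bytes and no longer forces the result into a BufferEncoding. A minimal usage sketch (the import path and sample file are assumptions for illustration):

import { readFileSync } from 'fs';
// Assumed import path; adjust to wherever detectFileEncoding lives in your tree.
import { detectFileEncoding } from './utils';

const buffer = readFileSync('./example.csv'); // placeholder file
const encoding = detectFileEncoding(buffer);  // e.g. 'utf-8', 'gb2312', or undefined
console.log('detected encoding:', encoding ?? 'unknown');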
@@ -9,6 +9,7 @@ import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
 import { ReadFileByBufferParams } from '../read/type';
 import { MongoRwaTextBuffer } from '../../buffer/rawText/schema';
 import { readFileRawContent } from '../read/utils';
+import { PassThrough } from 'stream';
 
 export function getGFSCollection(bucket: `${BucketNameEnum}`) {
   MongoFileSchema;
@@ -113,31 +114,39 @@ export async function getDownloadStream({
   fileId: string;
 }) {
   const bucket = getGridBucket(bucketName);
+  const stream = bucket.openDownloadStream(new Types.ObjectId(fileId));
+  const copyStream = stream.pipe(new PassThrough());
 
-  return bucket.openDownloadStream(new Types.ObjectId(fileId));
-}
-
-export const readFileEncode = async ({
-  bucketName,
-  fileId
-}: {
-  bucketName: `${BucketNameEnum}`;
-  fileId: string;
-}) => {
-  const encodeStream = await getDownloadStream({ bucketName, fileId });
-  let buffers: Buffer = Buffer.from([]);
-  for await (const chunk of encodeStream) {
-    buffers = Buffer.concat([buffers, chunk]);
-    if (buffers.length > 10) {
-      encodeStream.abort();
-      break;
-    }
-  }
-
-  const encoding = detectFileEncoding(buffers);
-
-  return encoding as BufferEncoding;
-};
+  /* get encoding */
+  const buffer = await (() => {
+    return new Promise<Buffer>((resolve, reject) => {
+      let tmpBuffer: Buffer = Buffer.from([]);
+
+      stream.on('data', (chunk) => {
+        if (tmpBuffer.length < 20) {
+          tmpBuffer = Buffer.concat([tmpBuffer, chunk]);
+        }
+        if (tmpBuffer.length >= 20) {
+          resolve(tmpBuffer);
+        }
+      });
+      stream.on('end', () => {
+        resolve(tmpBuffer);
+      });
+      stream.on('error', (err) => {
+        reject(err);
+      });
+    });
+  })();
+
+  const encoding = detectFileEncoding(buffer);
+
+  return {
+    fileStream: copyStream,
+    encoding
+    // encoding: 'utf-8'
+  };
 }
 
 export const readFileContentFromMongo = async ({
   teamId,
@@ -162,9 +171,8 @@ export const readFileContentFromMongo = async ({
     };
   }
 
-  const [file, encoding, fileStream] = await Promise.all([
+  const [file, { encoding, fileStream }] = await Promise.all([
     getFileById({ bucketName, fileId }),
-    readFileEncode({ bucketName, fileId }),
     getDownloadStream({ bucketName, fileId })
   ]);
@@ -176,12 +184,12 @@ export const readFileContentFromMongo = async ({
 
   const fileBuffers = await (() => {
     return new Promise<Buffer>((resolve, reject) => {
-      let buffers = Buffer.from([]);
+      let buffer = Buffer.from([]);
       fileStream.on('data', (chunk) => {
-        buffers = Buffer.concat([buffers, chunk]);
+        buffer = Buffer.concat([buffer, chunk]);
       });
       fileStream.on('end', () => {
-        resolve(buffers);
+        resolve(buffer);
       });
       fileStream.on('error', (err) => {
         reject(err);
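The reworked getDownloadStream above peeks at the first ~20 bytes of the GridFS stream to detect the encoding while piping the full content into a PassThrough copy for later consumption. A self-contained sketch of that pattern with plain Node streams (a generic illustration, not the FastGPT helpers):

import { PassThrough, Readable } from 'stream';

// Peek at the head of a stream for metadata while a piped PassThrough keeps
// the full content available to whoever reads it afterwards.
async function peekAndKeep(source: Readable) {
  const copy = source.pipe(new PassThrough());

  const head = await new Promise<Buffer>((resolve, reject) => {
    let tmp = Buffer.from([]);
    source.on('data', (chunk: Buffer) => {
      if (tmp.length < 20) tmp = Buffer.concat([tmp, chunk]);
      if (tmp.length >= 20) resolve(tmp);
    });
    source.on('end', () => resolve(tmp));
    source.on('error', reject);
  });

  return { head, fileStream: copy };
}

// Usage with an in-memory stream:
const demo = Readable.from([Buffer.from('hello world, this is a demo buffer')]);
peekAndKeep(demo).then(async ({ head, fileStream }) => {
  console.log('first bytes:', head.toString('utf-8'));
  const chunks: Buffer[] = [];
  for await (const chunk of fileStream) chunks.push(chunk as Buffer);
  console.log('full content:', Buffer.concat(chunks).toString('utf-8'));
});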
@@ -6,7 +6,11 @@ export const readPptxRawText = async ({
   buffer,
   encoding
 }: ReadFileByBufferParams): Promise<ReadFileResponse> => {
-  const result = await parseOffice({ buffer, encoding, extension: 'pptx' });
+  const result = await parseOffice({
+    buffer,
+    encoding: encoding as BufferEncoding,
+    extension: 'pptx'
+  });
 
   return {
     rawText: result
@@ -1,8 +1,26 @@
 import { ReadFileByBufferParams, ReadFileResponse } from './type.d';
+import iconv from 'iconv-lite';
+
+const rawEncodingList = [
+  'ascii',
+  'utf8',
+  'utf-8',
+  'utf16le',
+  'utf-16le',
+  'ucs2',
+  'ucs-2',
+  'base64',
+  'base64url',
+  'latin1',
+  'binary',
+  'hex'
+];
 
 // Load the raw file content
 export const readFileRawText = ({ buffer, encoding }: ReadFileByBufferParams): ReadFileResponse => {
-  const content = buffer.toString(encoding);
+  const content = rawEncodingList.includes(encoding)
+    ? buffer.toString(encoding as BufferEncoding)
+    : iconv.decode(buffer, 'gbk');
 
   return {
     rawText: content
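readFileRawText above now falls back to iconv-lite when the detected encoding is not a native Node BufferEncoding, decoding the buffer as GBK. A standalone sketch of why that matters (the sample bytes are the GBK encoding of "你好"):

import iconv from 'iconv-lite';

// GBK bytes for "你好"; Buffer.prototype.toString('utf-8') would mangle them.
const gbkBuffer = Buffer.from([0xc4, 0xe3, 0xba, 0xc3]);

const naive = gbkBuffer.toString('utf-8');      // garbled replacement characters
const decoded = iconv.decode(gbkBuffer, 'gbk'); // '你好'

console.log({ naive, decoded });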
packages/service/common/file/read/type.d.ts
@@ -1,7 +1,7 @@
 export type ReadFileByBufferParams = {
   teamId: string;
   buffer: Buffer;
-  encoding: BufferEncoding;
+  encoding: string;
   metadata?: Record<string, any>;
 };
@@ -103,7 +103,7 @@ export const deleteDatasetDataVector = async (
     }
     return Promise.reject('deleteDatasetData: no where');
   })();
+  console.log(where, '===');
 
   try {
     await PgClient.delete(PgDatasetTableName, {
       where: [where]
@@ -13,8 +13,11 @@ export const connectPg = async (): Promise<Pool> => {
     max: Number(process.env.DB_MAX_LINK || 20),
     min: 10,
     keepAlive: true,
-    idleTimeoutMillis: 60000,
-    connectionTimeoutMillis: 20000
+    idleTimeoutMillis: 600000,
+    connectionTimeoutMillis: 20000,
+    query_timeout: 30000,
+    statement_timeout: 40000,
+    idle_in_transaction_session_timeout: 60000
   });
 
   global.pgClient.on('error', async (err) => {
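An annotated sketch of what each new pool option controls, based on the node-postgres / PostgreSQL settings of the same names (values mirror the diff above; treat the comments as a reading aid, not FastGPT documentation):

import { Pool } from 'pg';

const pool = new Pool({
  max: 20,                                     // upper bound on pooled connections
  min: 10,                                     // keep this many connections warm
  keepAlive: true,                             // enable TCP keep-alive on sockets
  idleTimeoutMillis: 600000,                   // close a client idle in the pool after 10 minutes
  connectionTimeoutMillis: 20000,              // fail if a connection cannot be acquired within 20 s
  query_timeout: 30000,                        // client-side abort for a single query
  statement_timeout: 40000,                    // server-side per-statement timeout for the session
  idle_in_transaction_session_timeout: 60000   // server kills sessions left idle inside a transaction
});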
@@ -13,6 +13,7 @@
     "decompress": "^4.2.1",
     "encoding": "^0.1.13",
     "file-type": "^19.0.0",
+    "iconv-lite": "^0.6.3",
     "json5": "^2.2.3",
     "jsonwebtoken": "^9.0.2",
     "mammoth": "^1.6.0",
@@ -173,7 +173,6 @@ export function registerLexicalTextEntity<T extends TextNode>(
 
 export function textToEditorState(text: string = '') {
   const paragraph = text?.split('\n');
-
   return JSON.stringify({
     root: {
       children: paragraph.map((p) => {
@@ -206,11 +205,23 @@ export function textToEditorState(text: string = '') {
 }
 
 export function editorStateToText(editor: LexicalEditor) {
-  const stringifiedEditorState = JSON.stringify(editor.getEditorState().toJSON());
-  const parsedEditorState = editor.parseEditorState(stringifiedEditorState);
-  const editorStateTextString = parsedEditorState.read(() => $getRoot().getTextContent());
-
-  return editorStateTextString;
+  const editorStateTextString: string[] = [];
+  const paragraphs = editor.getEditorState().toJSON().root.children;
+  paragraphs.forEach((paragraph: any) => {
+    const children = paragraph.children;
+    const paragraphText: string[] = [];
+    children.forEach((child: any) => {
+      if (child.type === 'linebreak') {
+        paragraphText.push(`
+`);
+      } else if (child.text) {
+        paragraphText.push(child.text);
+      }
+    });
+    editorStateTextString.push(paragraphText.join(''));
+  });
+  return editorStateTextString.join(`
+`);
 }
 
 const varRegex = /\{\{([a-zA-Z_][a-zA-Z0-9_]*)\}\}/g;
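The rewritten editorStateToText walks the serialized Lexical state directly instead of re-parsing it. A rough sketch of the JSON shape it expects (simplified; real Lexical serialized nodes carry more fields):

// Simplified shape of editor.getEditorState().toJSON() as consumed above.
type SerializedTextChild = { type: 'text'; text: string };
type SerializedLineBreak = { type: 'linebreak' };
type SerializedParagraph = { children: Array<SerializedTextChild | SerializedLineBreak> };
type SerializedState = { root: { children: SerializedParagraph[] } };

const example: SerializedState = {
  root: {
    children: [
      {
        children: [
          { type: 'text', text: 'first line' },
          { type: 'linebreak' },
          { type: 'text', text: 'still the first paragraph' }
        ]
      },
      { children: [{ type: 'text', text: 'second paragraph' }] }
    ]
  }
};
// editorStateToText would produce:
// 'first line\nstill the first paragraph\nsecond paragraph'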
pnpm-lock.yaml
@@ -126,6 +126,9 @@ importers:
       file-type:
         specifier: ^19.0.0
         version: 19.0.0
+      iconv-lite:
+        specifier: ^0.6.3
+        version: 0.6.3
       json5:
         specifier: ^2.2.3
         version: 2.2.3
@@ -9970,6 +9973,7 @@ packages:
 
   /path2d-polyfill@2.1.1:
     resolution: {integrity: sha512-4Rka5lN+rY/p0CdD8+E+BFv51lFaFvJOrlOhyQ+zjzyQrzyh3ozmxd1vVGGDdIbUFSBtIZLSnspxTgPT0iJhvA==}
     engines: {node: '>=18'}
+    deprecated: this package has been deprecated
     requiresBuild: true
     dependencies:
       path2d: 0.1.1
@@ -97,10 +97,11 @@ const ChatItem = ({
       <Flex flexDirection={'column'} key={chat.dataId} gap={2}>
         {chat.value.map((value, i) => {
           const key = `${chat.dataId}-ai-${i}`;
+
           if (value.text) {
             let source = (value.text?.content || '').trim();
 
-            if (!source && chat.value.length > 1) return <></>;
+            if (!source && chat.value.length > 1) return null;
 
             if (
               isLastChild &&
@@ -90,10 +90,23 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {
 
   const lafFunctionSelectList = useMemo(
     () =>
-      lafData?.lafFunctions.map((item) => ({
-        label: item.description ? `${item.name} (${item.description})` : item.name,
+      lafData?.lafFunctions.map((item) => {
+        const functionName = item.path.slice(1);
+        return {
+          alias: functionName,
+          label: item.description ? (
+            <Box>
+              <Box>{functionName}</Box>
+              <Box fontSize={'xs'} color={'gray.500'}>
+                {item.description}
+              </Box>
+            </Box>
+          ) : (
+            functionName
+          ),
           value: item.requestUrl
-      })) || [],
+        };
+      }) || [],
     [lafData?.lafFunctions]
   );
@@ -111,6 +124,16 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {
 
       if (!lafFunction) return;
 
+      // update intro
+      if (lafFunction.description) {
+        onChangeNode({
+          moduleId,
+          type: 'attr',
+          key: 'intro',
+          value: lafFunction.description
+        });
+      }
+
       const bodyParams =
         lafFunction?.request?.content?.['application/json']?.schema?.properties || {};
@@ -232,7 +255,7 @@ const NodeLaf = (props: NodeProps<FlowModuleItemType>) => {
             );
 
             if (!lafFunction) return;
-            const url = `${feConfigs.lafEnv}/app/${lafData?.lafApp?.appid}/function${lafFunction?.path}?templateid=fastgptflow`;
+            const url = `${feConfigs.lafEnv}/app/${lafData?.lafApp?.appid}/function${lafFunction?.path}?templateid=FastGPT_Laf`;
             window.open(url, '_blank');
           }}
         >
@@ -2,11 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
 import { connectToDatabase } from '@/service/mongo';
 import { authFileToken } from '@fastgpt/service/support/permission/controller';
-import {
-  getDownloadStream,
-  getFileById,
-  readFileEncode
-} from '@fastgpt/service/common/file/gridfs/controller';
+import { getDownloadStream, getFileById } from '@fastgpt/service/common/file/gridfs/controller';
 import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -21,9 +17,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
     throw new Error('fileId is empty');
   }
 
-  const [file, encoding, fileStream] = await Promise.all([
+  const [file, { fileStream, encoding }] = await Promise.all([
     getFileById({ bucketName, fileId }),
-    readFileEncode({ bucketName, fileId }),
     getDownloadStream({ bucketName, fileId })
   ]);
@@ -25,10 +25,12 @@ import { useTranslation } from 'next-i18next';
 import { getInitOutLinkChatInfo } from '@/web/core/chat/api';
 import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
 import { useChatStore } from '@/web/core/chat/storeChat';
-import { ChatRoleEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
+import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
 import MyBox from '@/components/common/MyBox';
 import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
 import { OutLinkWithAppType } from '@fastgpt/global/support/outLink/type';
+import { addLog } from '@fastgpt/service/common/system/log';
+import { connectToDatabase } from '@/service/mongo';
 
 const OutLink = ({
   appName,
@@ -397,6 +399,7 @@ export async function getServerSideProps(context: any) {
 
   const app = await (async () => {
     try {
+      await connectToDatabase();
       const app = (await MongoOutLink.findOne(
         {
           shareId
@@ -407,6 +410,7 @@ export async function getServerSideProps(context: any) {
         .lean()) as OutLinkWithAppType;
       return app;
     } catch (error) {
+      addLog.error('getServerSideProps', error);
       return undefined;
     }
   })();
@@ -34,7 +34,7 @@ import ParentPaths from '@/components/common/ParentPaths';
 import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import { getErrText } from '@fastgpt/global/common/error/utils';
-import { getToken } from '@/web/support/user/auth';
+import { xmlDownloadFetch } from '@/web/common/api/xmlFetch';
 
 const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
 const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
@@ -91,9 +91,11 @@ const Kb = () => {
     mutationFn: async (dataset: DatasetItemType) => {
       setLoading(true);
       await checkTeamExportDatasetLimit(dataset._id);
-      const url = `/api/core/dataset/exportAll?datasetId=${dataset._id}`;
-      const name = `${dataset.name}.csv`;
-      localDownLoadWithToken(url, name, getToken());
+
+      xmlDownloadFetch({
+        url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
+        filename: `${dataset.name}.csv`
+      });
     },
     onSettled() {
       setLoading(false);
@@ -101,26 +103,6 @@ const Kb = () => {
       errorToast: t('dataset.Export Dataset Limit Error')
     });
 
-  const localDownLoadWithToken = (url: string | URL, filename: string, token: string) => {
-    var xhr = new XMLHttpRequest();
-    xhr.open('GET', url, true);
-    xhr.setRequestHeader("token", token);
-    xhr.responseType = 'blob';
-    xhr.onload = function (e) {
-      if (this.status == 200) {
-        var blob = this.response;
-        var a = document.createElement('a');
-        var url = URL.createObjectURL(blob);
-        a.href = url;
-        a.download = filename;
-        a.click();
-        window.URL.revokeObjectURL(url);
-      }
-    };
-    xhr.send();
-  };
-
-
   const { data, refetch, isFetching } = useQuery(
     ['loadDataset', parentId],
     () => {
@@ -10,7 +10,7 @@ export async function authDatasetData({
 }: AuthModeType & {
   dataId: string;
 }) {
-  // get pg data
+  // get mongo dataset.data
   const datasetData = await MongoDatasetData.findById(dataId);
 
   if (!datasetData) {
projects/app/src/web/common/api/xmlFetch.ts (new file)
@@ -0,0 +1,20 @@
+import { getToken } from '@/web/support/user/auth';
+
+export const xmlDownloadFetch = ({ url, filename }: { url: string; filename: string }) => {
+  const xhr = new XMLHttpRequest();
+  xhr.open('GET', url, true);
+  xhr.setRequestHeader('token', getToken());
+  xhr.responseType = 'blob';
+  xhr.onload = function (e) {
+    if (this.status == 200) {
+      const blob = this.response;
+      const a = document.createElement('a');
+      const url = URL.createObjectURL(blob);
+      a.href = url;
+      a.download = filename;
+      a.click();
+      window.URL.revokeObjectURL(url);
+    }
+  };
+  xhr.send();
+};
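Call sites mirror the dataset export hunk earlier in this commit. A minimal usage sketch (the datasetId value is a placeholder):

import { xmlDownloadFetch } from '@/web/common/api/xmlFetch';

xmlDownloadFetch({
  url: '/api/core/dataset/exportAll?datasetId=<your-dataset-id>', // placeholder id
  filename: 'my-dataset.csv'
});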
@@ -121,6 +121,9 @@ export const useSpeech = (props?: OutLinkChatAuthProps & { appId?: string }) =>
       }
     }
 
+    // close media stream
+    stream.getTracks().forEach((track) => track.stop());
+
     setIsTransCription(false);
     setIsSpeaking(false);
   };