feat: Store pdfparse in local (#5534)

Mirrored from https://github.com/labring/FastGPT.git (synced 2025-10-14 15:11:13 +00:00)
@@ -48,10 +48,9 @@ https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409b
 `1` Application orchestration
 - [x] Conversation workflows and plugin workflows, including basic RPA nodes.
-- [x] Agent calls
-- [x] User interaction node
+- [x] User interaction
 - [x] Bidirectional MCP
-- [ ] Context management
+- [ ] Agent mode
 - [ ] AI-generated workflows

 `2` Application debugging
@@ -75,13 +74,13 @@ https://github.com/labring/FastGPT/assets/15308462/7d3a38df-eb0e-4388-9250-2409b
 - [x] completions API (chat mode aligned with the GPT API)
 - [x] Knowledge base CRUD
 - [x] Conversation CRUD
-- [ ] Complete API documentation
+- [ ] Automated OpenAPI interface

 `5` Operations
 - [x] Login-free share window
 - [x] One-click Iframe embedding
 - [x] Unified review of conversation records, with data annotation
-- [ ] Application operation logs
+- [x] Application operation logs

 `6` Other
 - [x] Visual model configuration.
@@ -9,6 +9,7 @@ description: 'FastGPT V4.12.2 release notes'
 1. Concurrency setting for embedding-model requests: no longer uniformly set to 10, since some embedding models do not support concurrent requests. The default is now 1 and can be adjusted in the model configuration.
 2. The chat page allows administrators to configure featured apps to recommend to team members.
 3. On the chat home page, administrators can configure quick-access apps for the team's frequently used applications.
+4. The team chat home page can now be turned off.

 ## ⚙️ Optimizations

@@ -104,7 +104,7 @@
 "document/content/docs/upgrading/4-11/4111.mdx": "2025-08-07T22:49:09+08:00",
 "document/content/docs/upgrading/4-12/4120.mdx": "2025-08-12T22:45:19+08:00",
 "document/content/docs/upgrading/4-12/4121.mdx": "2025-08-15T22:53:06+08:00",
-"document/content/docs/upgrading/4-12/4122.mdx": "2025-08-25T14:44:42+08:00",
+"document/content/docs/upgrading/4-12/4122.mdx": "2025-08-25T19:19:43+08:00",
 "document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00",
 "document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00",
 "document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00",
@@ -45,7 +45,7 @@ export const defaultChatInputGuideConfig = {
 };

 export const defaultAppSelectFileConfig: AppFileSelectConfigType = {
-  canSelectFile: false,
+  canSelectFile: true,
   canSelectImg: false,
   maxFiles: 10
 };
@@ -45,7 +45,7 @@ const ChatTest = ({ appForm, setRenderEdit }: Props) => {
     setRenderEdit(!datasetCiteData);
   }, [datasetCiteData, setRenderEdit]);

-  const { ChatContainer, restartChat, loading } = useChatTest({
+  const { ChatContainer, restartChat } = useChatTest({
     ...workflowData,
     chatConfig: appForm.chatConfig,
     isReady: true
@@ -71,7 +71,6 @@ const EditForm = ({
   const { appDetail } = useContextSelector(AppContext, (v) => v);
   const selectDatasets = useMemo(() => appForm?.dataset?.datasets, [appForm]);
   const [, startTst] = useTransition();
-  const { llmModelList, defaultModels } = useSystemStore();

   const {
     isOpen: isOpenDatasetSelect,
@@ -1,5 +1,5 @@
 import { useRouter } from 'next/router';
-import { type SetStateAction, useMemo, useState } from 'react';
+import { type SetStateAction, useEffect, useMemo, useState } from 'react';
 import { useTranslation } from 'next-i18next';
 import { createContext, useContextSelector } from 'use-context-selector';
 import {
@@ -20,6 +20,7 @@ import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContex
 import { DataChunkSplitModeEnum } from '@fastgpt/global/core/dataset/constants';
 import { chunkAutoChunkSize, getAutoIndexSize } from '@fastgpt/global/core/dataset/training/utils';
 import { type CollectionChunkFormType } from '../Form/CollectionChunkForm';
+import { useLocalStorageState } from 'ahooks';

 export type ImportFormType = {
   customPdfParse: boolean;
@@ -38,7 +39,7 @@ type DatasetImportContextType = {
 };

 export const defaultFormData: ImportFormType = {
-  customPdfParse: false,
+  customPdfParse: true,

   trainingType: DatasetCollectionDataProcessModeEnum.chunk,

@@ -198,12 +199,23 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode

   const vectorModel = datasetDetail.vectorModel;

+  const [localCustomPdfParse, setLocalCustomPdfParse] = useLocalStorageState(
+    'dataset_customPdfParse',
+    {
+      defaultValue: true
+    }
+  );
   const processParamsForm = useForm<ImportFormType>({
     defaultValues: (() => ({
       ...defaultFormData,
+      customPdfParse: localCustomPdfParse,
       indexSize: getAutoIndexSize(vectorModel)
     }))()
   });
+  const customPdfParse = processParamsForm.watch('customPdfParse');
+  useEffect(() => {
+    setLocalCustomPdfParse(customPdfParse);
+  }, [customPdfParse, setLocalCustomPdfParse]);

   const [sources, setSources] = useState<ImportSourceItemType[]>([]);

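The hunk above is the core of this commit: the customPdfParse choice is seeded from localStorage (key `dataset_customPdfParse`) when the import form is created, and written back whenever the form value changes. Below is a minimal, self-contained sketch of that pattern, assuming react-hook-form and ahooks as in the surrounding code; the hook name `usePersistedPdfParse` is illustrative and not part of FastGPT.

```ts
import { useEffect } from 'react';
import { useForm } from 'react-hook-form';
import { useLocalStorageState } from 'ahooks';

type FormValues = { customPdfParse: boolean };

export function usePersistedPdfParse() {
  // Read the last choice from localStorage; fall back to true on first use.
  const [stored, setStored] = useLocalStorageState<boolean>('dataset_customPdfParse', {
    defaultValue: true
  });

  // Seed the form with the stored value so the toggle survives page reloads.
  const form = useForm<FormValues>({
    defaultValues: { customPdfParse: stored ?? true }
  });

  // Mirror any later change back into localStorage.
  const customPdfParse = form.watch('customPdfParse');
  useEffect(() => {
    setStored(customPdfParse);
  }, [customPdfParse, setStored]);

  return form;
}
```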
@@ -46,7 +46,7 @@ const ReTraining = () => {
   ]);

   processParamsForm.reset({
-    customPdfParse: collection.customPdfParse || false,
+    customPdfParse: collection.customPdfParse ?? defaultFormData.customPdfParse,
     trainingType: collection.trainingType,

     chunkTriggerType: collection.chunkTriggerType || defaultFormData.chunkTriggerType,
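The switch from `||` to `??` matters now that the default has become true: with `||`, a collection whose customPdfParse was explicitly set to false would be coerced to the fallback, whereas `??` only falls back on null or undefined. A small illustration with hypothetical values:

```ts
// `||` vs `??` fallback behaviour (values are hypothetical).
const storedValue: boolean | undefined = false; // user explicitly disabled PDF parsing
const fallback = true;                          // defaultFormData.customPdfParse is now true

const withOr = storedValue || fallback;         // true  — the explicit `false` is lost
const withNullish = storedValue ?? fallback;    // false — only null/undefined fall back
```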