File input (#2270)

* doc

* feat: file upload config

* perf: chat box file params

* feat: markdown show file

* feat: chat file store and clear

* perf: read file contentType

* feat: llm vision config

* feat: file url output

* perf: plugin error text

* perf: image load

* feat: ai chat document

* perf: file block ui

* feat: read file node

* feat: file read response field

* feat: simple mode support read files

* feat: tool call

* feat: read file histories

* perf: select file

* perf: select file config

* i18n

* i18n

* fix: ts; feat: tool response preview result
This commit is contained in:
Archer
2024-08-06 10:00:22 +08:00
committed by GitHub
parent 10dcdb5491
commit e36d9d794f
121 changed files with 2600 additions and 1142 deletions

View File

@@ -38,7 +38,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
})();
res.setHeader('Content-Type', `${file.contentType}; charset=${encoding}`);
res.setHeader('Cache-Control', 'public, max-age=3600');
res.setHeader('Cache-Control', 'public, max-age=31536000');
res.setHeader('Content-Disposition', `inline; filename="${encodeURIComponent(file.filename)}"`);
stream.pipe(res);

View File

@@ -1,12 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { NextAPI } from '@/service/middleware/entry';
import { createFileToken } from '@fastgpt/service/support/permission/controller';
import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
/* Creates the multer uploader */
const upload = getUploadModel({
maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
@@ -14,11 +16,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const filePaths: string[] = [];
try {
await connectToDatabase();
const { file, bucketName, metadata } = await upload.doUpload(req, res);
filePaths.push(file.path);
const { teamId, tmbId } = await authCert({ req, authToken: true });
if (!bucketName) {
@@ -35,8 +34,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
metadata: metadata
});
jsonRes(res, {
data: fileId
jsonRes<{
fileId: string;
previewUrl: string;
}>(res, {
data: {
fileId,
previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken(
{
bucketName,
teamId,
tmbId,
fileId
}
)}`
}
});
} catch (error) {
jsonRes(res, {
@@ -48,6 +60,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
removeFilesByPaths(filePaths);
}
export default NextAPI(handler);
export const config = {
api: {
bodyParser: false

View File

@@ -15,6 +15,7 @@ import {
import { findAppAndAllChildren } from '@fastgpt/service/core/app/controller';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { ClientSession } from '@fastgpt/service/common/mongo';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const { appId } = req.query as { appId: string };
@@ -53,6 +54,7 @@ export const onDelOneApp = async ({
for await (const app of apps) {
const appId = app._id;
// Chats
await deleteChatFiles({ appId });
await MongoChatItem.deleteMany(
{
appId

View File

@@ -21,6 +21,7 @@ import {
import { NextAPI } from '@/service/middleware/entry';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';
export type Props = {
messages: ChatCompletionMessageParam[];
@@ -29,6 +30,7 @@ export type Props = {
variables: Record<string, any>;
appId: string;
appName: string;
chatConfig: AppChatConfigType;
};
async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -40,7 +42,15 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
res.end();
});
let { nodes = [], edges = [], messages = [], variables = {}, appName, appId } = req.body as Props;
let {
nodes = [],
edges = [],
messages = [],
variables = {},
appName,
appId,
chatConfig
} = req.body as Props;
try {
// [histories, user]
const chatMessages = GPTMessages2Chats(messages);
@@ -79,6 +89,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
/* start process */
const { flowResponses, flowUsages } = await dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'test',
teamId,
tmbId,
@@ -88,6 +99,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
runtimeEdges: edges,
variables,
query: removeEmptyUserInput(userInput),
chatConfig,
histories: chatMessages,
stream: true,
detail: true,

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
@@ -8,64 +7,71 @@ import { ClearHistoriesProps } from '@/global/core/chat/api';
import { authOutLink } from '@/service/support/permission/auth/outLink';
import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
import { NextAPI } from '@/service/middleware/entry';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
/* clear chat history */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { appId, shareId, outLinkUid, teamId, teamToken } = req.query as ClearHistoriesProps;
async function handler(req: NextApiRequest, res: NextApiResponse) {
const { appId, shareId, outLinkUid, teamId, teamToken } = req.query as ClearHistoriesProps;
let chatAppId = appId;
let chatAppId = appId!;
const match = await (async () => {
if (shareId && outLinkUid) {
const { appId, uid } = await authOutLink({ shareId, outLinkUid });
const match = await (async () => {
if (shareId && outLinkUid) {
const { appId, uid } = await authOutLink({ shareId, outLinkUid });
chatAppId = appId;
return {
shareId,
outLinkUid: uid
};
}
if (teamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId, teamToken });
return {
teamId,
appId,
outLinkUid: uid
};
}
if (appId) {
const { tmbId } = await authCert({ req, authToken: true });
chatAppId = appId;
return {
shareId,
outLinkUid: uid
};
}
if (teamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId, teamToken });
return {
teamId,
appId,
outLinkUid: uid
};
}
if (appId) {
const { tmbId } = await authCert({ req, authToken: true });
return {
tmbId,
appId,
source: ChatSourceEnum.online
};
}
return {
tmbId,
appId,
source: ChatSourceEnum.online
};
}
return Promise.reject('Param are error');
})();
return Promise.reject('Param are error');
})();
// find chatIds
const list = await MongoChat.find(match, 'chatId').lean();
const idList = list.map((item) => item.chatId);
// find chatIds
const list = await MongoChat.find(match, 'chatId').lean();
const idList = list.map((item) => item.chatId);
await MongoChatItem.deleteMany({
appId: chatAppId,
chatId: { $in: idList }
});
await MongoChat.deleteMany({
appId: chatAppId,
chatId: { $in: idList }
});
await deleteChatFiles({ chatIdList: idList });
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
appId: chatAppId,
chatId: { $in: idList }
},
{ session }
);
await MongoChat.deleteMany(
{
appId: chatAppId,
chatId: { $in: idList }
},
{ session }
);
});
jsonRes(res);
}
export default NextAPI(handler);

View File

@@ -1,4 +1,4 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import type { NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
@@ -8,6 +8,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';
/* clear chat history */
async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiResponse) {
@@ -20,6 +21,7 @@ async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiRe
per: WritePermissionVal
});
await deleteChatFiles({ chatIdList: [chatId] });
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
@@ -28,7 +30,7 @@ async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiRe
},
{ session }
);
await MongoChat.findOneAndRemove(
await MongoChat.deleteOne(
{
appId,
chatId

View File

@@ -41,6 +41,7 @@ async function handler(
/* start process */
const { flowUsages, flowResponses, debugResponse } = await dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'debug',
teamId,
tmbId,
@@ -50,6 +51,7 @@ async function handler(
runtimeEdges: edges,
variables,
query: [],
chatConfig: defaultApp.chatConfig,
histories: [],
stream: false,
detail: true,

View File

@@ -249,6 +249,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
if (app.version === 'v2') {
return dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'chat',
user,
teamId: String(teamId),
@@ -260,6 +261,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
runtimeEdges: initWorkflowEdgeStatus(edges),
variables: runtimeVariables,
query: removeEmptyUserInput(userQuestion.value),
chatConfig,
histories: newHistories,
stream,
detail,

View File

@@ -27,9 +27,10 @@ const ChatTest = ({ appForm }: { appForm: AppSimpleEditFormType }) => {
});
useEffect(() => {
const { nodes, edges } = form2AppWorkflow(appForm);
const { nodes, edges } = form2AppWorkflow(appForm, t);
// console.log(form2AppWorkflow(appForm, t));
setWorkflowData({ nodes, edges });
}, [appForm, setWorkflowData, allDatasets]);
}, [appForm, setWorkflowData, allDatasets, t]);
const { restartChat, ChatContainer } = useChatTest({
...workflowData,

View File

@@ -47,6 +47,7 @@ const ScheduledTriggerConfig = dynamic(
() => import('@/components/core/app/ScheduledTriggerConfig')
);
const WelcomeTextConfig = dynamic(() => import('@/components/core/app/WelcomeTextConfig'));
const FileSelectConfig = dynamic(() => import('@/components/core/app/FileSelect'));
const BoxStyles: BoxProps = {
px: [4, 6],
@@ -120,11 +121,11 @@ const EditForm = ({
[appForm.chatConfig.variables, t]
);
const selectedModel =
llmModelList.find((item) => item.model === appForm.aiSettings.model) ?? llmModelList[0];
const tokenLimit = useMemo(() => {
return (
llmModelList.find((item) => item.model === appForm.aiSettings.model)?.quoteMaxToken || 3000
);
}, [llmModelList, appForm.aiSettings.model]);
return selectedModel.quoteMaxToken || 3000;
}, [selectedModel.quoteMaxToken]);
return (
<>
@@ -338,6 +339,23 @@ const EditForm = ({
</Grid>
</Box>
{/* File select */}
<Box {...BoxStyles}>
<FileSelectConfig
forbidVision={!selectedModel.vision}
value={appForm.chatConfig.fileSelectConfig}
onChange={(e) => {
setAppForm((state) => ({
...state,
chatConfig: {
...state.chatConfig,
fileSelectConfig: e
}
}));
}}
/>
</Box>
{/* variable */}
<Box {...BoxStyles}>
<VariableEdit

View File

@@ -12,7 +12,6 @@ import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConf
import { AppSimpleEditFormType } from '@fastgpt/global/core/app/type';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { form2AppWorkflow } from '@/web/core/app/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { TabEnum } from '../context';
import PublishHistoriesSlider, { type InitProps } from '../PublishHistoriesSlider';
import { appWorkflow2Form } from '@fastgpt/global/core/app/utils';
@@ -52,7 +51,7 @@ const Header = ({
);
const isPublished = useMemo(() => {
const data = form2AppWorkflow(appForm);
const data = form2AppWorkflow(appForm, t);
return compareWorkflow(
{
@@ -66,11 +65,11 @@ const Header = ({
chatConfig: data.chatConfig
}
);
}, [appDetail.chatConfig, appDetail.modules, appForm]);
}, [appDetail.chatConfig, appDetail.modules, appForm, t]);
const onSubmitPublish = useCallback(
async (data: AppSimpleEditFormType) => {
const { nodes, edges } = form2AppWorkflow(data);
const { nodes, edges } = form2AppWorkflow(data, t);
await onPublish({
nodes,
edges,
@@ -78,7 +77,7 @@ const Header = ({
type: AppTypeEnum.simple
});
},
[onPublish]
[onPublish, t]
);
const [historiesDefaultData, setHistoriesDefaultData] = useState<InitProps>();
@@ -119,9 +118,11 @@ const Header = ({
: publishStatusStyle.unPublish.colorSchema
}
>
{isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text}
{t(
isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text
)}
</MyTag>
)}
@@ -133,7 +134,7 @@ const Header = ({
w={'30px'}
variant={'whitePrimary'}
onClick={() => {
const { nodes, edges } = form2AppWorkflow(appForm);
const { nodes, edges } = form2AppWorkflow(appForm, t);
setHistoriesDefaultData({
nodes,
edges,

View File

@@ -190,9 +190,11 @@ const AppCard = ({ showSaveStatus }: { showSaveStatus: boolean }) => {
: publishStatusStyle.unPublish.colorSchema
}
>
{isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text}
{t(
isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text
)}
</MyTag>
</Flex>
</MyTooltip>

View File

@@ -36,6 +36,7 @@ const nodeTypes: Record<FlowNodeTypeEnum, any> = {
[FlowNodeTypeEnum.systemConfig]: dynamic(() => import('./nodes/NodeSystemConfig')),
[FlowNodeTypeEnum.workflowStart]: dynamic(() => import('./nodes/NodeWorkflowStart')),
[FlowNodeTypeEnum.chatNode]: NodeSimple,
[FlowNodeTypeEnum.readFiles]: NodeSimple,
[FlowNodeTypeEnum.datasetSearchNode]: NodeSimple,
[FlowNodeTypeEnum.datasetConcatNode]: dynamic(() => import('./nodes/NodeDatasetConcat')),
[FlowNodeTypeEnum.answerNode]: dynamic(() => import('./nodes/NodeAnswer')),

View File

@@ -174,7 +174,7 @@ function Reference({
<>
<Flex alignItems={'center'} mb={1}>
<FormLabel required={input.required}>{input.label}</FormLabel>
{input.description && <QuestionTip label={input.description}></QuestionTip>}
{input.description && <QuestionTip ml={0.5} label={input.description}></QuestionTip>}
{/* value */}
<ValueTypeLabel valueType={input.valueType} />

View File

@@ -1,6 +1,6 @@
import React, { Dispatch, useMemo, useTransition } from 'react';
import { NodeProps } from 'reactflow';
import { Box, useTheme } from '@chakra-ui/react';
import { Box } from '@chakra-ui/react';
import { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import QGSwitch from '@/components/core/app/QGSwitch';
@@ -19,6 +19,7 @@ import { useMemoizedFn } from 'ahooks';
import VariableEdit from '@/components/core/app/VariableEdit';
import { AppContext } from '@/pages/app/detail/components/context';
import WelcomeTextConfig from '@/components/core/app/WelcomeTextConfig';
import FileSelect from '@/components/core/app/FileSelect';
type ComponentProps = {
chatConfig: AppChatConfigType;
@@ -26,7 +27,6 @@ type ComponentProps = {
};
const NodeUserGuide = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const theme = useTheme();
const { appDetail, setAppDetail } = useContextSelector(AppContext, (v) => v);
const chatConfig = useMemo<AppChatConfigType>(() => {
@@ -63,19 +63,22 @@ const NodeUserGuide = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
<Box pt={4}>
<ChatStartVariable {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={theme.borders.base}>
<Box mt={3} pt={3} borderTop={'base'}>
<FileSelectConfig {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={'base'}>
<TTSGuide {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={theme.borders.base}>
<Box mt={3} pt={3} borderTop={'base'}>
<WhisperGuide {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={theme.borders.base}>
<Box mt={3} pt={3} borderTop={'base'}>
<QuestionGuide {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={theme.borders.base}>
<Box mt={3} pt={3} borderTop={'base'}>
<ScheduledTrigger {...componentsProps} />
</Box>
<Box mt={3} pt={3} borderTop={theme.borders.base}>
<Box mt={3} pt={3} borderTop={'base'}>
<QuestionInputGuide {...componentsProps} />
</Box>
</Box>
@@ -219,3 +222,20 @@ function QuestionInputGuide({ chatConfig: { chatInputGuide }, setAppDetail }: Co
/>
) : null;
}
// Editor for the app's file-select chat configuration.
// Reads the current `fileSelectConfig` out of chatConfig and, on change,
// writes the new value back through setAppDetail with an immutable merge
// (spreads state and state.chatConfig, replacing only fileSelectConfig).
// NOTE(review): ComponentProps / FileSelect are project types defined
// elsewhere in this commit — shape not fully visible here.
function FileSelectConfig({ chatConfig: { fileSelectConfig }, setAppDetail }: ComponentProps) {
return (
<FileSelect
value={fileSelectConfig}
onChange={(e) => {
// Immutable update: never mutate the existing chatConfig object.
setAppDetail((state) => ({
...state,
chatConfig: {
...state.chatConfig,
fileSelectConfig: e
}
}));
}}
/>
);
}

View File

@@ -1,4 +1,4 @@
import React, { useMemo } from 'react';
import React, { useEffect, useMemo } from 'react';
import { NodeProps } from 'reactflow';
import NodeCard from './render/NodeCard';
import { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
@@ -14,11 +14,13 @@ import { FlowNodeOutputItemType } from '@fastgpt/global/core/workflow/type/io';
import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
import { AppContext } from '@/pages/app/detail/components/context';
import { userFilesInput } from '@fastgpt/global/core/workflow/template/system/workflowStart';
const NodeStart = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
const { nodeId, outputs } = data;
const nodeList = useContextSelector(WorkflowContext, (v) => v.nodeList);
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
const { appDetail } = useContextSelector(AppContext, (v) => v);
const variablesOutputs = useCreation(() => {
@@ -38,6 +40,30 @@ const NodeStart = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
}));
}, [nodeList, t]);
// Dynamic add or delete userFilesInput
useEffect(() => {
const canUploadFiles =
appDetail.chatConfig?.fileSelectConfig?.canSelectFile ||
appDetail.chatConfig?.fileSelectConfig?.canSelectImg;
const repeatKey = outputs.find((item) => item.key === userFilesInput.key);
if (canUploadFiles) {
!repeatKey &&
onChangeNode({
nodeId,
type: 'addOutput',
value: userFilesInput
});
} else {
repeatKey &&
onChangeNode({
nodeId,
type: 'delOutput',
key: userFilesInput.key
});
}
}, [appDetail.chatConfig?.fileSelectConfig, nodeId, onChangeNode, outputs]);
return (
<NodeCard
minW={'240px'}

View File

@@ -84,6 +84,8 @@ const InputLabel = ({ nodeId, input }: Props) => {
);
}, [
description,
input.renderTypeList,
input.selectedTypeIndex,
label,
onChangeRenderType,
renderTypeList,

View File

@@ -36,9 +36,10 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatMaxToken)?.value ?? 2048,
temperature:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatTemperature)?.value ?? 1,
isResponseAnswerText: inputs.find(
(input) => input.key === NodeInputKeyEnum.aiChatIsResponseText
)?.value
isResponseAnswerText:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatIsResponseText)?.value ?? true,
aiChatVision:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatVision)?.value ?? true
}),
[inputs]
);

View File

@@ -35,7 +35,7 @@ const OutputLabel = ({ nodeId, output }: { nodeId: string; output: FlowNodeOutpu
>
{t(label as any)}
</Box>
{description && <QuestionTip label={t(description as any)} />}
{description && <QuestionTip ml={1} label={t(description as any)} />}
<ValueTypeLabel valueType={valueType} />
</Flex>
{output.type === FlowNodeOutputTypeEnum.source && (

View File

@@ -523,7 +523,7 @@ const WorkflowContextProvider = ({
version: 'v2'
});
setSaveLabel(
t('core.app.Saved time', {
t('common:core.app.Saved time', {
time: formatTime2HM()
})
);

View File

@@ -51,7 +51,8 @@ export const useChatTest = ({
edges: initWorkflowEdgeStatus(edges),
variables,
appId: appDetail._id,
appName: `调试-${appDetail.name}`
appName: `调试-${appDetail.name}`,
chatConfig
},
onMessage: generatingMessage,
abortCtrl: controller
@@ -99,7 +100,6 @@ export const useChatTest = ({
userAvatar={userInfo?.avatar}
showMarkIcon
chatConfig={chatConfig}
showFileSelector={checkChatSupportSelectFileByModules(nodes)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

View File

@@ -255,7 +255,6 @@ const Chat = ({
appAvatar={chatData.app.avatar}
userAvatar={userInfo?.avatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={onStartChat}
onDelMessage={({ contentId }) => delChatRecordById({ contentId, appId, chatId })}
@@ -339,7 +338,7 @@ export async function getServerSideProps(context: any) {
props: {
appId: context?.query?.appId || '',
chatId: context?.query?.chatId || '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

View File

@@ -318,7 +318,6 @@ const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={startChat}
onDelMessage={({ contentId }) =>
@@ -395,7 +394,7 @@ export async function getServerSideProps(context: any) {
appIntro: app?.appId?.intro ?? 'intro',
shareId: shareId ?? '',
authToken: authToken ?? '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

View File

@@ -252,7 +252,6 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={startChat}
onDelMessage={({ contentId }) =>
@@ -338,7 +337,7 @@ export async function getServerSideProps(context: any) {
chatId: context?.query?.chatId || '',
teamId: context?.query?.teamId || '',
teamToken: context?.query?.teamToken || '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

View File

@@ -89,7 +89,7 @@ const FileSelector = ({
// upload file
await Promise.all(
files.map(async ({ fileId, file }) => {
const uploadFileId = await uploadFile2DB({
const { fileId: uploadFileId } = await uploadFile2DB({
file,
bucketName: BucketNameEnum.dataset,
percentListen: (e) => {
@@ -230,7 +230,7 @@ const FileSelector = ({
let isErr = files.some((item) => item.type === '');
if (isErr) {
return toast({
title: fileT('upload_error_description'),
title: t('file:upload_error_description'),
status: 'error'
});
}

View File

@@ -54,7 +54,7 @@ const LoginForm = ({ setPageType, loginSuccess }: Props) => {
}
setRequesting(false);
},
[loginSuccess, toast]
[loginSuccess, t, toast]
);
const isCommunityVersion = feConfigs?.show_register === false && !feConfigs?.isPlus;

View File

@@ -129,7 +129,7 @@ const Login = () => {
export async function getServerSideProps(context: any) {
return {
props: { ...(await serviceSideProps(context, ['app'])) }
props: { ...(await serviceSideProps(context, ['app', 'user'])) }
};
}