perf: collection created response (#1947)

* perf: collection created response
* update openapi doc
* remove default collection
* perf: chat ui
* fix: system prompt concat
* perf: published check
* perf: update app
@@ -13,7 +13,7 @@ weight: 853

-## 创建训练订单(4.6.9地址发生改动)
+## 创建训练订单

 {{< tabs tabTotal="2" >}}
 {{< tab tabName="请求示例" >}}

@@ -26,6 +26,7 @@ curl --location --request POST 'https://api.fastgpt.in/api/support/wallet/usage/
 --header 'Authorization: Bearer {{apikey}}' \
 --header 'Content-Type: application/json' \
 --data-raw '{
+    "datasetId": "知识库 ID",
     "name": "可选,自定义订单名称,例如:文档训练-fastgpt.docx"
 }'
 ```
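For readers calling this endpoint from code, here is a minimal TypeScript sketch of the documented request. The endpoint, headers and body fields are taken from the curl example above; the helper name and error handling are illustrative, and the response shape is not shown in this diff, so it is left untyped.

```ts
// Create a training usage (billing) order before pushing data into a dataset.
type CreateTrainingUsageBody = {
  datasetId: string; // 知识库 ID
  name?: string; // optional custom order name
};

export async function createTrainingUsage(apiKey: string, body: CreateTrainingUsageBody) {
  const res = await fetch(
    'https://api.fastgpt.in/api/support/wallet/usage/createTrainingUsage',
    {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${apiKey}`,
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(body)
    }
  );
  if (!res.ok) throw new Error(`createTrainingUsage failed: ${res.status}`);
  return res.json(); // the created order (bill) id comes back in the response body
}
```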
@@ -38,5 +38,7 @@ curl --location --request POST 'https://{{host}}/api/admin/initv486' \
 3. 新增 - 网页抓取和数学计算器系统插件
 4. 新增 - 移动文本加工和自定义反馈到基础节点中
 5. 优化 - Read file 默认选中从节点,实现 MongoDB 读写分离,减轻主节点压力
-6. 修复 - 工作流中团队插件加载异常
-7. 修复 - 知识库集合目录导航失效
+6. 优化 - 知识库导入接口,返回值对齐
+7. 修复 - 工作流中团队插件加载异常
+8. 修复 - 知识库集合目录导航失效
+9. 修复 - 通过 API 调用 chat 接口,传递 System 异常
@@ -3,6 +3,17 @@ import { FlowNodeTypeEnum } from '../workflow/node/constant';
 import { ChatItemValueTypeEnum, ChatRoleEnum } from './constants';
 import { ChatHistoryItemResType, ChatItemType, UserChatItemValueItemType } from './type.d';

+// Concat 2 -> 1, and sort by role
+export const concatHistories = (histories1: ChatItemType[], histories2: ChatItemType[]) => {
+  const newHistories = [...histories1, ...histories2];
+  return newHistories.sort((a, b) => {
+    if (a.obj === ChatRoleEnum.System) {
+      return -1;
+    }
+    return 1;
+  });
+};
+
 export const getChatTitleFromChatMessage = (message?: ChatItemType, defaultValue = '新对话') => {
   // @ts-ignore
   const textMsg = message?.value.find((item) => item.type === ChatItemValueTypeEnum.text);
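A quick illustration of the new helper, not part of the diff: the item shapes are trimmed down (a real ChatItemType carries more fields), and the point is that System entries float to the front regardless of which array they came from, which is what the system-prompt concat fix relies on.

```ts
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { concatHistories } from '@fastgpt/global/core/chat/utils';

// Simplified stand-ins for ChatItemType records.
const stored: any[] = [
  { obj: ChatRoleEnum.Human, value: [] },
  { obj: ChatRoleEnum.AI, value: [] }
];
const incoming: any[] = [{ obj: ChatRoleEnum.System, value: [] }];

const merged = concatHistories(stored, incoming);
// merged[0].obj === ChatRoleEnum.System, i.e. system prompts always end up first.
```

Note that the comparator only ever returns -1 or 1, so the relative order of the non-System items depends on Array.prototype.sort being stable, which it is in modern engines.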
@@ -88,7 +88,7 @@
       "x": 1050.9890727421412,
       "y": -415.2085119990912
     },
-    "version": "486",
+    "version": "481",
     "inputs": [
       {
         "key": "system_addInputParam",
@@ -13,24 +13,24 @@ export async function getChatItems({
   chatId?: string;
   limit?: number;
   field: string;
-}): Promise<{ history: ChatItemType[] }> {
+}): Promise<{ histories: ChatItemType[] }> {
   if (!chatId) {
-    return { history: [] };
+    return { histories: [] };
   }

-  const history = await MongoChatItem.find({ appId, chatId }, field)
+  const histories = await MongoChatItem.find({ appId, chatId }, field)
     .sort({ _id: -1 })
     .limit(limit)
     .lean();

-  history.reverse();
+  histories.reverse();

-  history.forEach((item) => {
+  histories.forEach((item) => {
     // @ts-ignore
     item.value = adaptStringValue(item.value);
   });

-  return { history };
+  return { histories };
 }
 /* 临时适配旧的对话记录 */
 export const adaptStringValue = (value: any): ChatItemValueItemType[] => {
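A small sketch of what callers look like after the rename; the function is declared locally rather than imported because only its new signature is shown in the diff, and the projection string and limit are illustrative.

```ts
import { ChatItemType } from '@fastgpt/global/core/chat/type.d';

// Assumed helper signature, matching the diff: resolves { histories } instead of { history }.
declare function getChatItems(props: {
  appId: string;
  chatId?: string;
  limit?: number;
  field: string;
}): Promise<{ histories: ChatItemType[] }>;

async function loadHistories(appId: string, chatId?: string) {
  const { histories } = await getChatItems({
    appId,
    chatId,
    limit: 30, // illustrative
    field: 'dataId obj value' // illustrative Mongo projection
  });
  return histories; // already oldest-first and value-normalised by adaptStringValue
}
```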
@@ -75,54 +75,9 @@ export async function createOneCollection({
     { session }
   );

-  // create default collection
-  if (type === DatasetCollectionTypeEnum.folder) {
-    await createDefaultCollection({
-      datasetId,
-      parentId: collection._id,
-      teamId,
-      tmbId,
-      session
-    });
-  }
-
   return collection;
 }

-// create default collection
-export function createDefaultCollection({
-  name = '手动录入',
-  datasetId,
-  parentId,
-  teamId,
-  tmbId,
-  session
-}: {
-  name?: '手动录入' | '手动标注';
-  datasetId: string;
-  parentId?: string;
-  teamId: string;
-  tmbId: string;
-  session?: ClientSession;
-}) {
-  return MongoDatasetCollection.create(
-    [
-      {
-        name,
-        teamId,
-        tmbId,
-        datasetId,
-        parentId,
-        type: DatasetCollectionTypeEnum.virtual,
-        trainingType: TrainingModeEnum.chunk,
-        chunkSize: 0,
-        updateTime: new Date('2099')
-      }
-    ],
-    { session }
-  );
-}
-
 /* delete collection related images/files */
 export const delCollectionRelatedSource = async ({
   collections,
@@ -10,6 +10,7 @@ import {
 } from '@fastgpt/global/core/dataset/constants';
 import { hashStr } from '@fastgpt/global/common/string/tools';
 import { ClientSession } from '../../../common/mongo';
+import { PushDatasetDataResponse } from '@fastgpt/global/core/dataset/api';

 /**
  * get all collection by top collectionId

@@ -138,7 +139,7 @@ export const reloadCollectionChunks = async ({
   billId?: string;
   rawText?: string;
   session: ClientSession;
-}) => {
+}): Promise<PushDatasetDataResponse> => {
   const {
     title,
     rawText: newRawText,

@@ -149,7 +150,10 @@ export const reloadCollectionChunks = async ({
     newRawText: rawText
   });

-  if (isSameRawText) return;
+  if (isSameRawText)
+    return {
+      insertLen: 0
+    };

   // split data
   const { chunks } = splitText2Chunks({

@@ -164,7 +168,7 @@ export const reloadCollectionChunks = async ({
     return Promise.reject('Training model error');
   })();

-  await MongoDatasetTraining.insertMany(
+  const result = await MongoDatasetTraining.insertMany(
     chunks.map((item, i) => ({
       teamId: col.teamId,
       tmbId,

@@ -191,4 +195,8 @@ export const reloadCollectionChunks = async ({
     },
     { session }
   );
+
+  return {
+    insertLen: result.length
+  };
 };
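With these changes, reloadCollectionChunks always resolves a PushDatasetDataResponse, even on the early return for unchanged raw text. A hedged sketch of a caller; only the insertLen field is visible in this diff, so the type below is a minimal assumed shape and the wiring is illustrative.

```ts
// Assumed minimal shape, matching the fields used in this diff.
type PushDatasetDataResponse = { insertLen: number };

async function reimport(reload: () => Promise<PushDatasetDataResponse>) {
  const { insertLen } = await reload();
  if (insertLen === 0) {
    // Raw text unchanged: nothing was queued for training.
    return;
  }
  console.log(`${insertLen} chunks pushed to the training queue`);
}
```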
@@ -1,12 +1,11 @@
-import { getErrText } from '@fastgpt/global/common/error/utils';
-import { replaceSensitiveText } from '@fastgpt/global/common/string/tools';
+import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
 import {
   WorkflowIOValueTypeEnum,
   NodeOutputKeyEnum
 } from '@fastgpt/global/core/workflow/constants';
 import { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/runtime/type';
 import { FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io';

 export const filterToolNodeIdByEdges = ({
   nodeId,

@@ -45,10 +44,16 @@ export const filterToolNodeIdByEdges = ({

 export const getHistories = (history?: ChatItemType[] | number, histories: ChatItemType[] = []) => {
   if (!history) return [];
+
+  const systemHistories = histories.filter((item) => item.obj === ChatRoleEnum.System);
+
+  const filterHistories = (() => {
     if (typeof history === 'number') return histories.slice(-(history * 2));
     if (Array.isArray(history)) return history;
-
     return [];
+  })();
+
+  return [...systemHistories, ...filterHistories];
 };

 /* value type format */
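Behaviourally, getHistories now re-attaches System records even when the caller asks for only the last N turns. A rough, data-only illustration (the item shapes are trimmed; the function itself is not imported here because only its body is shown in the diff):

```ts
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';

// Simplified stand-ins for ChatItemType records.
const histories = [
  { obj: ChatRoleEnum.System, value: [] }, // injected system prompt
  { obj: ChatRoleEnum.Human, value: [] },
  { obj: ChatRoleEnum.AI, value: [] },
  { obj: ChatRoleEnum.Human, value: [] },
  { obj: ChatRoleEnum.AI, value: [] }
];

// With history = 1, the old slice(-(1 * 2)) kept only the last Human/AI pair and
// dropped the System record; the new code prepends the System records again:
// getHistories(1, histories) -> [System, Human, AI]
```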
@@ -41,7 +41,6 @@ export const useChatBox = () => {

     const map: Record<ExportChatType, () => void> = {
       md: () => {
-        console.log(history);
         fileDownload({
           text: history
             .map((item) => {
@@ -1,4 +1,7 @@
-import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
+import {
+  PushDatasetDataChunkProps,
+  PushDatasetDataResponse
+} from '@fastgpt/global/core/dataset/api';
 import {
   DatasetSearchModeEnum,
   DatasetSourceReadTypeEnum,

@@ -31,6 +34,10 @@ export type RebuildEmbeddingProps = {
 };

+/* ================= collection ===================== */
+export type CreateCollectionResponse = Promise<{
+  collectionId: string;
+  results: PushDatasetDataResponse;
+}>;

 /* ================= data ===================== */
 export type InsertOneDatasetDataProps = PushDatasetDataChunkProps & {
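All of the create-collection handlers below are annotated with this CreateCollectionResponse type. A minimal sketch of a handler using it; request parsing and the real creation logic are omitted, and the literal values are placeholders.

```ts
import type { NextApiRequest } from 'next';
import { PushDatasetDataResponse } from '@fastgpt/global/core/dataset/api';

type CreateCollectionResponse = Promise<{
  collectionId: string;
  results: PushDatasetDataResponse;
}>;

// Sketch only: a handler typed this way must resolve both the new collection id
// and the training-queue statistics, which is exactly what the endpoints below return.
async function handler(req: NextApiRequest): CreateCollectionResponse {
  const collectionId = 'placeholder';
  const results = { insertLen: 0 } as PushDatasetDataResponse;
  return { collectionId, results };
}
```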
@@ -154,10 +154,8 @@ async function handler(req: ApiRequestProps<AppUpdateParams, { appId: string }>)

      return onUpdate(session);
    });
  }

  } else if (isDefaultPermissionChanged) {
    // Update default permission
    if (isDefaultPermissionChanged) {
    await mongoSessionRun(async (session) => {
      if (isFolder) {
        // Sync children default permission

@@ -193,6 +191,8 @@ async function handler(req: ApiRequestProps<AppUpdateParams, { appId: string }>)

      return onUpdate(session, defaultPermission);
    });
  } else {
    return onUpdate();
  }
}
@@ -42,7 +42,7 @@ async function handler(
   }

   // get app and history
-  const [{ history }, { nodes }] = await Promise.all([
+  const [{ histories }, { nodes }] = await Promise.all([
     getChatItems({
       appId,
       chatId,

@@ -60,7 +60,7 @@ async function handler(
     title: chat?.title || '新对话',
     userAvatar: undefined,
     variables: chat?.variables || {},
-    history,
+    history: histories,
     app: {
       chatConfig: getAppChatConfig({
         chatConfig: app.chatConfig,
@@ -41,7 +41,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     throw new Error(ChatErrEnum.unAuthChat);
   }

-  const [{ history }, { nodes }] = await Promise.all([
+  const [{ histories }, { nodes }] = await Promise.all([
     getChatItems({
       appId: app._id,
       chatId,

@@ -56,7 +56,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   ]);

   // pick share response field
-  history.forEach((item) => {
+  histories.forEach((item) => {
     if (item.obj === ChatRoleEnum.AI) {
       item.responseData = filterPublicNodeResponseData({ flowResponses: item.responseData });
     }

@@ -70,7 +70,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     //@ts-ignore
     userAvatar: tmb?.userId?.avatar,
     variables: chat?.variables || {},
-    history,
+    history: histories,
     app: {
       chatConfig: getAppChatConfig({
         chatConfig: app.chatConfig,
@@ -47,7 +47,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   }

   // get app and history
-  const [{ history }, { nodes }] = await Promise.all([
+  const [{ histories }, { nodes }] = await Promise.all([
     getChatItems({
       appId,
       chatId,

@@ -58,7 +58,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   ]);

   // pick share response field
-  history.forEach((item) => {
+  histories.forEach((item) => {
     if (item.obj === ChatRoleEnum.AI) {
       item.responseData = filterPublicNodeResponseData({ flowResponses: item.responseData });
     }

@@ -71,7 +71,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     title: chat?.title || '新对话',
     userAvatar: team?.avatar,
     variables: chat?.variables || {},
-    history,
+    history: histories,
     app: {
       chatConfig: getAppChatConfig({
         chatConfig: app.chatConfig,
@@ -18,8 +18,9 @@ import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
 import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
 import { NextAPI } from '@/service/middleware/entry';
+import { CreateCollectionResponse } from '@/global/core/dataset/api';

-async function handler(req: NextApiRequest) {
+async function handler(req: NextApiRequest): CreateCollectionResponse {
   const { datasetId, parentId, fileId } = req.body as FileIdCreateDatasetCollectionParams;
   const trainingType = TrainingModeEnum.chunk;
   const { teamId, tmbId, dataset } = await authDataset({

@@ -50,7 +51,7 @@ async function handler(req: NextApiRequest) {
     insertLen: predictDataLimitLength(trainingType, chunks)
   });

-  await mongoSessionRun(async (session) => {
+  return mongoSessionRun(async (session) => {
     // 4. create collection
     const { _id: collectionId } = await createOneCollection({
       teamId,

@@ -80,7 +81,7 @@ async function handler(req: NextApiRequest) {
     });

     // 6. insert to training queue
-    await pushDataListToTrainingQueue({
+    const insertResult = await pushDataListToTrainingQueue({
       teamId,
       tmbId,
       datasetId: dataset._id,

@@ -97,7 +98,7 @@ async function handler(req: NextApiRequest) {
       session
     });

-    return collectionId;
+    return { collectionId, results: insertResult };
   });
 }
 export default NextAPI(handler);
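The recurring pattern in these handlers is returning the value of mongoSessionRun instead of awaiting it and discarding the result, so whatever the transaction callback resolves becomes the handler's response. A hedged sketch of that pattern; mongoSessionRun is declared locally rather than imported, and everything inside the callback is illustrative.

```ts
// Sketch: the transaction callback's resolved value is passed through to the caller.
declare function mongoSessionRun<T>(fn: (session: unknown) => Promise<T>): Promise<T>;

async function createSomething(): Promise<{ collectionId: string; results: { insertLen: number } }> {
  return mongoSessionRun(async (session) => {
    const collectionId = 'created-inside-the-transaction'; // illustrative
    const insertLen = 42; // illustrative
    return { collectionId, results: { insertLen } };
  });
}
```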
@@ -15,8 +15,9 @@ import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { NextAPI } from '@/service/middleware/entry';
 import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CreateCollectionResponse } from '@/global/core/dataset/api';

-async function handler(req: NextApiRequest) {
+async function handler(req: NextApiRequest): CreateCollectionResponse {
   const {
     link,
     trainingType = TrainingModeEnum.chunk,

@@ -40,7 +41,7 @@ async function handler(req: NextApiRequest) {
     insertLen: predictDataLimitLength(trainingType, new Array(10))
   });

-  await mongoSessionRun(async (session) => {
+  return mongoSessionRun(async (session) => {
     // 2. create collection
     const collection = await createOneCollection({
       ...body,

@@ -70,7 +71,7 @@ async function handler(req: NextApiRequest) {
     });

     // load
-    await reloadCollectionChunks({
+    const result = await reloadCollectionChunks({
       collection: {
         ...collection.toObject(),
         datasetId: dataset

@@ -80,7 +81,12 @@ async function handler(req: NextApiRequest) {
       session
     });

-    return collection;
+    return {
+      collectionId: collection._id,
+      results: {
+        insertLen: result.insertLen
+      }
+    };
   });
 }
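Seen from the client side, every dataset collection create endpoint now answers with the new collection id plus the training insert statistics instead of a bare value. A hedged sketch against the link variant; the path matches this handler's file location, while the { data } envelope and field handling are assumptions based on FastGPT's other endpoints.

```ts
type CreateCollectionResult = {
  collectionId: string;
  results: { insertLen: number };
};

// Illustrative call to the link-import endpoint; adjust base URL and auth to your deployment.
async function createLinkCollection(apiKey: string, datasetId: string, link: string) {
  const res = await fetch('/api/core/dataset/collection/create/link', {
    method: 'POST',
    headers: { Authorization: `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ datasetId, link })
  });
  const json = await res.json();
  // Assumption: the payload sits under `data`, as in other FastGPT endpoints.
  const { collectionId, results } = json.data as CreateCollectionResult;
  console.log(`collection ${collectionId} created, ${results.insertLen} chunks queued`);
}
```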
@@ -23,8 +23,9 @@ import { MongoImage } from '@fastgpt/service/common/file/image/schema';
 import { readRawTextByLocalFile } from '@fastgpt/service/common/file/read/utils';
 import { NextAPI } from '@/service/middleware/entry';
 import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CreateCollectionResponse } from '@/global/core/dataset/api';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest, res: NextApiResponse<any>): CreateCollectionResponse {
   /**
    * Creates the multer uploader
    */
@@ -17,8 +17,9 @@ import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { NextAPI } from '@/service/middleware/entry';
 import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CreateCollectionResponse } from '@/global/core/dataset/api';

-async function handler(req: NextApiRequest) {
+async function handler(req: NextApiRequest): CreateCollectionResponse {
   const {
     name,
     text,
@@ -1,7 +1,6 @@
 import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
-import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';

@@ -50,14 +49,6 @@ async function handler(req: NextApiRequest) {
     defaultPermission
   });

-  if (type === DatasetTypeEnum.dataset) {
-    await createDefaultCollection({
-      datasetId: _id,
-      teamId,
-      tmbId
-    });
-  }
-
   return _id;
 }
@@ -26,6 +26,7 @@ import requestIp from 'request-ip';
 import { getUsageSourceByAuthType } from '@fastgpt/global/support/wallet/usage/tools';
 import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
 import {
+  concatHistories,
   filterPublicNodeResponseData,
   removeEmptyUserInput
 } from '@fastgpt/global/core/chat/utils';

@@ -119,6 +120,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {

   let startTime = Date.now();

+  // Web chat params: [Human, AI]
   const chatMessages = GPTMessages2Chats(messages);
   if (chatMessages[chatMessages.length - 1].obj !== ChatRoleEnum.Human) {
     chatMessages.pop();

@@ -170,7 +172,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {

   // 1. get and concat history; 2. get app workflow
   const limit = getMaxHistoryLimitFromNodes(app.modules);
-  const [{ history }, { nodes, edges, chatConfig }] = await Promise.all([
+  const [{ histories }, { nodes, edges, chatConfig }] = await Promise.all([
     getChatItems({
       appId: app._id,
       chatId,

@@ -179,7 +181,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     }),
     getAppLatestVersion(app._id, app)
   ]);
-  const concatHistories = history.concat(chatMessages);
+  const newHistories = concatHistories(histories, chatMessages);
   const responseChatItemId: string | undefined = messages[messages.length - 1].dataId;

   /* start flow controller */

@@ -198,7 +200,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
       runtimeEdges: initWorkflowEdgeStatus(edges),
       variables,
       query: removeEmptyUserInput(question.value),
-      histories: concatHistories,
+      histories: newHistories,
       stream,
       detail,
       maxRunTimes: 200

@@ -217,7 +219,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
       modules: setEntryEntries(app.modules),
       variables,
       inputFiles: files,
-      histories: concatHistories,
+      histories: newHistories,
       startParams: {
         userChatInput: text
       },
@@ -10,7 +10,6 @@ import {
   Checkbox,
   ModalFooter
 } from '@chakra-ui/react';
-import { DragHandleIcon } from '@chakra-ui/icons';
 import { useRouter } from 'next/router';
 import { AppSchema } from '@fastgpt/global/core/app/type.d';
 import { useTranslation } from 'next-i18next';

@@ -28,11 +27,13 @@ import MyModal from '@fastgpt/web/components/common/MyModal';
 import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
 import { postTransition2Workflow } from '@/web/core/app/api/app';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { useSystem } from '@fastgpt/web/hooks/useSystem';

 const AppCard = () => {
   const router = useRouter();
   const { t } = useTranslation();
   const { appT } = useI18n();
+  const { isPc } = useSystem();

   const { appDetail, setAppDetail, onOpenInfoEdit, onDelApp } = useContextSelector(
     AppContext,

@@ -68,7 +69,7 @@ const AppCard = () => {
   return (
     <>
       {/* basic info */}
-      <Box px={6} py={4} position={'relative'}>
+      <Box px={[4, 6]} py={4} position={'relative'}>
         <Flex alignItems={'center'}>
           <Avatar src={appDetail.avatar} borderRadius={'md'} w={'28px'} />
           <Box ml={3} fontWeight={'bold'} fontSize={'md'} flex={'1 0 0'} color={'myGray.900'}>

@@ -149,6 +150,7 @@ const AppCard = () => {
             />
           )}
           <Box flex={1} />
+          {isPc && (
             <MyTag
               type="borderFill"
               colorSchema="gray"

@@ -156,6 +158,7 @@ const AppCard = () => {
             >
               <PermissionIconText defaultPermission={appDetail.defaultPermission} />
             </MyTag>
+          )}
         </HStack>
       </Box>
       {TeamTagsSet && <TagsEditModal onClose={() => setTeamTagsSet(undefined)} />}
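The recurring px={[4, 6]} / px: [4, 6] change in this commit uses Chakra UI's responsive array syntax: the first value applies at the base (mobile) breakpoint and the second from the next breakpoint up, which is what makes the chat UI padding tighter on small screens. A minimal sketch with an invented component name:

```tsx
import { Box } from '@chakra-ui/react';

// 4 (1rem) of horizontal padding on small screens, 6 (1.5rem) from the "sm" breakpoint upwards.
export const ResponsiveCard = () => (
  <Box px={[4, 6]} py={4} borderWidth="1px" borderRadius="md">
    content
  </Box>
);
```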
@@ -29,7 +29,6 @@ import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
 import DeleteIcon, { hoverDeleteStyles } from '@fastgpt/web/components/common/Icon/delete';
 import { TTSTypeEnum } from '@/web/core/app/constants';
 import { getSystemVariables } from '@/web/core/app/utils';
-import { useUpdate } from 'ahooks';
 import { useI18n } from '@/web/context/I18n';
 import { useContextSelector } from 'use-context-selector';
 import { AppContext } from '@/pages/app/detail/components/context';

@@ -49,7 +48,7 @@ const ScheduledTriggerConfig = dynamic(
 const WelcomeTextConfig = dynamic(() => import('@/components/core/app/WelcomeTextConfig'));

 const BoxStyles: BoxProps = {
-  px: 5,
+  px: [4, 6],
   py: '16px',
   borderBottomWidth: '1px',
   borderBottomColor: 'borderColor.low'
@@ -140,7 +140,7 @@ const ChatHistorySlider = ({
             fontSize={'sm'}
             onClick={() =>
               canRouteToDetail &&
-              router.replace({
+              router.push({
                 pathname: '/app/detail',
                 query: { appId }
               })

@@ -156,9 +156,9 @@ const ChatHistorySlider = ({

       {/* menu */}
       <Flex w={'100%'} px={[2, 5]} h={'36px'} my={5} alignItems={'center'}>
+        <Box flex={'1 0 0'}>
           {!isPc && appId && (
             <LightRowTabs<TabEnum>
-              flex={'1 0 0'}
               mr={1}
               inlineStyles={{
                 px: 1

@@ -172,11 +172,10 @@ const ChatHistorySlider = ({
               onChange={setCurrentTab}
             />
           )}
         </Box>

         <Button
           variant={'whitePrimary'}
-          flex={['auto', 1]}
+          flex={[appId ? '0 0 auto' : 1, 1]}
           h={'100%'}
           color={'primary.600'}
           borderRadius={'xl'}
@@ -11,7 +11,7 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
   const { t } = useTranslation();
   const { onExportChat } = useChatBox();
   const router = useRouter();
-  console.log(history);
+
   return history.length > 0 ? (
     <MyMenu
       Button={
@@ -34,7 +34,6 @@ export async function insertData2Dataset({
   session?: ClientSession;
 }) {
   if (!q || !datasetId || !collectionId || !model) {
-    console.log(q, a, datasetId, collectionId, model);
     return Promise.reject('q, datasetId, collectionId, model is required');
   }
   if (String(teamId) === String(tmbId)) {

@@ -140,7 +139,7 @@ export async function updateData2Dataset({
     formatIndexes.unshift(defaultIndex ? defaultIndex : getDefaultIndex({ q, a }));
   }
   formatIndexes = formatIndexes.slice(0, 6);
-  console.log(formatIndexes);
+
   // patch indexes, create, update, delete
   const patchResult: PatchIndexesProps[] = [];
@@ -441,31 +441,27 @@ export const compareWorkflow = (workflow1: WorkflowType, workflow2: WorkflowType
     return false;
   }

-  const node1 = clone1.nodes.filter(Boolean).map((node) => ({
+  const formatNodes = (nodes: StoreNodeItemType[]) => {
+    return nodes
+      .filter((node) => {
+        if (!node) return;
+        if ([FlowNodeTypeEnum.systemConfig].includes(node.flowNodeType)) return;
+
+        return true;
+      })
+      .map((node) => ({
         flowNodeType: node.flowNodeType,
         inputs: node.inputs.map((input) => ({
-          ...input,
+          key: input.key,
+          selectedTypeIndex: input.selectedTypeIndex ?? 0,
+          renderTypeLis: input.renderTypeList,
+          valueType: input.valueType,
           value: input.value ?? undefined
         })),
-        outputs: node.outputs.map((input) => ({
-          ...input,
-          value: input.value ?? undefined
-        })),
-        name: node.name,
-        intro: node.intro,
-        avatar: node.avatar,
-        version: node.version,
-        position: node.position
-      }));
-  const node2 = clone2.nodes.filter(Boolean).map((node) => ({
-    flowNodeType: node.flowNodeType,
-    inputs: node.inputs.map((input) => ({
-      ...input,
-      value: input.value ?? undefined
-    })),
-    outputs: node.outputs.map((input) => ({
-      ...input,
-      value: input.value ?? undefined
+        outputs: node.outputs.map((item) => ({
+          key: item.key,
+          type: item.type,
+          value: item.value ?? undefined
         })),
         name: node.name,
         intro: node.intro,

@@ -473,9 +469,18 @@ export const compareWorkflow = (workflow1: WorkflowType, workflow2: WorkflowType
         version: node.version,
         position: node.position
       }));
+  };
+  const node1 = formatNodes(clone1.nodes);
+  const node2 = formatNodes(clone2.nodes);

   // console.log(node1);
   // console.log(node2);
+
+  node1.forEach((node, i) => {
+    if (!isEqual(node, node2[i])) {
+      console.log('node not equal');
+    }
+  });

   return isEqual(node1, node2);
 };
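The refactor replaces two hand-copied mapping blocks with a single formatNodes helper and, more importantly, compares nodes on an explicit whitelist of fields rather than spreading the whole input and output objects, so transient editor-only fields no longer make two otherwise identical workflows look different to the published check. A reduced sketch of that idea, using lodash's isEqual as above; the node data and the isFolded flag are invented stand-ins for a UI-only field.

```ts
import { isEqual } from 'lodash';

// Reduced example: keep only the fields that matter for "has the workflow really changed?".
type AnyNode = Record<string, any>;

const pick = (node: AnyNode) => ({
  flowNodeType: node.flowNodeType,
  inputs: (node.inputs ?? []).map((input: AnyNode) => ({
    key: input.key,
    value: input.value ?? undefined
  })),
  outputs: (node.outputs ?? []).map((output: AnyNode) => ({
    key: output.key,
    value: output.value ?? undefined
  }))
});

const savedNode = {
  flowNodeType: 'chatNode',
  inputs: [{ key: 'model', value: 'gpt-4o', isFolded: true }],
  outputs: []
};
const editedNode = {
  flowNodeType: 'chatNode',
  inputs: [{ key: 'model', value: 'gpt-4o', isFolded: false }],
  outputs: []
};

// Spreading whole objects would report a difference because of the UI-only flag;
// comparing only the picked fields does not.
console.log(isEqual(pick(savedNode), pick(editedNode))); // true
```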