diff --git a/admin/service/route/app.js b/admin/service/route/app.js
index aa088c77f..ba5162ff8 100644
--- a/admin/service/route/app.js
+++ b/admin/service/route/app.js
@@ -39,7 +39,6 @@ export const useAppRoute = (app) => {
         userId: app.userId,
         name: app.name,
         intro: app.intro,
-        app: app.chat?.chatModel,
         relatedKbs: kbNames, // 将relatedKbs的id转换为相应的Kb名称
         systemPrompt: app.chat?.systemPrompt || '',
         temperature: app.chat?.temperature || 0,
diff --git a/admin/service/schema.js b/admin/service/schema.js
index 7d427d8f5..8557dcae3 100644
--- a/admin/service/schema.js
+++ b/admin/service/schema.js
@@ -62,12 +62,6 @@ const appSchema = new mongoose.Schema({
   avatar: String,
   status: String,
   intro: String,
-  chat: {
-    relatedKbs: [mongoose.Schema.Types.ObjectId],
-    systemPrompt: String,
-    temperature: Number,
-    chatModel: String
-  },
   share: {
     topNum: Number,
     isShare: Boolean,
diff --git a/client/data/ChatModels.json b/client/data/ChatModels.json
new file mode 100644
index 000000000..d8aa6b8a7
--- /dev/null
+++ b/client/data/ChatModels.json
@@ -0,0 +1,26 @@
+{
+  "Gpt35-4k": {
+    "model": "gpt-3.5-turbo",
+    "name": "Gpt35-4k",
+    "contextMaxToken": 4000,
+    "systemMaxToken": 2400,
+    "maxTemperature": 1.2,
+    "price": 1.5
+  },
+  "Gpt35-16k": {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "Gpt35-16k",
+    "contextMaxToken": 16000,
+    "systemMaxToken": 8000,
+    "maxTemperature": 1.2,
+    "price": 3
+  },
+  "Gpt4": {
+    "model": "gpt-4",
+    "name": "Gpt4",
+    "contextMaxToken": 8000,
+    "systemMaxToken": 4000,
+    "maxTemperature": 1.2,
+    "price": 45
+  }
+}
diff --git a/client/data/QAModels.json b/client/data/QAModels.json
new file mode 100644
index 000000000..42e96848f
--- /dev/null
+++ b/client/data/QAModels.json
@@ -0,0 +1,8 @@
+{
+  "Gpt35-16k": {
+    "model": "gpt-3.5-turbo-16k",
+    "name": "Gpt35-16k",
+    "maxToken": 16000,
+    "price": 3
+  }
+}
diff --git a/client/data/SystemParams.json b/client/data/SystemParams.json
new file mode 100644
index 000000000..08a807e07
--- /dev/null
+++ b/client/data/SystemParams.json
@@ -0,0 +1,6 @@
+{
+  "vectorMaxProcess": 10,
+  "qaMaxProcess": 10,
+  "pgIvfflatProbe": 10,
+  "sensitiveCheck": false
+}
diff --git a/client/data/VectorModels.json b/client/data/VectorModels.json
new file mode 100644
index 000000000..3e3aee8b0
--- /dev/null
+++ b/client/data/VectorModels.json
@@ -0,0 +1,7 @@
+{
+  "text-embedding-ada-002": {
+    "model": "text-embedding-ada-002",
+    "name": "Embedding-2",
+    "price": 0.2
+  }
+}
diff --git a/client/public/js/baidutongji.js b/client/public/js/baidutongji.js
deleted file mode 100644
index 632c5ee9c..000000000
--- a/client/public/js/baidutongji.js
+++ /dev/null
@@ -1,8 +0,0 @@
-var _hmt = _hmt || [];
-
-(function () {
-  const hm = document.createElement('script');
-  hm.src = 'https://hm.baidu.com/hm.js?a5357e9dab086658bac0b6faf148882e';
-  const s = document.getElementsByTagName('script')[0];
-  s.parentNode.insertBefore(hm, s);
-})();
diff --git a/client/src/api/plugins/kb.ts b/client/src/api/plugins/kb.ts
index f01efdb7b..9dd391d5e 100644
--- a/client/src/api/plugins/kb.ts
+++ b/client/src/api/plugins/kb.ts
@@ -2,7 +2,6 @@ import { GET, POST, PUT, DELETE } from '../request';
 import type { KbItemType } from '@/types/plugin';
 import { RequestPaging } from '@/types/index';
 import { TrainingModeEnum } from '@/constants/plugin';
-import { type QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
 import {
   Props as PushDataProps,
   Response as PushDateResponse
@@ -60,7 +59,7 @@ export const getTrainingData = (data: { kbId: string; init: boolean }) =>
   }>(`/plugins/kb/data/getTrainingData`, data);

 export const getKbDataItemById = (dataId: string) =>
-  GET(`/plugins/kb/data/getDataById`, { dataId });
+  GET(`/plugins/kb/data/getDataById`, { dataId });

 /**
  * 直接push数据
diff --git a/client/src/api/system.ts b/client/src/api/system.ts
index e46006474..cf384ace5 100644
--- a/client/src/api/system.ts
+++ b/client/src/api/system.ts
@@ -1,9 +1,6 @@
 import { GET, POST, PUT } from './request';
-import type { ChatModelItemType } from '@/constants/model';
 import type { InitDateResponse } from '@/pages/api/system/getInitData';

 export const getInitData = () => GET('/system/getInitData');
-export const getSystemModelList = () => GET('/system/getModels');
-
 export const uploadImg = (base64Img: string) => POST('/system/uploadImage', { base64Img });
diff --git a/client/src/components/ChatBox/index.tsx b/client/src/components/ChatBox/index.tsx
index b7aedcbd3..5946ce462 100644
--- a/client/src/components/ChatBox/index.tsx
+++ b/client/src/components/ChatBox/index.tsx
@@ -10,7 +10,7 @@ import React, {
 import { throttle } from 'lodash';
 import { ChatItemType, ChatSiteItemType, ExportChatType } from '@/types/chat';
 import { useToast } from '@/hooks/useToast';
-import { useCopyData, voiceBroadcast, hasVoiceApi } from '@/utils/tools';
+import { useCopyData, voiceBroadcast, hasVoiceApi, getErrText } from '@/utils/tools';
 import { Box, Card, Flex, Input, Textarea, Button, useTheme } from '@chakra-ui/react';
 import { useUserStore } from '@/store/user';
@@ -241,33 +241,34 @@ const ChatBox = (
        variables: data
      });

-      // 设置聊天内容为完成状态
-      setChatHistory((state) =>
-        state.map((item, index) => {
-          if (index !== state.length - 1) return item;
-          return {
-            ...item,
-            status: 'finish'
-          };
-        })
-      );
-
      setTimeout(() => {
        generatingScroll();
        TextareaDom.current?.focus();
      }, 100);
    } catch (err: any) {
      toast({
-        title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
-        status: 'warning',
+        title: getErrText(err, '聊天出错了~'),
+        status: 'error',
        duration: 5000,
        isClosable: true
      });

-      resetInputVal(value);
-
-      setChatHistory(newChatList.slice(0, newChatList.length - 2));
+      if (!err?.responseText) {
+        resetInputVal(value);
+        setChatHistory(newChatList.slice(0, newChatList.length - 2));
+      }
    }
+
+    // set finish status
+    setChatHistory((state) =>
+      state.map((item, index) => {
+        if (index !== state.length - 1) return item;
+        return {
+          ...item,
+          status: 'finish'
+        };
+      })
+    );
  },
  [
    isChatting,
@@ -404,7 +405,7 @@
            py={4}
            _hover={{
              '& .control': {
-                display: 'flex'
+                display: item.status === 'finish' ? 'flex' : 'none'
              }
            }}
          >
diff --git a/client/src/constants/app.ts b/client/src/constants/app.ts
index ff09156b3..2b4ebd23f 100644
--- a/client/src/constants/app.ts
+++ b/client/src/constants/app.ts
@@ -965,8 +965,8 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
      name: '意图识别',
      intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
      type: 'http',
-      url: '/openapi/modules/agent/classifyQuestion',
-      flowType: 'classifyQuestionNode',
+      url: '/openapi/modules/agent/recognizeIntention',
+      flowType: 'recognizeIntention',
      inputs: [
        {
          key: 'systemPrompt',
diff --git a/client/src/constants/data.ts b/client/src/constants/data.ts
deleted file mode 100644
index e72f987fb..000000000
--- a/client/src/constants/data.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-export enum ChatModelEnum {
-  'GPT35' = 'gpt-3.5-turbo',
-  'GPT3516k' = 'gpt-3.5-turbo-16k',
-  'GPT4' = 'gpt-4',
-  'GPT432k' = 'gpt-4-32k'
-}
-
-export const chatModelList = [
-  { label: 'Gpt35-16k', value: ChatModelEnum.GPT3516k },
-  { label: 'Gpt35-4k', value: ChatModelEnum.GPT35 },
-  { label: 'Gpt4-8k', value: ChatModelEnum.GPT4 }
-];
diff --git a/client/src/constants/flow/ModuleTemplate.ts b/client/src/constants/flow/ModuleTemplate.ts
index d945d1e28..5bf0bad98 100644
--- a/client/src/constants/flow/ModuleTemplate.ts
+++ b/client/src/constants/flow/ModuleTemplate.ts
@@ -1,7 +1,7 @@
 import { AppModuleItemTypeEnum, SystemInputEnum, SpecificInputEnum } from '../app';
 import { FlowModuleTypeEnum, FlowInputItemTypeEnum, FlowOutputItemTypeEnum } from './index';
 import type { AppModuleTemplateItemType } from '@/types/app';
-import { chatModelList } from '../data';
+import { chatModelList } from '@/store/static';
 import {
   Input_Template_History,
   Input_Template_TFSwitch,
@@ -96,8 +96,8 @@ export const ChatModule: AppModuleTemplateItemType = {
      key: 'model',
      type: FlowInputItemTypeEnum.select,
      label: '对话模型',
-      value: chatModelList[0].value,
-      list: chatModelList
+      value: chatModelList[0]?.model,
+      list: chatModelList.map((item) => ({ label: item.name, value: item.model }))
    },
    {
      key: 'temperature',
@@ -278,13 +278,13 @@ export const TFSwitchModule: AppModuleTemplateItemType = {
    }
  ]
};
-export const ClassifyQuestionModule: AppModuleTemplateItemType = {
+export const RecognizeIntentionModule: AppModuleTemplateItemType = {
  logo: '/imgs/module/cq.png',
  name: '意图识别',
  intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
  type: AppModuleItemTypeEnum.http,
-  url: '/openapi/modules/agent/classifyQuestion',
-  flowType: FlowModuleTypeEnum.classifyQuestionNode,
+  url: '/openapi/modules/agent/recognizeIntention',
+  flowType: FlowModuleTypeEnum.recognizeIntention,
  inputs: [
    {
      key: 'systemPrompt',
@@ -348,6 +348,6 @@ export const ModuleTemplates = [
  },
  {
    label: 'Agent',
-    list: [ClassifyQuestionModule]
+    list: [RecognizeIntentionModule]
  }
];
diff --git a/client/src/constants/flow/index.ts b/client/src/constants/flow/index.ts
index 4af9271b9..7e79c460c 100644
--- a/client/src/constants/flow/index.ts
+++ b/client/src/constants/flow/index.ts
@@ -26,7 +26,7 @@ export enum FlowModuleTypeEnum {
  kbSearchNode = 'kbSearchNode',
  tfSwitchNode = 'tfSwitchNode',
  answerNode = 'answerNode',
-  classifyQuestionNode = 'classifyQuestionNode'
+  recognizeIntention = 'recognizeIntention'
}

export const edgeOptions = {
diff --git a/client/src/constants/model.ts b/client/src/constants/model.ts
index f1ddb5551..b8ddf6f47 100644
--- a/client/src/constants/model.ts
+++ b/client/src/constants/model.ts
@@ -1,11 +1,6 @@
-import { getSystemModelList } from '@/api/system';
 import type { ShareChatEditType } from '@/types/app';
 import type { AppSchema } from '@/types/mongoSchema';

-export const embeddingModel = 'text-embedding-ada-002';
-export const embeddingPrice = 0.1;
-export type EmbeddingModelType = 'text-embedding-ada-002';
-
 export enum OpenAiChatEnum {
   'GPT35' = 'gpt-3.5-turbo',
   'GPT3516k' = 'gpt-3.5-turbo-16k',
@@ -13,58 +8,6 @@
   'GPT432k' = 'gpt-4-32k'
 }

-export type ChatModelType = `${OpenAiChatEnum}`;
-
-export type ChatModelItemType = {
-  chatModel: ChatModelType;
-  name: string;
-  contextMaxToken: number;
-  systemMaxToken: number;
-  maxTemperature: number;
-  price: number;
-};
-
-export const ChatModelMap = {
-  [OpenAiChatEnum.GPT35]: {
-    chatModel: OpenAiChatEnum.GPT35,
-    name: 'Gpt35-4k',
-    contextMaxToken: 4000,
-    systemMaxToken: 2400,
-    maxTemperature: 1.2,
-    price: 1.5
-  },
-  [OpenAiChatEnum.GPT3516k]: {
-    chatModel: OpenAiChatEnum.GPT3516k,
-    name: 'Gpt35-16k',
-    contextMaxToken: 16000,
-    systemMaxToken: 8000,
-    maxTemperature: 1.2,
-    price: 3
-  },
-  [OpenAiChatEnum.GPT4]: {
-    chatModel: OpenAiChatEnum.GPT4,
-    name: 'Gpt4',
-    contextMaxToken: 8000,
-    systemMaxToken: 4000,
-    maxTemperature: 1.2,
-    price: 45
-  },
-  [OpenAiChatEnum.GPT432k]: {
-    chatModel: OpenAiChatEnum.GPT432k,
-    name: 'Gpt4-32k',
-    contextMaxToken: 32000,
-    systemMaxToken: 8000,
-    maxTemperature: 1.2,
-    price: 90
-  }
-};
-
-export const chatModelList: ChatModelItemType[] = [
-  ChatModelMap[OpenAiChatEnum.GPT3516k],
-  ChatModelMap[OpenAiChatEnum.GPT35],
-  ChatModelMap[OpenAiChatEnum.GPT4]
-];
-
 export const defaultApp: AppSchema = {
   _id: '',
   userId: 'userId',
@@ -72,17 +15,6 @@
   avatar: '/icon/logo.png',
   intro: '',
   updateTime: Date.now(),
-  chat: {
-    relatedKbs: [],
-    searchSimilarity: 0.2,
-    searchLimit: 5,
-    searchEmptyText: '',
-    systemPrompt: '',
-    limitPrompt: '',
-    temperature: 0,
-    maxToken: 4000,
-    chatModel: OpenAiChatEnum.GPT35
-  },
   share: {
     isShare: false,
     isShareDetail: false,
diff --git a/client/src/constants/user.ts b/client/src/constants/user.ts
index 4386331e2..af407832f 100644
--- a/client/src/constants/user.ts
+++ b/client/src/constants/user.ts
@@ -1,9 +1,6 @@
-export enum BillTypeEnum {
-  chat = 'chat',
-  openapiChat = 'openapiChat',
-  QA = 'QA',
-  vector = 'vector',
-  return = 'return'
+export enum BillSourceEnum {
+  fastgpt = 'fastgpt',
+  api = 'api'
 }
 export enum PageTypeEnum {
   login = 'login',
@@ -11,12 +8,9 @@ export enum PageTypeEnum {
   forgetPassword = 'forgetPassword'
 }

-export const BillTypeMap: Record<`${BillTypeEnum}`, string> = {
-  [BillTypeEnum.chat]: '对话',
-  [BillTypeEnum.openapiChat]: 'api 对话',
-  [BillTypeEnum.QA]: 'QA拆分',
-  [BillTypeEnum.vector]: '索引生成',
-  [BillTypeEnum.return]: '退款'
+export const BillSourceMap: Record<`${BillSourceEnum}`, string> = {
+  [BillSourceEnum.fastgpt]: 'FastGpt 平台',
+  [BillSourceEnum.api]: 'Api'
 };

 export enum PromotionEnum {
diff --git a/client/src/hooks/usePagination.tsx b/client/src/hooks/usePagination.tsx
index 49a8cb64b..516f085ed 100644
--- a/client/src/hooks/usePagination.tsx
+++ b/client/src/hooks/usePagination.tsx
@@ -1,4 +1,4 @@
-import { useRef, useState, useCallback, useLayoutEffect, useMemo, useEffect } from 'react';
+import { useRef, useState, useCallback, useMemo, useEffect } from 'react';
 import type { PagingData } from '../types/index';
 import { IconButton, Flex, Box, Input } from '@chakra-ui/react';
 import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
@@ -144,7 +144,7 @@ export const usePagination = ({
    [data.length, isLoading, mutate, pageNum, total]
  );

-  useLayoutEffect(() => {
+  useEffect(() => {
    if (!elementRef.current || type !== 'scroll') return;

    const scrolling = throttle((e: Event) => {
diff --git a/client/src/pages/_app.tsx b/client/src/pages/_app.tsx
index 5f58580e9..4f6617cb6 100644
--- a/client/src/pages/_app.tsx
+++ b/client/src/pages/_app.tsx
@@ -1,4 +1,4 @@
-import { useEffect } from 'react';
+import { useEffect, useState } from 'react';
 import type { AppProps } from 'next/app';
 import Script from 'next/script';
 import Head from 'next/head';
@@ -8,9 +8,9 @@ import { theme } from '@/constants/theme';
 import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
 import NProgress from 'nprogress'; //nprogress module
 import Router from 'next/router';
-import { useGlobalStore } from '@/store/global';
 import 'nprogress/nprogress.css';
 import '@/styles/reset.scss';
+import { clientInitData } from '@/store/static';

 //Binding events.
 Router.events.on('routeChangeStart', () => NProgress.start());
@@ -29,13 +29,15 @@ const queryClient = new QueryClient({
 });

 function App({ Component, pageProps }: AppProps) {
-  const {
-    loadInitData,
-    initData: { googleVerKey, baiduTongji }
-  } = useGlobalStore();
+  const [googleVerKey, setGoogleVerKey] = useState();
+  const [baiduTongji, setBaiduTongji] = useState();

   useEffect(() => {
-    loadInitData();
+    (async () => {
+      const { googleVerKey, baiduTongji } = await clientInitData();
+      setGoogleVerKey(googleVerKey);
+      setBaiduTongji(baiduTongji);
+    })();
   }, []);

   return (
@@ -53,7 +55,7 @@ function App({ Component, pageProps }: AppProps) {
-        {baiduTongji && }
+        {baiduTongji && }
         {googleVerKey && (
           <>