perf: system model list

archer
2023-05-13 14:38:43 +08:00
parent 4ec02c654b
commit 5f66f4523c
12 changed files with 66 additions and 120 deletions

View File

@@ -1,8 +1,6 @@
# proxy
# AXIOS_PROXY_HOST=127.0.0.1
# AXIOS_PROXY_PORT=7890
-# OPENAI_BASE_URL=https://api.openai.com/v1
-# OPENAI_BASE_URL_AUTH=optional auth credential
# Whether to enable queue tasks. 1 = on, 0 = off (tasks are executed by requesting parentUrl; for a single machine just set 1)
queueTask=1
parentUrl=https://hostname/api/openapi/startEvents
@@ -17,7 +15,13 @@ aliTemplateCode=SMS_xxx
# token
TOKEN_KEY=xxx
# openai
+# OPENAI_BASE_URL=https://api.openai.com/v1
+# OPENAI_BASE_URL_AUTH=optional auth credential
OPENAIKEY=sk-xxx
+GPT4KEY=sk-xxx
+# claude
+CLAUDE_BASE_URL=claude model request URL
+CLAUDE_KEY=CLAUDE_KEY
# db
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/test?authSource=admin
PG_HOST=0.0.0.0

View File

@@ -170,9 +170,6 @@ services:
# proxy (optional)
- AXIOS_PROXY_HOST=127.0.0.1
- AXIOS_PROXY_PORT=7890
-# openai relay endpoint (optional)
-- OPENAI_BASE_URL=https://api.openai.com/v1
-- OPENAI_BASE_URL_AUTH=optional auth credential
# Whether to enable queue tasks. 1 = on, 0 = off (tasks are executed by requesting parentUrl; for a single machine just set 1)
- queueTask=1
- parentUrl=https://hostname/api/openapi/startEvents
@@ -195,8 +192,14 @@ services:
- PG_USER=fastgpt # POSTGRES_USER
- PG_PASSWORD=1234 # POSTGRES_PASSWORD
- PG_DB_NAME=fastgpt # POSTGRES_DB
-# openai api key
+# openai
- OPENAIKEY=sk-xxxxx
+- GPT4KEY=sk-xxx
+- OPENAI_BASE_URL=https://api.openai.com/v1
+- OPENAI_BASE_URL_AUTH=optional auth credential
+# claude
+- CLAUDE_BASE_URL=claude model request URL
+- CLAUDE_KEY=CLAUDE_KEY
nginx:
image: nginx:alpine3.17
container_name: nginx

View File

@@ -39,9 +39,6 @@ services:
# proxy (optional)
- AXIOS_PROXY_HOST=127.0.0.1
- AXIOS_PROXY_PORT=7890
-# openai relay endpoint (optional)
-- OPENAI_BASE_URL=https://api.openai.com/v1
-- OPENAI_BASE_URL_AUTH=optional auth credential
# Whether to enable queue tasks. 1 = on, 0 = off (tasks are executed by requesting parentUrl; for a single machine just set 1)
- queueTask=1
- parentUrl=https://hostname/api/openapi/startEvents
@@ -64,8 +61,14 @@ services:
- PG_USER=fastgpt # POSTGRES_USER
- PG_PASSWORD=1234 # POSTGRES_PASSWORD
- PG_DB_NAME=fastgpt # POSTGRES_DB
-# openai api key
+# openai
- OPENAIKEY=sk-xxxxx
+- GPT4KEY=sk-xxx
+- OPENAI_BASE_URL=https://api.openai.com/v1
+- OPENAI_BASE_URL_AUTH=optional auth credential
+# claude
+- CLAUDE_BASE_URL=claude model request URL
+- CLAUDE_KEY=CLAUDE_KEY
nginx:
image: nginx:alpine3.17
container_name: nginx

View File

@@ -1,3 +1,6 @@
import { GET, POST, PUT } from './request';
+import type { ChatModelItemType } from '@/constants/model';

export const getFilling = () => GET<{ beianText: string }>('/system/getFiling');
+export const getSystemModelList = () => GET<ChatModelItemType[]>('/system/getModels');

View File

@@ -21,7 +21,7 @@ const Layout = ({ children, isPcDevice }: { children: JSX.Element; isPcDevice: b
const { isPc } = useScreen({ defaultIsPc: isPcDevice });
const router = useRouter();
const { colorMode, setColorMode } = useColorMode();
-const { Loading } = useLoading({ defaultLoading: true });
+const { Loading } = useLoading();
const { loading } = useGlobalStore();
const isChatPage = useMemo(

View File

@@ -1,3 +1,4 @@
+import { getSystemModelList } from '@/api/system';
import type { ModelSchema } from '@/types/mongoSchema';

export const embeddingModel = 'text-embedding-ada-002';
@@ -58,11 +59,15 @@ export const ChatModelMap = {
}
};

-export const chatModelList: ChatModelItemType[] = [
-  ChatModelMap[OpenAiChatEnum.GPT35],
-  ChatModelMap[OpenAiChatEnum.GPT4],
-  ChatModelMap[ClaudeEnum.Claude]
-];
+let chatModelList: ChatModelItemType[] = [];
+export const getChatModelList = async () => {
+  if (chatModelList.length > 0) {
+    return chatModelList;
+  }
+  const list = await getSystemModelList();
+  chatModelList = list;
+  return list;
+};

export enum ModelStatusEnum {
running = 'running',
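
A minimal sketch of the caching behavior this introduces (hypothetical usage; getSystemModelList hits the new /system/getModels route shown below):

// First call: the module-level cache is empty, so the API is requested once.
const first = await getChatModelList();
// Subsequent calls return the cached array without another request,
// provided the first call returned a non-empty list.
const second = await getChatModelList();
console.log(first === second); // true after a successful first fetch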

View File

@@ -0,0 +1,23 @@
+import type { NextApiRequest, NextApiResponse } from 'next';
+import { jsonRes } from '@/service/response';
+import type { ChatModelItemType } from '@/constants/model';
+import { ChatModelMap, OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
+
+// get the models available to the system
+export default async function handler(req: NextApiRequest, res: NextApiResponse) {
+  const chatModelList: ChatModelItemType[] = [];
+  if (process.env.OPENAIKEY) {
+    chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
+  }
+  if (process.env.GPT4KEY) {
+    chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
+  }
+  if (process.env.CLAUDE_KEY) {
+    chatModelList.push(ChatModelMap[ClaudeEnum.Claude]);
+  }
+  jsonRes(res, {
+    data: chatModelList
+  });
+}
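
For reference, a minimal sketch of consuming this route from the client through the getSystemModelList helper added in api/system.ts above (assuming jsonRes wraps the list in its usual response envelope, so the GET helper resolves directly to the array):

import { getSystemModelList } from '@/api/system';

// Only chat models whose server-side keys are configured
// (OPENAIKEY, GPT4KEY, CLAUDE_KEY) appear in the result.
const models = await getSystemModelList();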

View File

@@ -138,7 +138,7 @@ const ModelDataCard = ({ modelId, isOwner }: { modelId: string; isOwner: boolean
});
return (
-<>
+<Box position={'relative'}>
<Flex>
<Box fontWeight={'bold'} fontSize={'lg'} flex={1} mr={2}>
: {total}
@@ -303,7 +303,7 @@ const ModelDataCard = ({ modelId, isOwner }: { modelId: string; isOwner: boolean
{isOpenSelectCsvModal && (
<SelectCsvModal modelId={modelId} onClose={onCloseSelectCsvModal} onSuccess={refetchData} />
)}
-</>
+</Box>
);
};

View File

@@ -21,12 +21,13 @@ import {
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import type { ModelSchema } from '@/types/mongoSchema';
import { UseFormReturn } from 'react-hook-form';
-import { ChatModelMap, ModelVectorSearchModeMap, chatModelList } from '@/constants/model';
+import { ChatModelMap, ModelVectorSearchModeMap, getChatModelList } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { useConfirm } from '@/hooks/useConfirm';
import { useSelectFile } from '@/hooks/useSelectFile';
import { useToast } from '@/hooks/useToast';
import { compressImg } from '@/utils/file';
+import { useQuery } from '@tanstack/react-query';

const ModelEditForm = ({
formHooks,
@@ -70,6 +71,8 @@ const ModelEditForm = ({
[setValue, toast]
);
+
+  const { data: chatModelList = [] } = useQuery(['init'], getChatModelList);
return (
<>
<Card p={4}>
@@ -299,104 +302,6 @@ const ModelEditForm = ({
</Card>
)}
<File onSelect={onSelectFile} />
-{/* <Card p={4}>
-<Box fontWeight={'bold'}>Security policy</Box>
-<FormControl mt={2}>
-<Flex alignItems={'center'}>
-<Box flex={'0 0 120px'} w={0}>
-Max length per sentence:
-</Box>
-<Input
-flex={1}
-type={'number'}
-{...register('security.contentMaxLen', {
-required: 'Sentence length is required',
-min: {
-value: 0,
-message: 'Minimum sentence length is 0'
-},
-max: {
-value: 4000,
-message: 'Maximum sentence length is 4000'
-},
-valueAsNumber: true
-})}
-></Input>
-</Flex>
-</FormControl>
-<FormControl mt={5}>
-<Flex alignItems={'center'}>
-<Box flex={'0 0 120px'} w={0}>
-Max context length:
-</Box>
-<Input
-flex={1}
-type={'number'}
-{...register('security.contextMaxLen', {
-required: 'Context length is required',
-min: {
-value: 1,
-message: 'Minimum context length is 5'
-},
-max: {
-value: 400000,
-message: 'Maximum context length is 400000'
-},
-valueAsNumber: true
-})}
-></Input>
-</Flex>
-</FormControl>
-<FormControl mt={5}>
-<Flex alignItems={'center'}>
-<Box flex={'0 0 120px'} w={0}>
-Chat expiration time:
-</Box>
-<Input
-flex={1}
-type={'number'}
-{...register('security.expiredTime', {
-required: 'Chat expiration time is required',
-min: {
-value: 0.1,
-message: 'Minimum chat expiration is 0.1 hours'
-},
-max: {
-value: 999999,
-message: 'Maximum chat expiration is 999999 hours'
-},
-valueAsNumber: true
-})}
-></Input>
-<Box ml={3}>hours</Box>
-</Flex>
-</FormControl>
-<FormControl mt={5} pb={5}>
-<Flex alignItems={'center'}>
-<Box flex={'0 0 130px'} w={0}>
-Max chat load count:
-</Box>
-<Box flex={1}>
-<Input
-type={'number'}
-{...register('security.maxLoadAmount', {
-required: 'Max chat load count is required',
-max: {
-value: 999999,
-message: 'Max chat load count is 999999 at most'
-},
-valueAsNumber: true
-})}
-></Input>
-<Box fontSize={'sm'} color={'blackAlpha.400'} position={'absolute'}>
-Set to -1 for no limit
-</Box>
-</Box>
-<Box ml={3}>times</Box>
-</Flex>
-</FormControl>
-</Card> */}
<ConfirmChild />
</>
);

View File

@@ -34,7 +34,7 @@ const Model = ({ modelId, isPcDevice }: { modelId: string; isPcDevice: boolean }
<ModelList modelId={modelId} />
</Box>
)}
-<Box flex={1} h={'100%'}>
+<Box flex={1} h={'100%'} position={'relative'}>
{modelId && <ModelDetail modelId={modelId} isPc={isPc} />}
</Box>
</Flex>

View File

@@ -63,7 +63,7 @@ export const getApiKey = async ({
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
-systemAuthKey: process.env.LAFKEY as string
+systemAuthKey: process.env.CLAUDE_KEY as string
}
};

View File

@@ -30,7 +30,7 @@ export const lafClaudChat = async ({
const prompt = `${systemPromptText}我的问题是:'${messages[messages.length - 1].value}'`;
const lafResponse = await axios.post(
-'https://hnvacz.laf.run/claude-gpt',
+process.env.CLAUDE_BASE_URL || '',
{
prompt,
stream,