System optimize (#303)

This commit is contained in:
Archer
2023-09-15 10:21:46 +08:00
committed by GitHub
parent 7c1ec04380
commit 4365a94ea9
36 changed files with 378 additions and 369 deletions

View File

@@ -17,10 +17,6 @@ const nextConfig = {
}
};
}
config.experiments = {
asyncWebAssembly: true,
layers: true
};
config.module = {
...config.module,
rules: config.module.rules.concat([

View File

@@ -12,7 +12,6 @@
"@chakra-ui/icons": "^2.0.17",
"@chakra-ui/react": "^2.7.0",
"@chakra-ui/system": "^2.5.8",
"@dqbd/tiktoken": "^1.0.7",
"@emotion/react": "^11.10.6",
"@emotion/styled": "^11.10.6",
"@mozilla/readability": "^0.4.4",
@@ -32,6 +31,7 @@
"i18next": "^22.5.1",
"immer": "^9.0.19",
"js-cookie": "^3.0.5",
"js-tiktoken": "^1.0.7",
"jschardet": "^3.0.0",
"jsdom": "^22.1.0",
"jsonwebtoken": "^9.0.0",

20
client/pnpm-lock.yaml generated
View File

@@ -10,9 +10,6 @@ dependencies:
'@chakra-ui/system':
specifier: ^2.5.8
version: registry.npmmirror.com/@chakra-ui/system@2.5.8(@emotion/react@11.10.6)(@emotion/styled@11.10.6)(react@18.2.0)
'@dqbd/tiktoken':
specifier: ^1.0.7
version: registry.npmmirror.com/@dqbd/tiktoken@1.0.7
'@emotion/react':
specifier: ^11.10.6
version: registry.npmmirror.com/@emotion/react@11.10.6(@types/react@18.0.28)(react@18.2.0)
@@ -70,6 +67,9 @@ dependencies:
js-cookie:
specifier: ^3.0.5
version: registry.npmmirror.com/js-cookie@3.0.5
js-tiktoken:
specifier: ^1.0.7
version: registry.npmmirror.com/js-tiktoken@1.0.7
jschardet:
specifier: ^3.0.0
version: registry.npmmirror.com/jschardet@3.0.0
@@ -4214,12 +4214,6 @@ packages:
kuler: registry.npmmirror.com/kuler@2.0.0
dev: false
registry.npmmirror.com/@dqbd/tiktoken@1.0.7:
resolution: {integrity: sha512-bhR5k5W+8GLzysjk8zTMVygQZsgvf7W1F0IlL4ZQ5ugjo5rCyiwGM5d8DYriXspytfu98tv59niang3/T+FoDw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@dqbd/tiktoken/-/tiktoken-1.0.7.tgz}
name: '@dqbd/tiktoken'
version: 1.0.7
dev: false
registry.npmmirror.com/@emotion/babel-plugin@11.11.0:
resolution: {integrity: sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz}
name: '@emotion/babel-plugin'
@@ -8955,6 +8949,14 @@ packages:
version: 4.4.1
dev: true
registry.npmmirror.com/js-tiktoken@1.0.7:
resolution: {integrity: sha512-biba8u/clw7iesNEWLOLwrNGoBP2lA+hTaBLs/D45pJdUPFXyxD6nhcDVtADChghv4GgyAiMKYMiRx7x6h7Biw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/js-tiktoken/-/js-tiktoken-1.0.7.tgz}
name: js-tiktoken
version: 1.0.7
dependencies:
base64-js: registry.npmmirror.com/base64-js@1.5.1
dev: false
registry.npmmirror.com/js-tokens@4.0.0:
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz}
name: js-tokens

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683254592786" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1352" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M450.09164971 42.7357605a123.86965959 123.86965959 0 0 1 123.76374272 0L889.06369251 222.84722403a123.92261859 123.92261859 0 0 1 62.06722618 107.2407279v360.38180181c0 44.22025102-23.6194395 85.05116445-61.9613093 107.13480989l-0.10591688 0.10591687-315.20830008 180.11146353a123.86965959 123.86965959 0 0 1-123.76374272 0L134.93630749 797.7104805a123.92261859 123.92261859 0 0 1-62.06722618-107.24072676V330.08795193c0-44.22025102 23.67239737-85.05116445 61.9613093-107.13481102l0.10591688-0.10591688z m462.16781482 223.59029646a33.78744889 33.78744889 0 0 0-46.17971029-12.28634453l-353.81496263 204.57823687L158.44982898 254.09267029a33.78744889 33.78744889 0 0 0-33.89336463 58.46605597l353.6031289 204.47232v430.02207687c0 18.00585102 15.14609778 32.62236445 33.84040675 32.62236444a33.20490667 33.20490667 0 0 0 33.73449102-32.62236444V517.29583787l354.18567111-204.79006948a33.78744889 33.78744889 0 0 0 14.66947129-41.20162304z" p-id="1353"></path></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

File diff suppressed because one or more lines are too long

View File

@@ -26,8 +26,7 @@ import {
import { Box, Card, Flex, Input, Textarea, Button, useTheme, BoxProps } from '@chakra-ui/react';
import { feConfigs } from '@/store/static';
import { event } from '@/utils/plugin/eventbus';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { useMarkdown } from '@/hooks/useMarkdown';
import { VariableItemType } from '@/types/app';
import { VariableInputEnum } from '@/constants/app';
@@ -344,7 +343,7 @@ const ChatBox = (
const abortSignal = new AbortController();
controller.current = abortSignal;
const messages = adaptChatItem_openAI({ messages: newChatList, reserveId: true });
const messages = adaptChat2GptMessages({ messages: newChatList, reserveId: true });
const { responseData } = await onStartChat({
chatList: newChatList,

View File

@@ -1,102 +1,109 @@
import React from 'react';
import React, { useEffect, useState } from 'react';
import type { IconProps } from '@chakra-ui/react';
import { Icon } from '@chakra-ui/react';
const map = {
appFill: require('./icons/fill/app.svg').default,
appLight: require('./icons/light/app.svg').default,
copy: require('./icons/copy.svg').default,
chatSend: require('./icons/chatSend.svg').default,
delete: require('./icons/delete.svg').default,
stop: require('./icons/stop.svg').default,
collectionLight: require('./icons/collectionLight.svg').default,
collectionSolid: require('./icons/collectionSolid.svg').default,
empty: require('./icons/empty.svg').default,
back: require('./icons/back.svg').default,
backFill: require('./icons/fill/back.svg').default,
more: require('./icons/more.svg').default,
tabbarChat: require('./icons/phoneTabbar/chat.svg').default,
tabbarModel: require('./icons/phoneTabbar/app.svg').default,
tabbarMore: require('./icons/phoneTabbar/more.svg').default,
tabbarMe: require('./icons/phoneTabbar/me.svg').default,
closeSolid: require('./icons/closeSolid.svg').default,
wx: require('./icons/wx.svg').default,
out: require('./icons/out.svg').default,
git: require('./icons/git.svg').default,
gitFill: require('./icons/fill/git.svg').default,
googleFill: require('./icons/fill/google.svg').default,
menu: require('./icons/menu.svg').default,
edit: require('./icons/edit.svg').default,
inform: require('./icons/inform.svg').default,
export: require('./icons/export.svg').default,
text: require('./icons/text.svg').default,
history: require('./icons/history.svg').default,
kbTest: require('./icons/kbTest.svg').default,
date: require('./icons/date.svg').default,
apikey: require('./icons/apikey.svg').default,
save: require('./icons/save.svg').default,
minus: require('./icons/minus.svg').default,
chat: require('./icons/light/chat.svg').default,
chatFill: require('./icons/fill/chat.svg').default,
clear: require('./icons/light/clear.svg').default,
apiLight: require('./icons/light/appApi.svg').default,
overviewLight: require('./icons/light/overview.svg').default,
settingLight: require('./icons/light/setting.svg').default,
shareLight: require('./icons/light/share.svg').default,
dbLight: require('./icons/light/db.svg').default,
dbFill: require('./icons/fill/db.svg').default,
appStoreLight: require('./icons/light/appStore.svg').default,
appStoreFill: require('./icons/fill/appStore.svg').default,
meLight: require('./icons/light/me.svg').default,
meFill: require('./icons/fill/me.svg').default,
welcomeText: require('./icons/modules/welcomeText.svg').default,
variable: require('./icons/modules/variable.svg').default,
setTop: require('./icons/light/setTop.svg').default,
fullScreenLight: require('./icons/light/fullScreen.svg').default,
voice: require('./icons/voice.svg').default,
html: require('./icons/file/html.svg').default,
pdf: require('./icons/file/pdf.svg').default,
markdown: require('./icons/file/markdown.svg').default,
importLight: require('./icons/light/import.svg').default,
manualImport: require('./icons/file/manualImport.svg').default,
indexImport: require('./icons/file/indexImport.svg').default,
csvImport: require('./icons/file/csv.svg').default,
qaImport: require('./icons/file/qaImport.svg').default,
uploadFile: require('./icons/file/uploadFile.svg').default,
closeLight: require('./icons/light/close.svg').default,
customTitle: require('./icons/light/customTitle.svg').default,
billRecordLight: require('./icons/light/billRecord.svg').default,
informLight: require('./icons/light/inform.svg').default,
payRecordLight: require('./icons/light/payRecord.svg').default,
loginoutLight: require('./icons/light/loginout.svg').default,
chatModelTag: require('./icons/light/chatModelTag.svg').default,
language_en: require('./icons/language/en.svg').default,
language_zh: require('./icons/language/zh.svg').default,
outlink_share: require('./icons/outlink/share.svg').default,
outlink_iframe: require('./icons/outlink/iframe.svg').default,
addCircle: require('./icons/circle/add.svg').default,
playFill: require('./icons/fill/play.svg').default,
courseLight: require('./icons/light/course.svg').default,
promotionLight: require('./icons/light/promotion.svg').default,
logsLight: require('./icons/light/logs.svg').default,
badLight: require('./icons/light/bad.svg').default,
markLight: require('./icons/light/mark.svg').default,
retryLight: require('./icons/light/retry.svg').default,
rightArrowLight: require('./icons/light/rightArrow.svg').default,
searchLight: require('./icons/light/search.svg').default,
plusFill: require('./icons/fill/plus.svg').default,
moveLight: require('./icons/light/move.svg').default
const iconPaths = {
appFill: () => import('./icons/fill/app.svg'),
appLight: () => import('./icons/light/app.svg'),
copy: () => import('./icons/copy.svg'),
chatSend: () => import('./icons/chatSend.svg'),
delete: () => import('./icons/delete.svg'),
stop: () => import('./icons/stop.svg'),
collectionLight: () => import('./icons/collectionLight.svg'),
collectionSolid: () => import('./icons/collectionSolid.svg'),
empty: () => import('./icons/empty.svg'),
back: () => import('./icons/back.svg'),
backFill: () => import('./icons/fill/back.svg'),
more: () => import('./icons/more.svg'),
tabbarChat: () => import('./icons/phoneTabbar/chat.svg'),
tabbarModel: () => import('./icons/phoneTabbar/app.svg'),
tabbarMore: () => import('./icons/phoneTabbar/more.svg'),
tabbarMe: () => import('./icons/phoneTabbar/me.svg'),
closeSolid: () => import('./icons/closeSolid.svg'),
wx: () => import('./icons/wx.svg'),
out: () => import('./icons/out.svg'),
git: () => import('./icons/git.svg'),
gitFill: () => import('./icons/fill/git.svg'),
googleFill: () => import('./icons/fill/google.svg'),
menu: () => import('./icons/menu.svg'),
edit: () => import('./icons/edit.svg'),
inform: () => import('./icons/inform.svg'),
export: () => import('./icons/export.svg'),
text: () => import('./icons/text.svg'),
history: () => import('./icons/history.svg'),
kbTest: () => import('./icons/kbTest.svg'),
date: () => import('./icons/date.svg'),
apikey: () => import('./icons/apikey.svg'),
save: () => import('./icons/save.svg'),
minus: () => import('./icons/minus.svg'),
chat: () => import('./icons/light/chat.svg'),
chatFill: () => import('./icons/fill/chat.svg'),
clear: () => import('./icons/light/clear.svg'),
apiLight: () => import('./icons/light/appApi.svg'),
overviewLight: () => import('./icons/light/overview.svg'),
settingLight: () => import('./icons/light/setting.svg'),
shareLight: () => import('./icons/light/share.svg'),
dbLight: () => import('./icons/light/db.svg'),
dbFill: () => import('./icons/fill/db.svg'),
appStoreLight: () => import('./icons/light/appStore.svg'),
appStoreFill: () => import('./icons/fill/appStore.svg'),
meLight: () => import('./icons/light/me.svg'),
meFill: () => import('./icons/fill/me.svg'),
welcomeText: () => import('./icons/modules/welcomeText.svg'),
variable: () => import('./icons/modules/variable.svg'),
setTop: () => import('./icons/light/setTop.svg'),
fullScreenLight: () => import('./icons/light/fullScreen.svg'),
voice: () => import('./icons/voice.svg'),
html: () => import('./icons/file/html.svg'),
pdf: () => import('./icons/file/pdf.svg'),
markdown: () => import('./icons/file/markdown.svg'),
importLight: () => import('./icons/light/import.svg'),
manualImport: () => import('./icons/file/manualImport.svg'),
indexImport: () => import('./icons/file/indexImport.svg'),
csvImport: () => import('./icons/file/csv.svg'),
qaImport: () => import('./icons/file/qaImport.svg'),
uploadFile: () => import('./icons/file/uploadFile.svg'),
closeLight: () => import('./icons/light/close.svg'),
customTitle: () => import('./icons/light/customTitle.svg'),
billRecordLight: () => import('./icons/light/billRecord.svg'),
informLight: () => import('./icons/light/inform.svg'),
payRecordLight: () => import('./icons/light/payRecord.svg'),
loginoutLight: () => import('./icons/light/loginout.svg'),
chatModelTag: () => import('./icons/light/chatModelTag.svg'),
language_en: () => import('./icons/language/en.svg'),
language_zh: () => import('./icons/language/zh.svg'),
outlink_share: () => import('./icons/outlink/share.svg'),
outlink_iframe: () => import('./icons/outlink/iframe.svg'),
addCircle: () => import('./icons/circle/add.svg'),
playFill: () => import('./icons/fill/play.svg'),
courseLight: () => import('./icons/light/course.svg'),
promotionLight: () => import('./icons/light/promotion.svg'),
logsLight: () => import('./icons/light/logs.svg'),
badLight: () => import('./icons/light/bad.svg'),
markLight: () => import('./icons/light/mark.svg'),
retryLight: () => import('./icons/light/retry.svg'),
rightArrowLight: () => import('./icons/light/rightArrow.svg'),
searchLight: () => import('./icons/light/search.svg'),
plusFill: () => import('./icons/fill/plus.svg'),
moveLight: () => import('./icons/light/move.svg')
};
export type IconName = keyof typeof map;
export type IconName = keyof typeof iconPaths;
const MyIcon = (
{ name, w = 'auto', h = 'auto', ...props }: { name: IconName } & IconProps,
ref: any
) => {
return map[name] ? (
const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconName } & IconProps) => {
const [IconComponent, setIconComponent] = useState<any>(null);
useEffect(() => {
iconPaths[name]()
.then((icon) => {
setIconComponent({ as: icon.default });
})
.catch((error) => console.log(error));
}, [name]);
return name ? (
<Icon
as={map[name]}
{...IconComponent}
w={w}
h={h}
boxSizing={'content-box'}
@@ -107,4 +114,4 @@ const MyIcon = (
) : null;
};
export default React.forwardRef(MyIcon);
export default MyIcon;

View File

@@ -156,6 +156,7 @@ const Navbar = ({ unread }: { unread: number }) => {
<Link
as={NextLink}
{...itemStyles}
prefetch
href={`/account?currentTab=inform`}
mb={0}
color={'#9096a5'}

View File

@@ -0,0 +1,40 @@
import React, { useState } from 'react';
import { Image, Skeleton, ImageProps } from '@chakra-ui/react';
// Image wrapped in a Chakra Skeleton placeholder:
// - shows the skeleton until the image either loads or errors,
// - swaps in /imgs/errImg.png when loading fails (fallbackStrategy 'onError'),
// - opens the source URL in a new tab when a successfully-loaded image is clicked.
// Any ImageProps passed in are spread last, so callers can override the defaults.
export const MyImage = (props: ImageProps) => {
  // true until onLoad/onError fires; drives the Skeleton's isLoaded state
  const [isLoading, setIsLoading] = useState(true);
  // true only after a successful load; gates the click-to-open behavior and cursor
  const [succeed, setSucceed] = useState(false);
  return (
    <Skeleton
      minH="100px"
      isLoaded={!isLoading}
      fadeDuration={2}
      display={'flex'}
      justifyContent={'center'}
      my={1}
    >
      <Image
        display={'inline-block'}
        borderRadius={'md'}
        alt={''}
        fallbackSrc={'/imgs/errImg.png'}
        fallbackStrategy={'onError'}
        cursor={succeed ? 'pointer' : 'default'}
        objectFit={'contain'}
        loading={'lazy'}
        onLoad={() => {
          setIsLoading(false);
          setSucceed(true);
        }}
        onError={() => setIsLoading(false)}
        onClick={() => {
          if (!succeed) return;
          window.open(props.src, '_blank');
        }}
        {...props}
      />
    </Skeleton>
  );
};
// Memoized: re-renders only when props change.
export default React.memo(MyImage);

View File

@@ -44,6 +44,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
})
.skip((pageNum - 1) * pageSize)
.limit(pageSize)
.sort({ uploadDate: -1 })
.toArray(),
collection.countDocuments(mongoWhere)
]);

View File

@@ -7,9 +7,9 @@ import { withNextCors } from '@/service/utils/tools';
import { PgDatasetTableName, TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
import { PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { getVectorModel } from '@/service/utils/data';
import { DatasetItemType } from '@/types/plugin';
import { countPromptTokens } from '@/utils/common/tiktoken';
export type Props = {
kbId: string;
@@ -102,9 +102,7 @@ export async function pushDataToKb({
const text = item.q + item.a;
// count q token
const token = modelToolMap.countTokens({
messages: [{ obj: 'System', value: item.q }]
});
const token = countPromptTokens(item.q, 'system');
if (token > modeMaxToken[mode]) {
return;

View File

@@ -1,61 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
type Props = {
messages: ChatItemType[];
model: string;
maxLen: number;
};
type Response = ChatItemType[];
// POST handler: authenticates the caller, validates { messages, model, maxLen }
// from the body, then responds with the prefix of `messages` whose accumulated
// token count stays under `maxLen` (computed by gpt_chatItemTokenSlice below).
// Any failure (auth or validation) is reported as a 500 JSON response.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    // Throws when the request carries no valid user credentials.
    await authUser({ req });
    const { messages, model, maxLen } = req.body as Props;
    // NOTE(review): `model` is required here but never used by the slice below —
    // confirm whether it is intentional (e.g. reserved for per-model tokenizers).
    if (!Array.isArray(messages) || !model || !maxLen) {
      throw new Error('params is error');
    }
    return jsonRes<Response>(res, {
      data: gpt_chatItemTokenSlice({
        messages,
        maxToken: maxLen
      })
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}
/**
 * Greedily take messages from the front of the list while the running token
 * count (as measured by countOpenAIToken over the accumulated slice) stays
 * strictly below maxToken.
 *
 * @param messages - chat items to slice, in order
 * @param maxToken - token budget; the first message that would reach it is dropped
 * @returns the token-bounded prefix; if even the first message exceeds the
 *          budget, that single first message is still returned (never empty
 *          when input is non-empty)
 */
export function gpt_chatItemTokenSlice({
  messages,
  maxToken
}: {
  messages: ChatItemType[];
  maxToken: number;
}) {
  const kept: ChatItemType[] = [];
  for (const message of messages) {
    // Measure the slice as it would look with this message included.
    const candidate = kept.concat(message);
    if (countOpenAIToken({ messages: candidate }) >= maxToken) {
      break;
    }
    kept.push(message);
  }
  // Guarantee at least one message when the input had any.
  return kept.length === 0 && messages[0] ? [messages[0]] : kept;
}

View File

@@ -79,6 +79,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
if (!Array.isArray(messages)) {
throw new Error('messages is not array');
}
if (messages.length === 0) {
throw new Error('messages is empty');
}
await connectToDatabase();
let startTime = Date.now();
@@ -120,7 +123,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
responseDetail = isOwner || responseDetail;
const prompts = history.concat(gptMessage2ChatType(messages));
if (prompts[prompts.length - 1].obj === 'AI') {
if (prompts[prompts.length - 1]?.obj === 'AI') {
prompts.pop();
}
// user question

View File

@@ -5,10 +5,10 @@ import { authKb, authUser } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { PgDatasetTableName } from '@/constants/plugin';
import { insertKbItem, PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { getVectorModel } from '@/service/utils/data';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetItemType } from '@/types/plugin';
import { countPromptTokens } from '@/utils/common/tiktoken';
export type Props = {
kbId: string;
@@ -35,9 +35,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const a = data?.a?.replace(/\\n/g, '\n').trim().replace(/'/g, '"');
// token check
const token = modelToolMap.countTokens({
messages: [{ obj: 'System', value: q }]
});
const token = countPromptTokens(q, 'system');
if (token > getVectorModel(kb.vectorModel).maxToken) {
throw new Error('Over Tokens');

View File

@@ -282,6 +282,7 @@ export function EditLinkModal({
return (
<MyModal
isOpen={true}
showCloseBtn={false}
onClose={() => {}}
title={isEdit ? titleMap.current.edit[type] : titleMap.current.create[type]}
>

View File

@@ -2,6 +2,7 @@ import { Box, Image, BoxProps, Grid, useTheme } from '@chakra-ui/react';
import React from 'react';
import { useTranslation } from 'next-i18next';
import { feConfigs } from '@/store/static';
import { MyImage } from '@/components/MyImage';
const Ability = () => {
const theme = useTheme();
@@ -48,13 +49,7 @@ const Ability = () => {
<Box {...DescStyles} color={'rgba(255,255,255,0.9)'}>
{t('home.AI Assistant Desc')}
</Box>
<Image
src="/imgs/home/ai_assiatant.png"
alt={''}
w={'100%'}
borderRadius={'lg'}
transform={'translateX(20px)'}
/>
<MyImage src="/imgs/home/ai_assiatant.png" alt={''} transform={'translateX(20px)'} />
</Box>
<Box
{...CardStyles}
@@ -67,13 +62,7 @@ const Ability = () => {
<Box {...DescStyles} color={'rgba(255,255,255,0.9)'}>
{t('home.Dateset Desc')}
</Box>
<Image
src="/imgs/home/dataset_import.png"
alt={''}
w={'90%'}
mx={'auto'}
borderRadius={'lg'}
/>
<MyImage src="/imgs/home/dataset_import.png" w={'90%'} mx={'auto'} borderRadius={'lg'} />
</Box>
</Grid>
<Grid
@@ -88,7 +77,7 @@ const Ability = () => {
<Box {...DescStyles} fontSize={['sm', 'md']}>
{t('home.Advanced Settings Desc')}
</Box>
<Image src="/imgs/home/advanced_settings.png" alt={''} w={'100%'} />
<MyImage src="/imgs/home/advanced_settings.png" alt={''} w={'100%'} />
</Box>
<Box
{...CardStyles}
@@ -97,7 +86,13 @@ const Ability = () => {
>
<Box {...TitleStyles}>{t('home.OpenAPI')}</Box>
<Box {...DescStyles}>{t('home.OpenAPI Desc')}</Box>
<Image src="/imgs/home/openapi.png" alt={''} w={'90%'} mx={'auto'} borderRadius={'lg'} />
<MyImage
src="/imgs/home/openapi.png"
alt={''}
w={'90%'}
mx={'auto'}
borderRadius={'lg'}
/>
</Box>
</Grid>
</Box>

View File

@@ -87,7 +87,7 @@ const Choice = () => {
boxShadow={theme.shadows.base}
borderRadius={'14px'}
>
<Image src={item.icon} w={'28px'} alt={''} />
<Image src={item.icon} w={'28px'} alt={''} loading={'lazy'} />
</Flex>
<Box ml={5}>
<Box fontSize={['lg', '2xl']} fontWeight={'bold'} color={'myGray.900'}>

View File

@@ -62,6 +62,7 @@ const Hero = () => {
maxW={['120%', '1000px']}
alt=""
draggable={false}
loading={'lazy'}
/>
<MyIcon
name={'playFill'}

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, { useEffect } from 'react';
import { Box } from '@chakra-ui/react';
import { feConfigs } from '@/store/static';
import { serviceSideProps } from '@/utils/i18n';
@@ -18,6 +18,11 @@ const Home = ({ homeUrl = '/' }: { homeUrl: string }) => {
router.replace(homeUrl);
}
useEffect(() => {
router.prefetch('/app/list');
router.prefetch('/login');
}, []);
return homeUrl === '/' ? (
<Box id="home" bg={'myWhite.600'} h={'100vh'} overflowY={'auto'} overflowX={'hidden'}>
<Box position={'fixed'} zIndex={10} top={0} left={0} right={0}>

View File

@@ -128,6 +128,7 @@ const FileSelect = ({
text,
maxLen: chunkLen
});
const fileItem: FileItemType = {
id: filesId[0],
filename: file.name,

View File

@@ -43,7 +43,7 @@ const QAImport = ({ kbId }: { kbId: string }) => {
const price = useMemo(() => {
const filesToken = files.reduce((sum, file) => sum + file.tokens, 0);
const promptTokens = files.reduce((sum, file) => sum + file.chunks.length, 0) * 139;
const totalToken = (filesToken + promptTokens) * 1.8;
const totalToken = (filesToken + promptTokens) * 2;
return formatPrice(totalToken * unitPrice);
}, [files, unitPrice]);

View File

@@ -1,4 +1,4 @@
import React, { useState, useCallback } from 'react';
import React, { useState, useCallback, useEffect } from 'react';
import styles from './index.module.scss';
import { Box, Flex, Image, useDisclosure } from '@chakra-ui/react';
import { PageTypeEnum } from '@/constants/user';
@@ -53,6 +53,10 @@ const Login = () => {
return <Component setPageType={setPageType} loginSuccess={loginSuccess} />;
}
useEffect(() => {
router.prefetch('/app/list');
}, []);
return (
<>
{feConfigs.googleClientVerKey && (
@@ -90,6 +94,7 @@ const Login = () => {
height={'100%'}
maxH={'450px'}
alt=""
loading={'lazy'}
/>
)}

View File

@@ -1,4 +1,4 @@
import React, { useCallback } from 'react';
import React, { useCallback, useEffect } from 'react';
import { useRouter } from 'next/router';
import { useGlobalStore } from '@/store/global';
import { ResLogin } from '@/api/response/user';
@@ -88,6 +88,10 @@ const provider = ({ code, state }: { code: string; state: string }) => {
return null;
});
useEffect(() => {
router.prefetch('/app/list');
}, []);
return <Loading />;
};

View File

@@ -1,7 +1,8 @@
import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
import { ChatRoleEnum } from '@/constants/chat';
import type { NextApiResponse } from 'next';
import { countMessagesTokens, countPromptTokens } from '@/utils/common/tiktoken';
import { adaptRole_Chat2Message } from '@/utils/common/adapt/message';
export type ChatCompletionResponseType = {
streamResponse: any;
@@ -11,39 +12,37 @@ export type ChatCompletionResponseType = {
};
export type StreamResponseType = {
chatResponse: any;
prompts: ChatItemType[];
messages: ChatItemType[];
res: NextApiResponse;
model: string;
[key: string]: any;
};
/* slice chat context by tokens */
export const ChatContextFilter = ({
model,
prompts = [],
export function ChatContextFilter({
messages = [],
maxTokens
}: {
model: string;
prompts: ChatItemType[];
messages: ChatItemType[];
maxTokens: number;
}) => {
if (!Array.isArray(prompts)) {
}) {
if (!Array.isArray(messages)) {
return [];
}
const rawTextLen = prompts.reduce((sum, item) => sum + item.value.length, 0);
const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0);
// If the text length is less than half of the maximum token, no calculation is required
if (rawTextLen < maxTokens * 0.5) {
return prompts;
return messages;
}
// filter startWith system prompt
const chatStartIndex = prompts.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemPrompts: ChatItemType[] = prompts.slice(0, chatStartIndex);
const chatPrompts: ChatItemType[] = prompts.slice(chatStartIndex);
const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemPrompts: ChatItemType[] = messages.slice(0, chatStartIndex);
const chatPrompts: ChatItemType[] = messages.slice(chatStartIndex);
// reduce token of systemPrompt
maxTokens -= modelToolMap.countTokens({
// reduce token of systemPrompt
maxTokens -= countMessagesTokens({
messages: systemPrompts
});
@@ -52,18 +51,18 @@ export const ChatContextFilter = ({
// 从后往前截取对话内容
for (let i = chatPrompts.length - 1; i >= 0; i--) {
chats.unshift(chatPrompts[i]);
const item = chatPrompts[i];
chats.unshift(item);
const tokens = modelToolMap.countTokens({
messages: chats
});
const tokens = countPromptTokens(item.value, adaptRole_Chat2Message(item.obj));
maxTokens -= tokens;
/* 整体 tokens 超出范围, system必须保留 */
if (tokens >= maxTokens) {
if (maxTokens <= 0) {
chats.shift();
break;
}
}
return [...systemPrompts, ...chats];
};
}

View File

@@ -7,10 +7,10 @@ import { sendInform } from '@/pages/api/user/inform/send';
import { authBalanceByUid } from '../utils/auth';
import { axiosConfig, getAIChatApi } from '../lib/openai';
import { ChatCompletionRequestMessage } from 'openai';
import { modelToolMap } from '@/utils/plugin';
import { gptMessage2ChatType } from '@/utils/adapt';
import { addLog } from '../utils/tools';
import { splitText2Chunks } from '@/utils/file';
import { countMessagesTokens } from '@/utils/common/tiktoken';
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -81,7 +81,7 @@ A2:
}
];
const promptsToken = modelToolMap.countTokens({
const promptsToken = countMessagesTokens({
messages: gptMessage2ChatType(messages)
});
const maxToken = modelTokenLimit - promptsToken;

View File

@@ -1,5 +1,5 @@
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
@@ -50,11 +50,10 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
}
];
const filterMessages = ChatContextFilter({
model: agentModel,
prompts: messages,
messages,
maxTokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
// function body
const agentFunction = {

View File

@@ -1,5 +1,5 @@
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
@@ -45,12 +45,10 @@ export async function dispatchContentExtract({
}
];
const filterMessages = ChatContextFilter({
// @ts-ignore
model: agentModel,
prompts: messages,
messages,
maxTokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
const properties: Record<
string,

View File

@@ -1,8 +1,6 @@
import type { NextApiResponse } from 'next';
import { sseResponse } from '@/service/utils/tools';
import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatContextFilter } from '@/service/utils/chat/index';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
@@ -17,6 +15,8 @@ import { UserModelSchema } from '@/types/mongoSchema';
import { textCensor } from '@/api/service/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
export type ChatProps = {
res: NextApiResponse;
@@ -142,7 +142,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
value: answer
});
const totalTokens = countOpenAIToken({
const totalTokens = countMessagesTokens({
messages: completeMessages
});
@@ -154,8 +154,8 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
completeMessages
};
} else {
const answer = stream ? '' : response.data.choices?.[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
const answer = response.data.choices?.[0].message?.content || '';
const totalTokens = response.data.usage?.total_tokens || 0;
const completeMessages = filterMessages.concat({
obj: ChatRoleEnum.AI,
@@ -194,8 +194,8 @@ function filterQuote({
quoteQA: ChatProps['quoteQA'];
model: ChatModelItemType;
}) {
const sliceResult = modelToolMap.tokenSlice({
maxToken: model.quoteMaxToken,
const sliceResult = sliceMessagesTB({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item) => ({
obj: ChatRoleEnum.System,
value: item.a ? `${item.q}\n${item.a}` : item.q
@@ -274,12 +274,11 @@ function getChatMessages({
];
const filterMessages = ChatContextFilter({
model: model.model,
prompts: messages,
messages,
maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
return {
messages: adaptMessages,
@@ -298,7 +297,7 @@ function getMaxTokens({
const tokensLimit = model.contextMaxToken;
/* count response max token */
const promptsToken = modelToolMap.countTokens({
const promptsToken = countMessagesTokens({
messages: filterMessages
});
maxToken = maxToken + promptsToken > tokensLimit ? tokensLimit - promptsToken : maxToken;

View File

@@ -8,6 +8,7 @@ import { initPg } from './pg';
import { createHashPassword } from '@/utils/tools';
import { createLogger, format, transports } from 'winston';
import 'winston-mongodb';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
/**
* connect MongoDB and init data
@@ -38,6 +39,8 @@ export async function connectToDatabase(): Promise<void> {
// init function
getInitConfig();
// init tikToken
getTikTokenEnc();
try {
mongoose.set('strictQuery', true);

View File

@@ -1,7 +1,7 @@
import type { Mongoose } from 'mongoose';
import type { Agent } from 'http';
import type { Pool } from 'pg';
import type { Tiktoken } from '@dqbd/tiktoken';
import type { Tiktoken } from 'js-tiktoken';
import type { Logger } from 'winston';
import { ChatModelItemType, QAModelItemType, VectorModelItemType } from './model';
import { TrackEventName } from '@/constants/common';
@@ -49,7 +49,7 @@ declare global {
var httpsAgent: Agent;
var qaQueueLen: number;
var vectorQueueLen: number;
var OpenAiEncMap: Tiktoken;
var TikToken: Tiktoken;
var logger: Logger;

View File

@@ -0,0 +1,37 @@
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
// Maps internal chat roles (ChatRoleEnum) to the OpenAI chat-completion
// message roles expected by the API.
const chat2Message = {
  [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
  [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
  [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
};
// Inverse mapping: OpenAI message roles back to internal chat roles.
// The Function role has no ChatRoleEnum counterpart and maps to the
// literal string 'function'.
const message2Chat = {
  [ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System,
  [ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
  [ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
  [ChatCompletionRequestMessageRoleEnum.Function]: 'function'
};
/** Translate an internal chat role into its OpenAI message role (undefined if unmapped). */
export function adaptRole_Chat2Message(role: `${ChatRoleEnum}`) {
  return chat2Message[role];
}
/** Translate an OpenAI message role back into the internal chat role. */
export function adaptRole_Message2Chat(role: `${ChatCompletionRequestMessageRoleEnum}`) {
  return message2Chat[role];
}
/**
 * Convert internal ChatItemType records into OpenAI-style GPT messages.
 *
 * @param messages - internal chat history items
 * @param reserveId - when true, each output message carries the source dataId
 * @returns messages shaped for the OpenAI chat-completion API; unmapped
 *          roles fall back to System, missing values become ''
 */
export const adaptChat2GptMessages = ({
  messages,
  reserveId
}: {
  messages: ChatItemType[];
  reserveId: boolean;
}): MessageItemType[] =>
  messages.map((item) => ({
    // Only attach dataId when the caller asked for it to be preserved.
    ...(reserveId ? { dataId: item.dataId } : {}),
    role: chat2Message[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
    content: item.value || ''
  }));

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,92 @@
/* Only the token of gpt-3.5-turbo is used */
import { ChatItemType } from '@/types/chat';
import { Tiktoken } from 'js-tiktoken/lite';
import { adaptChat2GptMessages } from '../adapt/message';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import encodingJson from './cl100k_base.json';
/* init tikToken obj */
/**
 * Lazily create the shared js-tiktoken encoder (cl100k_base ranks) and
 * cache it on window/global so only one instance is ever constructed.
 */
export function getTikTokenEnc() {
  const hasWindow = typeof window !== 'undefined';
  const hasGlobal = typeof global !== 'undefined';

  // Reuse a previously created encoder from either scope.
  if (hasWindow && window.TikToken) {
    return window.TikToken;
  }
  if (hasGlobal && global.TikToken) {
    return global.TikToken;
  }

  const encoder = new Tiktoken(encodingJson);
  if (hasWindow) {
    window.TikToken = encoder;
  }
  if (hasGlobal) {
    global.TikToken = encoder;
  }
  return encoder;
}
/* count one prompt tokens */
/**
 * Estimate the token count of a single prompt for the given role.
 * Falls back to the raw character length when encoding fails.
 */
export function countPromptTokens(prompt = '', role: `${ChatCompletionRequestMessageRoleEnum}`) {
  const enc = getTikTokenEnc();
  const text = `${role}\n${prompt}`;
  try {
    // +3 compensates for the per-message role framing tokens (estimate).
    return enc.encode(text).length + 3;
  } catch (error) {
    return text.length;
  }
}
/* count messages tokens */
/**
 * Sum the estimated tokens across a whole message history.
 * Messages are first adapted to the GPT role/content shape so the
 * per-message estimate matches what would be sent to the API.
 */
export function countMessagesTokens({ messages }: { messages: ChatItemType[] }) {
  const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
  return adaptMessages.reduce(
    (sum, item) => sum + countPromptTokens(item.content, item.role),
    0
  );
}
/**
 * Truncate text to at most `length` tokens.
 * Falls back to a plain character slice when tokenization fails.
 */
export function sliceTextByTokens({ text, length }: { text: string; length: number }) {
  const enc = getTikTokenEnc();
  try {
    return enc.decode(enc.encode(text).slice(0, length));
  } catch (error) {
    return text.slice(0, length);
  }
}
/* slice messages from top to bottom by maxTokens */
/**
 * Slice messages from top to bottom, keeping messages while the
 * accumulated token estimate stays within maxTokens.
 *
 * @param messages - internal chat history items (top = oldest)
 * @param maxTokens - token budget for the kept messages
 * @returns the leading messages that fit the budget; always at least the
 *          first message when the input is non-empty
 */
export function sliceMessagesTB({
  messages,
  maxTokens
}: {
  messages: ChatItemType[];
  maxTokens: number;
}) {
  const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
  let reduceTokens = maxTokens;
  const result: ChatItemType[] = [];

  for (let i = 0; i < adaptMessages.length; i++) {
    const item = adaptMessages[i];
    reduceTokens -= countPromptTokens(item.content, item.role);

    // Keep the message only while token budget remains. (The original
    // checked the per-message count, which is always positive, so the
    // maxTokens limit was never actually enforced.)
    if (reduceTokens > 0) {
      result.push(messages[i]);
    } else {
      break;
    }
  }

  // Guarantee at least one message so downstream calls never get an empty prompt.
  return result.length === 0 && messages[0] ? [messages[0]] : result;
}

View File

@@ -1,8 +1,8 @@
import mammoth from 'mammoth';
import Papa from 'papaparse';
import { getOpenAiEncMap } from './plugin/openai';
import { getErrText } from './tools';
import { uploadImg, postUploadFiles } from '@/api/support/file';
import { countPromptTokens } from './common/tiktoken';
/**
* upload file to mongo gridfs
@@ -206,16 +206,7 @@ export const splitText2Chunks = ({ text, maxLen }: { text: string; maxLen: numbe
chunks.push(chunk);
}
const tokens = (() => {
try {
const enc = getOpenAiEncMap();
const encodeText = enc.encode(chunks.join(''));
const tokens = encodeText.length;
return tokens;
} catch (error) {
return chunks.join('').length;
}
})();
const tokens = chunks.reduce((sum, chunk) => sum + countPromptTokens(chunk, 'system'), 0);
return {
chunks,

View File

@@ -1,8 +0,0 @@
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';
// Aggregates the OpenAI token helpers behind a single tool map used by callers
// that need token counting / text slicing / message slicing.
export const modelToolMap = {
  countTokens: countOpenAIToken,
  sliceText: openAiSliceTextByToken,
  tokenSlice: gpt_chatItemTokenSlice
};

View File

@@ -1,100 +0,0 @@
import { encoding_for_model } from '@dqbd/tiktoken';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import axios from 'axios';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
/* Lazily build the @dqbd/tiktoken encoder for gpt-3.5-turbo and cache it on
 * window/global so it is only constructed once per process/page. */
export const getOpenAiEncMap = () => {
  if (typeof window !== 'undefined' && window.OpenAiEncMap) {
    return window.OpenAiEncMap;
  }
  if (typeof global !== 'undefined' && global.OpenAiEncMap) {
    return global.OpenAiEncMap;
  }
  // Extra special tokens registered for the ChatML message framing.
  const enc = encoding_for_model('gpt-3.5-turbo', {
    '<|im_start|>': 100264,
    '<|im_end|>': 100265,
    '<|im_sep|>': 100266
  });
  if (typeof window !== 'undefined') {
    window.OpenAiEncMap = enc;
  }
  if (typeof global !== 'undefined') {
    global.OpenAiEncMap = enc;
  }
  return enc;
};
/* Convert internal ChatItemType records into OpenAI-style messages.
 * When reserveId is true, the source dataId is carried over; unmapped
 * roles fall back to System and missing values become ''. */
export const adaptChatItem_openAI = ({
  messages,
  reserveId
}: {
  messages: ChatItemType[];
  reserveId: boolean;
}): MessageItemType[] => {
  // Internal role -> OpenAI chat-completion role.
  const map = {
    [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
    [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
    [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
  };
  return messages.map((item) => ({
    ...(reserveId && { dataId: item.dataId }),
    role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
    content: item.value || ''
  }));
};
/* Estimate total tokens for a message list using the shared tiktoken encoder. */
export function countOpenAIToken({ messages }: { messages: ChatItemType[] }) {
  const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
  const token = adaptMessages.reduce((sum, item) => {
    const text = `${item.role}\n${item.content}`;
    /* use textLen as tokens if encode error */
    const tokens = (() => {
      try {
        const enc = getOpenAiEncMap();
        const encodeText = enc.encode(text);
        return encodeText.length + 3; // +3 compensates for message framing tokens (estimate)
      } catch (error) {
        return text.length;
      }
    })();
    return sum + tokens;
  }, 0);
  return token;
}
/* Truncate text to at most `length` tokens. enc.decode yields raw bytes
 * here, hence the TextDecoder; falls back to a character slice on error. */
export const openAiSliceTextByToken = ({ text, length }: { text: string; length: number }) => {
  const enc = getOpenAiEncMap();
  try {
    const encodeText = enc.encode(text);
    const decoder = new TextDecoder();
    return decoder.decode(enc.decode(encodeText.slice(0, length)));
  } catch (error) {
    return text.slice(0, length);
  }
};
/* Validate an OpenAI API key by querying the billing subscription endpoint
 * (via a Sealos-hosted proxy). Rejects with a readable message on HTTP error. */
export const authOpenAiKey = async (key: string) => {
  return axios
    .get('https://ccdbwscohpmu.cloud.sealos.io/openai/v1/dashboard/billing/subscription', {
      headers: {
        Authorization: `Bearer ${key}`
      }
    })
    .then((res) => {
      if (!res.data.access_until) {
        // NOTE(review): resolves (not rejects) with the warning message when
        // access_until is missing — confirm callers treat this as a failure.
        return Promise.resolve('OpenAI Key 可能无效');
      }
    })
    .catch((err) => {
      console.log(err);
      return Promise.reject(err?.response?.data?.error?.message || 'OpenAI Key 可能无效');
    });
};