System optimize (#303)

This commit is contained in:
Archer
2023-09-15 10:21:46 +08:00
committed by GitHub
parent 7c1ec04380
commit 4365a94ea9
36 changed files with 378 additions and 369 deletions

View File

@@ -17,10 +17,6 @@ const nextConfig = {
} }
}; };
} }
config.experiments = {
asyncWebAssembly: true,
layers: true
};
config.module = { config.module = {
...config.module, ...config.module,
rules: config.module.rules.concat([ rules: config.module.rules.concat([

View File

@@ -12,7 +12,6 @@
"@chakra-ui/icons": "^2.0.17", "@chakra-ui/icons": "^2.0.17",
"@chakra-ui/react": "^2.7.0", "@chakra-ui/react": "^2.7.0",
"@chakra-ui/system": "^2.5.8", "@chakra-ui/system": "^2.5.8",
"@dqbd/tiktoken": "^1.0.7",
"@emotion/react": "^11.10.6", "@emotion/react": "^11.10.6",
"@emotion/styled": "^11.10.6", "@emotion/styled": "^11.10.6",
"@mozilla/readability": "^0.4.4", "@mozilla/readability": "^0.4.4",
@@ -32,6 +31,7 @@
"i18next": "^22.5.1", "i18next": "^22.5.1",
"immer": "^9.0.19", "immer": "^9.0.19",
"js-cookie": "^3.0.5", "js-cookie": "^3.0.5",
"js-tiktoken": "^1.0.7",
"jschardet": "^3.0.0", "jschardet": "^3.0.0",
"jsdom": "^22.1.0", "jsdom": "^22.1.0",
"jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.0",

20
client/pnpm-lock.yaml generated
View File

@@ -10,9 +10,6 @@ dependencies:
'@chakra-ui/system': '@chakra-ui/system':
specifier: ^2.5.8 specifier: ^2.5.8
version: registry.npmmirror.com/@chakra-ui/system@2.5.8(@emotion/react@11.10.6)(@emotion/styled@11.10.6)(react@18.2.0) version: registry.npmmirror.com/@chakra-ui/system@2.5.8(@emotion/react@11.10.6)(@emotion/styled@11.10.6)(react@18.2.0)
'@dqbd/tiktoken':
specifier: ^1.0.7
version: registry.npmmirror.com/@dqbd/tiktoken@1.0.7
'@emotion/react': '@emotion/react':
specifier: ^11.10.6 specifier: ^11.10.6
version: registry.npmmirror.com/@emotion/react@11.10.6(@types/react@18.0.28)(react@18.2.0) version: registry.npmmirror.com/@emotion/react@11.10.6(@types/react@18.0.28)(react@18.2.0)
@@ -70,6 +67,9 @@ dependencies:
js-cookie: js-cookie:
specifier: ^3.0.5 specifier: ^3.0.5
version: registry.npmmirror.com/js-cookie@3.0.5 version: registry.npmmirror.com/js-cookie@3.0.5
js-tiktoken:
specifier: ^1.0.7
version: registry.npmmirror.com/js-tiktoken@1.0.7
jschardet: jschardet:
specifier: ^3.0.0 specifier: ^3.0.0
version: registry.npmmirror.com/jschardet@3.0.0 version: registry.npmmirror.com/jschardet@3.0.0
@@ -4214,12 +4214,6 @@ packages:
kuler: registry.npmmirror.com/kuler@2.0.0 kuler: registry.npmmirror.com/kuler@2.0.0
dev: false dev: false
registry.npmmirror.com/@dqbd/tiktoken@1.0.7:
resolution: {integrity: sha512-bhR5k5W+8GLzysjk8zTMVygQZsgvf7W1F0IlL4ZQ5ugjo5rCyiwGM5d8DYriXspytfu98tv59niang3/T+FoDw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@dqbd/tiktoken/-/tiktoken-1.0.7.tgz}
name: '@dqbd/tiktoken'
version: 1.0.7
dev: false
registry.npmmirror.com/@emotion/babel-plugin@11.11.0: registry.npmmirror.com/@emotion/babel-plugin@11.11.0:
resolution: {integrity: sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz} resolution: {integrity: sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz}
name: '@emotion/babel-plugin' name: '@emotion/babel-plugin'
@@ -8955,6 +8949,14 @@ packages:
version: 4.4.1 version: 4.4.1
dev: true dev: true
registry.npmmirror.com/js-tiktoken@1.0.7:
resolution: {integrity: sha512-biba8u/clw7iesNEWLOLwrNGoBP2lA+hTaBLs/D45pJdUPFXyxD6nhcDVtADChghv4GgyAiMKYMiRx7x6h7Biw==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/js-tiktoken/-/js-tiktoken-1.0.7.tgz}
name: js-tiktoken
version: 1.0.7
dependencies:
base64-js: registry.npmmirror.com/base64-js@1.5.1
dev: false
registry.npmmirror.com/js-tokens@4.0.0: registry.npmmirror.com/js-tokens@4.0.0:
resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz} resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/js-tokens/-/js-tokens-4.0.0.tgz}
name: js-tokens name: js-tokens

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1683254592786" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="1352" xmlns:xlink="http://www.w3.org/1999/xlink" width="64" height="64"><path d="M450.09164971 42.7357605a123.86965959 123.86965959 0 0 1 123.76374272 0L889.06369251 222.84722403a123.92261859 123.92261859 0 0 1 62.06722618 107.2407279v360.38180181c0 44.22025102-23.6194395 85.05116445-61.9613093 107.13480989l-0.10591688 0.10591687-315.20830008 180.11146353a123.86965959 123.86965959 0 0 1-123.76374272 0L134.93630749 797.7104805a123.92261859 123.92261859 0 0 1-62.06722618-107.24072676V330.08795193c0-44.22025102 23.67239737-85.05116445 61.9613093-107.13481102l0.10591688-0.10591688z m462.16781482 223.59029646a33.78744889 33.78744889 0 0 0-46.17971029-12.28634453l-353.81496263 204.57823687L158.44982898 254.09267029a33.78744889 33.78744889 0 0 0-33.89336463 58.46605597l353.6031289 204.47232v430.02207687c0 18.00585102 15.14609778 32.62236445 33.84040675 32.62236444a33.20490667 33.20490667 0 0 0 33.73449102-32.62236444V517.29583787l354.18567111-204.79006948a33.78744889 33.78744889 0 0 0 14.66947129-41.20162304z" p-id="1353"></path></svg>

After

Width:  |  Height:  |  Size: 1.3 KiB

File diff suppressed because one or more lines are too long

View File

@@ -26,8 +26,7 @@ import {
import { Box, Card, Flex, Input, Textarea, Button, useTheme, BoxProps } from '@chakra-ui/react'; import { Box, Card, Flex, Input, Textarea, Button, useTheme, BoxProps } from '@chakra-ui/react';
import { feConfigs } from '@/store/static'; import { feConfigs } from '@/store/static';
import { event } from '@/utils/plugin/eventbus'; import { event } from '@/utils/plugin/eventbus';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { useMarkdown } from '@/hooks/useMarkdown'; import { useMarkdown } from '@/hooks/useMarkdown';
import { VariableItemType } from '@/types/app'; import { VariableItemType } from '@/types/app';
import { VariableInputEnum } from '@/constants/app'; import { VariableInputEnum } from '@/constants/app';
@@ -344,7 +343,7 @@ const ChatBox = (
const abortSignal = new AbortController(); const abortSignal = new AbortController();
controller.current = abortSignal; controller.current = abortSignal;
const messages = adaptChatItem_openAI({ messages: newChatList, reserveId: true }); const messages = adaptChat2GptMessages({ messages: newChatList, reserveId: true });
const { responseData } = await onStartChat({ const { responseData } = await onStartChat({
chatList: newChatList, chatList: newChatList,

View File

@@ -1,102 +1,109 @@
import React from 'react'; import React, { useEffect, useState } from 'react';
import type { IconProps } from '@chakra-ui/react'; import type { IconProps } from '@chakra-ui/react';
import { Icon } from '@chakra-ui/react'; import { Icon } from '@chakra-ui/react';
const map = { const iconPaths = {
appFill: require('./icons/fill/app.svg').default, appFill: () => import('./icons/fill/app.svg'),
appLight: require('./icons/light/app.svg').default, appLight: () => import('./icons/light/app.svg'),
copy: require('./icons/copy.svg').default, copy: () => import('./icons/copy.svg'),
chatSend: require('./icons/chatSend.svg').default, chatSend: () => import('./icons/chatSend.svg'),
delete: require('./icons/delete.svg').default, delete: () => import('./icons/delete.svg'),
stop: require('./icons/stop.svg').default, stop: () => import('./icons/stop.svg'),
collectionLight: require('./icons/collectionLight.svg').default, collectionLight: () => import('./icons/collectionLight.svg'),
collectionSolid: require('./icons/collectionSolid.svg').default, collectionSolid: () => import('./icons/collectionSolid.svg'),
empty: require('./icons/empty.svg').default, empty: () => import('./icons/empty.svg'),
back: require('./icons/back.svg').default, back: () => import('./icons/back.svg'),
backFill: require('./icons/fill/back.svg').default, backFill: () => import('./icons/fill/back.svg'),
more: require('./icons/more.svg').default, more: () => import('./icons/more.svg'),
tabbarChat: require('./icons/phoneTabbar/chat.svg').default, tabbarChat: () => import('./icons/phoneTabbar/chat.svg'),
tabbarModel: require('./icons/phoneTabbar/app.svg').default, tabbarModel: () => import('./icons/phoneTabbar/app.svg'),
tabbarMore: require('./icons/phoneTabbar/more.svg').default, tabbarMore: () => import('./icons/phoneTabbar/more.svg'),
tabbarMe: require('./icons/phoneTabbar/me.svg').default, tabbarMe: () => import('./icons/phoneTabbar/me.svg'),
closeSolid: require('./icons/closeSolid.svg').default, closeSolid: () => import('./icons/closeSolid.svg'),
wx: require('./icons/wx.svg').default, wx: () => import('./icons/wx.svg'),
out: require('./icons/out.svg').default, out: () => import('./icons/out.svg'),
git: require('./icons/git.svg').default, git: () => import('./icons/git.svg'),
gitFill: require('./icons/fill/git.svg').default, gitFill: () => import('./icons/fill/git.svg'),
googleFill: require('./icons/fill/google.svg').default, googleFill: () => import('./icons/fill/google.svg'),
menu: require('./icons/menu.svg').default, menu: () => import('./icons/menu.svg'),
edit: require('./icons/edit.svg').default, edit: () => import('./icons/edit.svg'),
inform: require('./icons/inform.svg').default, inform: () => import('./icons/inform.svg'),
export: require('./icons/export.svg').default, export: () => import('./icons/export.svg'),
text: require('./icons/text.svg').default, text: () => import('./icons/text.svg'),
history: require('./icons/history.svg').default, history: () => import('./icons/history.svg'),
kbTest: require('./icons/kbTest.svg').default, kbTest: () => import('./icons/kbTest.svg'),
date: require('./icons/date.svg').default, date: () => import('./icons/date.svg'),
apikey: require('./icons/apikey.svg').default, apikey: () => import('./icons/apikey.svg'),
save: require('./icons/save.svg').default, save: () => import('./icons/save.svg'),
minus: require('./icons/minus.svg').default, minus: () => import('./icons/minus.svg'),
chat: require('./icons/light/chat.svg').default, chat: () => import('./icons/light/chat.svg'),
chatFill: require('./icons/fill/chat.svg').default, chatFill: () => import('./icons/fill/chat.svg'),
clear: require('./icons/light/clear.svg').default, clear: () => import('./icons/light/clear.svg'),
apiLight: require('./icons/light/appApi.svg').default, apiLight: () => import('./icons/light/appApi.svg'),
overviewLight: require('./icons/light/overview.svg').default, overviewLight: () => import('./icons/light/overview.svg'),
settingLight: require('./icons/light/setting.svg').default, settingLight: () => import('./icons/light/setting.svg'),
shareLight: require('./icons/light/share.svg').default, shareLight: () => import('./icons/light/share.svg'),
dbLight: require('./icons/light/db.svg').default, dbLight: () => import('./icons/light/db.svg'),
dbFill: require('./icons/fill/db.svg').default, dbFill: () => import('./icons/fill/db.svg'),
appStoreLight: require('./icons/light/appStore.svg').default, appStoreLight: () => import('./icons/light/appStore.svg'),
appStoreFill: require('./icons/fill/appStore.svg').default, appStoreFill: () => import('./icons/fill/appStore.svg'),
meLight: require('./icons/light/me.svg').default, meLight: () => import('./icons/light/me.svg'),
meFill: require('./icons/fill/me.svg').default, meFill: () => import('./icons/fill/me.svg'),
welcomeText: require('./icons/modules/welcomeText.svg').default, welcomeText: () => import('./icons/modules/welcomeText.svg'),
variable: require('./icons/modules/variable.svg').default, variable: () => import('./icons/modules/variable.svg'),
setTop: require('./icons/light/setTop.svg').default, setTop: () => import('./icons/light/setTop.svg'),
fullScreenLight: require('./icons/light/fullScreen.svg').default, fullScreenLight: () => import('./icons/light/fullScreen.svg'),
voice: require('./icons/voice.svg').default, voice: () => import('./icons/voice.svg'),
html: require('./icons/file/html.svg').default, html: () => import('./icons/file/html.svg'),
pdf: require('./icons/file/pdf.svg').default, pdf: () => import('./icons/file/pdf.svg'),
markdown: require('./icons/file/markdown.svg').default, markdown: () => import('./icons/file/markdown.svg'),
importLight: require('./icons/light/import.svg').default, importLight: () => import('./icons/light/import.svg'),
manualImport: require('./icons/file/manualImport.svg').default, manualImport: () => import('./icons/file/manualImport.svg'),
indexImport: require('./icons/file/indexImport.svg').default, indexImport: () => import('./icons/file/indexImport.svg'),
csvImport: require('./icons/file/csv.svg').default, csvImport: () => import('./icons/file/csv.svg'),
qaImport: require('./icons/file/qaImport.svg').default, qaImport: () => import('./icons/file/qaImport.svg'),
uploadFile: require('./icons/file/uploadFile.svg').default, uploadFile: () => import('./icons/file/uploadFile.svg'),
closeLight: require('./icons/light/close.svg').default, closeLight: () => import('./icons/light/close.svg'),
customTitle: require('./icons/light/customTitle.svg').default, customTitle: () => import('./icons/light/customTitle.svg'),
billRecordLight: require('./icons/light/billRecord.svg').default, billRecordLight: () => import('./icons/light/billRecord.svg'),
informLight: require('./icons/light/inform.svg').default, informLight: () => import('./icons/light/inform.svg'),
payRecordLight: require('./icons/light/payRecord.svg').default, payRecordLight: () => import('./icons/light/payRecord.svg'),
loginoutLight: require('./icons/light/loginout.svg').default, loginoutLight: () => import('./icons/light/loginout.svg'),
chatModelTag: require('./icons/light/chatModelTag.svg').default, chatModelTag: () => import('./icons/light/chatModelTag.svg'),
language_en: require('./icons/language/en.svg').default, language_en: () => import('./icons/language/en.svg'),
language_zh: require('./icons/language/zh.svg').default, language_zh: () => import('./icons/language/zh.svg'),
outlink_share: require('./icons/outlink/share.svg').default, outlink_share: () => import('./icons/outlink/share.svg'),
outlink_iframe: require('./icons/outlink/iframe.svg').default, outlink_iframe: () => import('./icons/outlink/iframe.svg'),
addCircle: require('./icons/circle/add.svg').default, addCircle: () => import('./icons/circle/add.svg'),
playFill: require('./icons/fill/play.svg').default, playFill: () => import('./icons/fill/play.svg'),
courseLight: require('./icons/light/course.svg').default, courseLight: () => import('./icons/light/course.svg'),
promotionLight: require('./icons/light/promotion.svg').default, promotionLight: () => import('./icons/light/promotion.svg'),
logsLight: require('./icons/light/logs.svg').default, logsLight: () => import('./icons/light/logs.svg'),
badLight: require('./icons/light/bad.svg').default, badLight: () => import('./icons/light/bad.svg'),
markLight: require('./icons/light/mark.svg').default, markLight: () => import('./icons/light/mark.svg'),
retryLight: require('./icons/light/retry.svg').default, retryLight: () => import('./icons/light/retry.svg'),
rightArrowLight: require('./icons/light/rightArrow.svg').default, rightArrowLight: () => import('./icons/light/rightArrow.svg'),
searchLight: require('./icons/light/search.svg').default, searchLight: () => import('./icons/light/search.svg'),
plusFill: require('./icons/fill/plus.svg').default, plusFill: () => import('./icons/fill/plus.svg'),
moveLight: require('./icons/light/move.svg').default moveLight: () => import('./icons/light/move.svg')
}; };
export type IconName = keyof typeof map; export type IconName = keyof typeof iconPaths;
const MyIcon = ( const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconName } & IconProps) => {
{ name, w = 'auto', h = 'auto', ...props }: { name: IconName } & IconProps, const [IconComponent, setIconComponent] = useState<any>(null);
ref: any
) => { useEffect(() => {
return map[name] ? ( iconPaths[name]()
.then((icon) => {
setIconComponent({ as: icon.default });
})
.catch((error) => console.log(error));
}, [name]);
return name ? (
<Icon <Icon
as={map[name]} {...IconComponent}
w={w} w={w}
h={h} h={h}
boxSizing={'content-box'} boxSizing={'content-box'}
@@ -107,4 +114,4 @@ const MyIcon = (
) : null; ) : null;
}; };
export default React.forwardRef(MyIcon); export default MyIcon;

View File

@@ -156,6 +156,7 @@ const Navbar = ({ unread }: { unread: number }) => {
<Link <Link
as={NextLink} as={NextLink}
{...itemStyles} {...itemStyles}
prefetch
href={`/account?currentTab=inform`} href={`/account?currentTab=inform`}
mb={0} mb={0}
color={'#9096a5'} color={'#9096a5'}

View File

@@ -0,0 +1,40 @@
import React, { useState } from 'react';
import { Image, Skeleton, ImageProps } from '@chakra-ui/react';
/**
 * Image wrapped in a Chakra Skeleton placeholder.
 * Shows the skeleton until the image finishes loading (or errors),
 * falls back to /imgs/errImg.png on error, and opens the source URL
 * in a new tab when a successfully loaded image is clicked.
 */
export const MyImage = (props: ImageProps) => {
  // Skeleton stays visible while this is true.
  const [loading, setLoading] = useState(true);
  // Only images that actually rendered are clickable.
  const [loaded, setLoaded] = useState(false);

  const handleLoad = () => {
    setLoading(false);
    setLoaded(true);
  };

  const handleError = () => setLoading(false);

  const handleClick = () => {
    if (!loaded) return;
    window.open(props.src, '_blank');
  };

  return (
    <Skeleton
      minH="100px"
      isLoaded={!loading}
      fadeDuration={2}
      display={'flex'}
      justifyContent={'center'}
      my={1}
    >
      <Image
        display={'inline-block'}
        borderRadius={'md'}
        alt={''}
        fallbackSrc={'/imgs/errImg.png'}
        fallbackStrategy={'onError'}
        cursor={loaded ? 'pointer' : 'default'}
        objectFit={'contain'}
        loading={'lazy'}
        onLoad={handleLoad}
        onError={handleError}
        onClick={handleClick}
        {...props}
      />
    </Skeleton>
  );
};
export default React.memo(MyImage);

View File

@@ -44,6 +44,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}) })
.skip((pageNum - 1) * pageSize) .skip((pageNum - 1) * pageSize)
.limit(pageSize) .limit(pageSize)
.sort({ uploadDate: -1 })
.toArray(), .toArray(),
collection.countDocuments(mongoWhere) collection.countDocuments(mongoWhere)
]); ]);

View File

@@ -7,9 +7,9 @@ import { withNextCors } from '@/service/utils/tools';
import { PgDatasetTableName, TrainingModeEnum } from '@/constants/plugin'; import { PgDatasetTableName, TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools'; import { startQueue } from '@/service/utils/tools';
import { PgClient } from '@/service/pg'; import { PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { getVectorModel } from '@/service/utils/data'; import { getVectorModel } from '@/service/utils/data';
import { DatasetItemType } from '@/types/plugin'; import { DatasetItemType } from '@/types/plugin';
import { countPromptTokens } from '@/utils/common/tiktoken';
export type Props = { export type Props = {
kbId: string; kbId: string;
@@ -102,9 +102,7 @@ export async function pushDataToKb({
const text = item.q + item.a; const text = item.q + item.a;
// count q token // count q token
const token = modelToolMap.countTokens({ const token = countPromptTokens(item.q, 'system');
messages: [{ obj: 'System', value: item.q }]
});
if (token > modeMaxToken[mode]) { if (token > modeMaxToken[mode]) {
return; return;

View File

@@ -1,61 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
// Request body for the token-slice endpoint.
type Props = {
messages: ChatItemType[];
// Model identifier; validated as present but not used by the slice itself.
model: string;
// Maximum token budget for the returned history.
maxLen: number;
};
// Response payload: the (possibly truncated) message history.
type Response = ChatItemType[];
// API route: authenticates the caller, validates the body, and returns the
// longest prefix of `messages` that fits within `maxLen` tokens.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
// Reject unauthenticated requests before touching the payload.
await authUser({ req });
const { messages, model, maxLen } = req.body as Props;
// NOTE(review): `model` is checked for presence but never passed on —
// presumably a leftover from an earlier token-counting signature; confirm.
// Also note a falsy `maxLen` of 0 is rejected here.
if (!Array.isArray(messages) || !model || !maxLen) {
throw new Error('params is error');
}
return jsonRes<Response>(res, {
data: gpt_chatItemTokenSlice({
messages,
maxToken: maxLen
})
});
} catch (err) {
// Any failure (auth, validation, counting) surfaces as a 500 payload.
jsonRes(res, {
code: 500,
error: err
});
}
}
/**
 * Greedily keep messages from the front of `messages` while the running
 * token count (per countOpenAIToken) stays below `maxToken`.
 * If even the first message exceeds the budget, it is still returned alone
 * so the caller never receives an empty history for non-empty input.
 */
export function gpt_chatItemTokenSlice({
  messages,
  maxToken
}: {
  messages: ChatItemType[];
  maxToken: number;
}) {
  const kept: ChatItemType[] = [];
  for (const message of messages) {
    const candidate = [...kept, message];
    // Stop at the first message that would push the total to/over the budget.
    if (countOpenAIToken({ messages: candidate }) >= maxToken) break;
    kept.push(message);
  }
  // Guarantee at least one message when the input was non-empty.
  return kept.length === 0 && messages[0] ? [messages[0]] : kept;
}

View File

@@ -79,6 +79,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
if (!Array.isArray(messages)) { if (!Array.isArray(messages)) {
throw new Error('messages is not array'); throw new Error('messages is not array');
} }
if (messages.length === 0) {
throw new Error('messages is empty');
}
await connectToDatabase(); await connectToDatabase();
let startTime = Date.now(); let startTime = Date.now();
@@ -120,7 +123,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
responseDetail = isOwner || responseDetail; responseDetail = isOwner || responseDetail;
const prompts = history.concat(gptMessage2ChatType(messages)); const prompts = history.concat(gptMessage2ChatType(messages));
if (prompts[prompts.length - 1].obj === 'AI') { if (prompts[prompts.length - 1]?.obj === 'AI') {
prompts.pop(); prompts.pop();
} }
// user question // user question

View File

@@ -5,10 +5,10 @@ import { authKb, authUser } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools'; import { withNextCors } from '@/service/utils/tools';
import { PgDatasetTableName } from '@/constants/plugin'; import { PgDatasetTableName } from '@/constants/plugin';
import { insertKbItem, PgClient } from '@/service/pg'; import { insertKbItem, PgClient } from '@/service/pg';
import { modelToolMap } from '@/utils/plugin';
import { getVectorModel } from '@/service/utils/data'; import { getVectorModel } from '@/service/utils/data';
import { getVector } from '@/pages/api/openapi/plugin/vector'; import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetItemType } from '@/types/plugin'; import { DatasetItemType } from '@/types/plugin';
import { countPromptTokens } from '@/utils/common/tiktoken';
export type Props = { export type Props = {
kbId: string; kbId: string;
@@ -35,9 +35,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const a = data?.a?.replace(/\\n/g, '\n').trim().replace(/'/g, '"'); const a = data?.a?.replace(/\\n/g, '\n').trim().replace(/'/g, '"');
// token check // token check
const token = modelToolMap.countTokens({ const token = countPromptTokens(q, 'system');
messages: [{ obj: 'System', value: q }]
});
if (token > getVectorModel(kb.vectorModel).maxToken) { if (token > getVectorModel(kb.vectorModel).maxToken) {
throw new Error('Over Tokens'); throw new Error('Over Tokens');

View File

@@ -282,6 +282,7 @@ export function EditLinkModal({
return ( return (
<MyModal <MyModal
isOpen={true} isOpen={true}
showCloseBtn={false}
onClose={() => {}} onClose={() => {}}
title={isEdit ? titleMap.current.edit[type] : titleMap.current.create[type]} title={isEdit ? titleMap.current.edit[type] : titleMap.current.create[type]}
> >

View File

@@ -2,6 +2,7 @@ import { Box, Image, BoxProps, Grid, useTheme } from '@chakra-ui/react';
import React from 'react'; import React from 'react';
import { useTranslation } from 'next-i18next'; import { useTranslation } from 'next-i18next';
import { feConfigs } from '@/store/static'; import { feConfigs } from '@/store/static';
import { MyImage } from '@/components/MyImage';
const Ability = () => { const Ability = () => {
const theme = useTheme(); const theme = useTheme();
@@ -48,13 +49,7 @@ const Ability = () => {
<Box {...DescStyles} color={'rgba(255,255,255,0.9)'}> <Box {...DescStyles} color={'rgba(255,255,255,0.9)'}>
{t('home.AI Assistant Desc')} {t('home.AI Assistant Desc')}
</Box> </Box>
<Image <MyImage src="/imgs/home/ai_assiatant.png" alt={''} transform={'translateX(20px)'} />
src="/imgs/home/ai_assiatant.png"
alt={''}
w={'100%'}
borderRadius={'lg'}
transform={'translateX(20px)'}
/>
</Box> </Box>
<Box <Box
{...CardStyles} {...CardStyles}
@@ -67,13 +62,7 @@ const Ability = () => {
<Box {...DescStyles} color={'rgba(255,255,255,0.9)'}> <Box {...DescStyles} color={'rgba(255,255,255,0.9)'}>
{t('home.Dateset Desc')} {t('home.Dateset Desc')}
</Box> </Box>
<Image <MyImage src="/imgs/home/dataset_import.png" w={'90%'} mx={'auto'} borderRadius={'lg'} />
src="/imgs/home/dataset_import.png"
alt={''}
w={'90%'}
mx={'auto'}
borderRadius={'lg'}
/>
</Box> </Box>
</Grid> </Grid>
<Grid <Grid
@@ -88,7 +77,7 @@ const Ability = () => {
<Box {...DescStyles} fontSize={['sm', 'md']}> <Box {...DescStyles} fontSize={['sm', 'md']}>
{t('home.Advanced Settings Desc')} {t('home.Advanced Settings Desc')}
</Box> </Box>
<Image src="/imgs/home/advanced_settings.png" alt={''} w={'100%'} /> <MyImage src="/imgs/home/advanced_settings.png" alt={''} w={'100%'} />
</Box> </Box>
<Box <Box
{...CardStyles} {...CardStyles}
@@ -97,7 +86,13 @@ const Ability = () => {
> >
<Box {...TitleStyles}>{t('home.OpenAPI')}</Box> <Box {...TitleStyles}>{t('home.OpenAPI')}</Box>
<Box {...DescStyles}>{t('home.OpenAPI Desc')}</Box> <Box {...DescStyles}>{t('home.OpenAPI Desc')}</Box>
<Image src="/imgs/home/openapi.png" alt={''} w={'90%'} mx={'auto'} borderRadius={'lg'} /> <MyImage
src="/imgs/home/openapi.png"
alt={''}
w={'90%'}
mx={'auto'}
borderRadius={'lg'}
/>
</Box> </Box>
</Grid> </Grid>
</Box> </Box>

View File

@@ -87,7 +87,7 @@ const Choice = () => {
boxShadow={theme.shadows.base} boxShadow={theme.shadows.base}
borderRadius={'14px'} borderRadius={'14px'}
> >
<Image src={item.icon} w={'28px'} alt={''} /> <Image src={item.icon} w={'28px'} alt={''} loading={'lazy'} />
</Flex> </Flex>
<Box ml={5}> <Box ml={5}>
<Box fontSize={['lg', '2xl']} fontWeight={'bold'} color={'myGray.900'}> <Box fontSize={['lg', '2xl']} fontWeight={'bold'} color={'myGray.900'}>

View File

@@ -62,6 +62,7 @@ const Hero = () => {
maxW={['120%', '1000px']} maxW={['120%', '1000px']}
alt="" alt=""
draggable={false} draggable={false}
loading={'lazy'}
/> />
<MyIcon <MyIcon
name={'playFill'} name={'playFill'}

View File

@@ -1,4 +1,4 @@
import React from 'react'; import React, { useEffect } from 'react';
import { Box } from '@chakra-ui/react'; import { Box } from '@chakra-ui/react';
import { feConfigs } from '@/store/static'; import { feConfigs } from '@/store/static';
import { serviceSideProps } from '@/utils/i18n'; import { serviceSideProps } from '@/utils/i18n';
@@ -18,6 +18,11 @@ const Home = ({ homeUrl = '/' }: { homeUrl: string }) => {
router.replace(homeUrl); router.replace(homeUrl);
} }
useEffect(() => {
router.prefetch('/app/list');
router.prefetch('/login');
}, []);
return homeUrl === '/' ? ( return homeUrl === '/' ? (
<Box id="home" bg={'myWhite.600'} h={'100vh'} overflowY={'auto'} overflowX={'hidden'}> <Box id="home" bg={'myWhite.600'} h={'100vh'} overflowY={'auto'} overflowX={'hidden'}>
<Box position={'fixed'} zIndex={10} top={0} left={0} right={0}> <Box position={'fixed'} zIndex={10} top={0} left={0} right={0}>

View File

@@ -128,6 +128,7 @@ const FileSelect = ({
text, text,
maxLen: chunkLen maxLen: chunkLen
}); });
const fileItem: FileItemType = { const fileItem: FileItemType = {
id: filesId[0], id: filesId[0],
filename: file.name, filename: file.name,

View File

@@ -43,7 +43,7 @@ const QAImport = ({ kbId }: { kbId: string }) => {
const price = useMemo(() => { const price = useMemo(() => {
const filesToken = files.reduce((sum, file) => sum + file.tokens, 0); const filesToken = files.reduce((sum, file) => sum + file.tokens, 0);
const promptTokens = files.reduce((sum, file) => sum + file.chunks.length, 0) * 139; const promptTokens = files.reduce((sum, file) => sum + file.chunks.length, 0) * 139;
const totalToken = (filesToken + promptTokens) * 1.8; const totalToken = (filesToken + promptTokens) * 2;
return formatPrice(totalToken * unitPrice); return formatPrice(totalToken * unitPrice);
}, [files, unitPrice]); }, [files, unitPrice]);

View File

@@ -1,4 +1,4 @@
import React, { useState, useCallback } from 'react'; import React, { useState, useCallback, useEffect } from 'react';
import styles from './index.module.scss'; import styles from './index.module.scss';
import { Box, Flex, Image, useDisclosure } from '@chakra-ui/react'; import { Box, Flex, Image, useDisclosure } from '@chakra-ui/react';
import { PageTypeEnum } from '@/constants/user'; import { PageTypeEnum } from '@/constants/user';
@@ -53,6 +53,10 @@ const Login = () => {
return <Component setPageType={setPageType} loginSuccess={loginSuccess} />; return <Component setPageType={setPageType} loginSuccess={loginSuccess} />;
} }
useEffect(() => {
router.prefetch('/app/list');
}, []);
return ( return (
<> <>
{feConfigs.googleClientVerKey && ( {feConfigs.googleClientVerKey && (
@@ -90,6 +94,7 @@ const Login = () => {
height={'100%'} height={'100%'}
maxH={'450px'} maxH={'450px'}
alt="" alt=""
loading={'lazy'}
/> />
)} )}

View File

@@ -1,4 +1,4 @@
import React, { useCallback } from 'react'; import React, { useCallback, useEffect } from 'react';
import { useRouter } from 'next/router'; import { useRouter } from 'next/router';
import { useGlobalStore } from '@/store/global'; import { useGlobalStore } from '@/store/global';
import { ResLogin } from '@/api/response/user'; import { ResLogin } from '@/api/response/user';
@@ -88,6 +88,10 @@ const provider = ({ code, state }: { code: string; state: string }) => {
return null; return null;
}); });
useEffect(() => {
router.prefetch('/app/list');
}, []);
return <Loading />; return <Loading />;
}; };

View File

@@ -1,7 +1,8 @@
import { ChatItemType } from '@/types/chat'; import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
import { ChatRoleEnum } from '@/constants/chat'; import { ChatRoleEnum } from '@/constants/chat';
import type { NextApiResponse } from 'next'; import type { NextApiResponse } from 'next';
import { countMessagesTokens, countPromptTokens } from '@/utils/common/tiktoken';
import { adaptRole_Chat2Message } from '@/utils/common/adapt/message';
export type ChatCompletionResponseType = { export type ChatCompletionResponseType = {
streamResponse: any; streamResponse: any;
@@ -11,39 +12,37 @@ export type ChatCompletionResponseType = {
}; };
export type StreamResponseType = { export type StreamResponseType = {
chatResponse: any; chatResponse: any;
prompts: ChatItemType[]; messages: ChatItemType[];
res: NextApiResponse; res: NextApiResponse;
model: string; model: string;
[key: string]: any; [key: string]: any;
}; };
/* slice chat context by tokens */ /* slice chat context by tokens */
export const ChatContextFilter = ({ export function ChatContextFilter({
model, messages = [],
prompts = [],
maxTokens maxTokens
}: { }: {
model: string; messages: ChatItemType[];
prompts: ChatItemType[];
maxTokens: number; maxTokens: number;
}) => { }) {
if (!Array.isArray(prompts)) { if (!Array.isArray(messages)) {
return []; return [];
} }
const rawTextLen = prompts.reduce((sum, item) => sum + item.value.length, 0); const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0);
// If the text length is less than half of the maximum token, no calculation is required // If the text length is less than half of the maximum token, no calculation is required
if (rawTextLen < maxTokens * 0.5) { if (rawTextLen < maxTokens * 0.5) {
return prompts; return messages;
} }
// filter startWith system prompt // filter startWith system prompt
const chatStartIndex = prompts.findIndex((item) => item.obj !== ChatRoleEnum.System); const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System);
const systemPrompts: ChatItemType[] = prompts.slice(0, chatStartIndex); const systemPrompts: ChatItemType[] = messages.slice(0, chatStartIndex);
const chatPrompts: ChatItemType[] = prompts.slice(chatStartIndex); const chatPrompts: ChatItemType[] = messages.slice(chatStartIndex);
// reduce token of systemPrompt // reduce token of systemPrompt
maxTokens -= modelToolMap.countTokens({ maxTokens -= countMessagesTokens({
messages: systemPrompts messages: systemPrompts
}); });
@@ -52,18 +51,18 @@ export const ChatContextFilter = ({
// 从后往前截取对话内容 // 从后往前截取对话内容
for (let i = chatPrompts.length - 1; i >= 0; i--) { for (let i = chatPrompts.length - 1; i >= 0; i--) {
chats.unshift(chatPrompts[i]); const item = chatPrompts[i];
chats.unshift(item);
const tokens = modelToolMap.countTokens({ const tokens = countPromptTokens(item.value, adaptRole_Chat2Message(item.obj));
messages: chats maxTokens -= tokens;
});
/* 整体 tokens 超出范围, system必须保留 */ /* 整体 tokens 超出范围, system必须保留 */
if (tokens >= maxTokens) { if (maxTokens <= 0) {
chats.shift(); chats.shift();
break; break;
} }
} }
return [...systemPrompts, ...chats]; return [...systemPrompts, ...chats];
}; }

View File

@@ -7,10 +7,10 @@ import { sendInform } from '@/pages/api/user/inform/send';
import { authBalanceByUid } from '../utils/auth'; import { authBalanceByUid } from '../utils/auth';
import { axiosConfig, getAIChatApi } from '../lib/openai'; import { axiosConfig, getAIChatApi } from '../lib/openai';
import { ChatCompletionRequestMessage } from 'openai'; import { ChatCompletionRequestMessage } from 'openai';
import { modelToolMap } from '@/utils/plugin';
import { gptMessage2ChatType } from '@/utils/adapt'; import { gptMessage2ChatType } from '@/utils/adapt';
import { addLog } from '../utils/tools'; import { addLog } from '../utils/tools';
import { splitText2Chunks } from '@/utils/file'; import { splitText2Chunks } from '@/utils/file';
import { countMessagesTokens } from '@/utils/common/tiktoken';
const reduceQueue = () => { const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0; global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -81,7 +81,7 @@ A2:
} }
]; ];
const promptsToken = modelToolMap.countTokens({ const promptsToken = countMessagesTokens({
messages: gptMessage2ChatType(messages) messages: gptMessage2ChatType(messages)
}); });
const maxToken = modelTokenLimit - promptsToken; const maxToken = modelTokenLimit - promptsToken;

View File

@@ -1,5 +1,5 @@
import { adaptChatItem_openAI } from '@/utils/plugin/openai'; import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/utils/chat/index'; import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat'; import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat'; import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai'; import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
@@ -50,11 +50,10 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
} }
]; ];
const filterMessages = ChatContextFilter({ const filterMessages = ChatContextFilter({
model: agentModel, messages,
prompts: messages,
maxTokens maxTokens
}); });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false }); const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
// function body // function body
const agentFunction = { const agentFunction = {

View File

@@ -1,5 +1,5 @@
import { adaptChatItem_openAI } from '@/utils/plugin/openai'; import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/utils/chat/index'; import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat'; import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat'; import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai'; import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
@@ -45,12 +45,10 @@ export async function dispatchContentExtract({
} }
]; ];
const filterMessages = ChatContextFilter({ const filterMessages = ChatContextFilter({
// @ts-ignore messages,
model: agentModel,
prompts: messages,
maxTokens maxTokens
}); });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false }); const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
const properties: Record< const properties: Record<
string, string,

View File

@@ -1,8 +1,6 @@
import type { NextApiResponse } from 'next'; import type { NextApiResponse } from 'next';
import { sseResponse } from '@/service/utils/tools'; import { sseResponse } from '@/service/utils/tools';
import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai'; import { ChatContextFilter } from '@/service/common/tiktoken';
import { modelToolMap } from '@/utils/plugin';
import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType, QuoteItemType } from '@/types/chat'; import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat'; import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatModuleEnum, ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat'; import { ChatModuleEnum, ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
@@ -17,6 +15,8 @@ import { UserModelSchema } from '@/types/mongoSchema';
import { textCensor } from '@/api/service/plugins'; import { textCensor } from '@/api/service/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai'; import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app'; import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
export type ChatProps = { export type ChatProps = {
res: NextApiResponse; res: NextApiResponse;
@@ -142,7 +142,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
value: answer value: answer
}); });
const totalTokens = countOpenAIToken({ const totalTokens = countMessagesTokens({
messages: completeMessages messages: completeMessages
}); });
@@ -154,8 +154,8 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
completeMessages completeMessages
}; };
} else { } else {
const answer = stream ? '' : response.data.choices?.[0].message?.content || ''; const answer = response.data.choices?.[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0; const totalTokens = response.data.usage?.total_tokens || 0;
const completeMessages = filterMessages.concat({ const completeMessages = filterMessages.concat({
obj: ChatRoleEnum.AI, obj: ChatRoleEnum.AI,
@@ -194,8 +194,8 @@ function filterQuote({
quoteQA: ChatProps['quoteQA']; quoteQA: ChatProps['quoteQA'];
model: ChatModelItemType; model: ChatModelItemType;
}) { }) {
const sliceResult = modelToolMap.tokenSlice({ const sliceResult = sliceMessagesTB({
maxToken: model.quoteMaxToken, maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item) => ({ messages: quoteQA.map((item) => ({
obj: ChatRoleEnum.System, obj: ChatRoleEnum.System,
value: item.a ? `${item.q}\n${item.a}` : item.q value: item.a ? `${item.q}\n${item.a}` : item.q
@@ -274,12 +274,11 @@ function getChatMessages({
]; ];
const filterMessages = ChatContextFilter({ const filterMessages = ChatContextFilter({
model: model.model, messages,
prompts: messages,
maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
}); });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false }); const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
return { return {
messages: adaptMessages, messages: adaptMessages,
@@ -298,7 +297,7 @@ function getMaxTokens({
const tokensLimit = model.contextMaxToken; const tokensLimit = model.contextMaxToken;
/* count response max token */ /* count response max token */
const promptsToken = modelToolMap.countTokens({ const promptsToken = countMessagesTokens({
messages: filterMessages messages: filterMessages
}); });
maxToken = maxToken + promptsToken > tokensLimit ? tokensLimit - promptsToken : maxToken; maxToken = maxToken + promptsToken > tokensLimit ? tokensLimit - promptsToken : maxToken;

View File

@@ -8,6 +8,7 @@ import { initPg } from './pg';
import { createHashPassword } from '@/utils/tools'; import { createHashPassword } from '@/utils/tools';
import { createLogger, format, transports } from 'winston'; import { createLogger, format, transports } from 'winston';
import 'winston-mongodb'; import 'winston-mongodb';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
/** /**
* connect MongoDB and init data * connect MongoDB and init data
@@ -38,6 +39,8 @@ export async function connectToDatabase(): Promise<void> {
// init function // init function
getInitConfig(); getInitConfig();
// init tikToken
getTikTokenEnc();
try { try {
mongoose.set('strictQuery', true); mongoose.set('strictQuery', true);

View File

@@ -1,7 +1,7 @@
import type { Mongoose } from 'mongoose'; import type { Mongoose } from 'mongoose';
import type { Agent } from 'http'; import type { Agent } from 'http';
import type { Pool } from 'pg'; import type { Pool } from 'pg';
import type { Tiktoken } from '@dqbd/tiktoken'; import type { Tiktoken } from 'js-tiktoken';
import type { Logger } from 'winston'; import type { Logger } from 'winston';
import { ChatModelItemType, QAModelItemType, VectorModelItemType } from './model'; import { ChatModelItemType, QAModelItemType, VectorModelItemType } from './model';
import { TrackEventName } from '@/constants/common'; import { TrackEventName } from '@/constants/common';
@@ -49,7 +49,7 @@ declare global {
var httpsAgent: Agent; var httpsAgent: Agent;
var qaQueueLen: number; var qaQueueLen: number;
var vectorQueueLen: number; var vectorQueueLen: number;
var OpenAiEncMap: Tiktoken; var TikToken: Tiktoken;
var logger: Logger; var logger: Logger;

View File

@@ -0,0 +1,37 @@
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
// Internal chat role -> OpenAI ChatCompletion message role.
const chat2Message = {
  [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
  [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
  [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
};
// OpenAI ChatCompletion message role -> internal chat role.
// NOTE(review): Function has no ChatRoleEnum counterpart and maps to the raw
// string 'function' — confirm every consumer of adaptRole_Message2Chat
// tolerates a value outside ChatRoleEnum.
const message2Chat = {
  [ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System,
  [ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
  [ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
  [ChatCompletionRequestMessageRoleEnum.Function]: 'function'
};
/** Translate an internal ChatRoleEnum value into the matching OpenAI message role. */
export function adaptRole_Chat2Message(role: `${ChatRoleEnum}`) {
  const adapted = chat2Message[role];
  return adapted;
}
/** Translate an OpenAI message role back into the internal chat role. */
export function adaptRole_Message2Chat(role: `${ChatCompletionRequestMessageRoleEnum}`) {
  const adapted = message2Chat[role];
  return adapted;
}
/**
 * Convert internal ChatItemType messages into the OpenAI chat message format.
 * When `reserveId` is true, each message also carries the internal `dataId`.
 * Unknown roles fall back to System; missing values become an empty string.
 */
export const adaptChat2GptMessages = ({
  messages,
  reserveId
}: {
  messages: ChatItemType[];
  reserveId: boolean;
}): MessageItemType[] => {
  return messages.map((item) => {
    const role = chat2Message[item.obj] || ChatCompletionRequestMessageRoleEnum.System;
    const content = item.value || '';
    return reserveId ? { dataId: item.dataId, role, content } : { role, content };
  });
};

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,92 @@
/* Only the token of gpt-3.5-turbo is used */
import { ChatItemType } from '@/types/chat';
import { Tiktoken } from 'js-tiktoken/lite';
import { adaptChat2GptMessages } from '../adapt/message';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import encodingJson from './cl100k_base.json';
/**
 * Lazily create and cache a shared js-tiktoken encoder (cl100k_base table).
 * The instance is memoized on `window` (browser) or `global` (Node) so the
 * encoding table is only parsed once per runtime.
 */
export function getTikTokenEnc() {
  const hasWindow = typeof window !== 'undefined';
  const hasGlobal = typeof global !== 'undefined';

  // Reuse a previously constructed encoder if one is cached.
  if (hasWindow && window.TikToken) return window.TikToken;
  if (hasGlobal && global.TikToken) return global.TikToken;

  // First use: build the encoder and cache it on every available scope.
  const encoder = new Tiktoken(encodingJson);
  if (hasWindow) window.TikToken = encoder;
  if (hasGlobal) global.TikToken = encoder;
  return encoder;
}
/**
 * Estimate the token count of a single prompt, including its role prefix.
 * Falls back to the raw character length if encoding throws.
 */
export function countPromptTokens(prompt = '', role: `${ChatCompletionRequestMessageRoleEnum}`) {
  const text = `${role}\n${prompt}`;
  try {
    // +3 is a rough allowance for the per-message role/format overhead.
    return getTikTokenEnc().encode(text).length + 3;
  } catch (error) {
    // Encoding failed — approximate one token per character.
    return text.length;
  }
}
/**
 * Sum the estimated token counts of a chat history.
 * Messages are first converted to the GPT message format so the same
 * role-prefixed estimate is used as everywhere else.
 */
export function countMessagesTokens({ messages }: { messages: ChatItemType[] }) {
  const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
  return adaptMessages.reduce(
    (sum, message) => sum + countPromptTokens(message.content, message.role),
    0
  );
}
/**
 * Truncate `text` to at most `length` tokens.
 * Falls back to a plain character slice if encoding throws.
 */
export function sliceTextByTokens({ text, length }: { text: string; length: number }) {
  const enc = getTikTokenEnc();
  try {
    return enc.decode(enc.encode(text).slice(0, length));
  } catch (error) {
    return text.slice(0, length);
  }
}
/**
 * Slice messages from top to bottom by a token budget.
 * Keeps leading messages while the running token total stays within
 * `maxTokens`; always returns at least the first message (if any) so
 * callers never get an empty context.
 */
export function sliceMessagesTB({
  messages,
  maxTokens
}: {
  messages: ChatItemType[];
  maxTokens: number;
}) {
  const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true });
  let reduceTokens = maxTokens;
  let result: ChatItemType[] = [];

  for (let i = 0; i < adaptMessages.length; i++) {
    const item = adaptMessages[i];
    reduceTokens -= countPromptTokens(item.content, item.role);

    // Bug fix: the budget check must test the remaining budget (reduceTokens),
    // not the per-message token count — countPromptTokens always returns a
    // positive number, so the old `tokens > 0` condition could never break
    // out of the loop and maxTokens was silently ignored.
    if (reduceTokens > 0) {
      result.push(messages[i]);
    } else {
      break;
    }
  }

  // Guarantee at least one message so downstream prompts are never empty.
  return result.length === 0 && messages[0] ? [messages[0]] : result;
}

View File

@@ -1,8 +1,8 @@
import mammoth from 'mammoth'; import mammoth from 'mammoth';
import Papa from 'papaparse'; import Papa from 'papaparse';
import { getOpenAiEncMap } from './plugin/openai';
import { getErrText } from './tools'; import { getErrText } from './tools';
import { uploadImg, postUploadFiles } from '@/api/support/file'; import { uploadImg, postUploadFiles } from '@/api/support/file';
import { countPromptTokens } from './common/tiktoken';
/** /**
* upload file to mongo gridfs * upload file to mongo gridfs
@@ -206,16 +206,7 @@ export const splitText2Chunks = ({ text, maxLen }: { text: string; maxLen: numbe
chunks.push(chunk); chunks.push(chunk);
} }
const tokens = (() => { const tokens = chunks.reduce((sum, chunk) => sum + countPromptTokens(chunk, 'system'), 0);
try {
const enc = getOpenAiEncMap();
const encodeText = enc.encode(chunks.join(''));
const tokens = encodeText.length;
return tokens;
} catch (error) {
return chunks.join('').length;
}
})();
return { return {
chunks, chunks,

View File

@@ -1,8 +0,0 @@
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';
// Legacy aggregation of token utilities keyed by operation name.
export const modelToolMap = {
  countTokens: countOpenAIToken, // count tokens of a chat history
  sliceText: openAiSliceTextByToken, // truncate a string by token budget
  tokenSlice: gpt_chatItemTokenSlice // slice chat items by token budget
};

View File

@@ -1,100 +0,0 @@
import { encoding_for_model } from '@dqbd/tiktoken';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import axios from 'axios';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
/**
 * Lazily create and cache a @dqbd/tiktoken encoder for gpt-3.5-turbo.
 * The instance is memoized on `window` (browser) or `global` (Node) so the
 * encoder is only initialized once per runtime.
 */
export const getOpenAiEncMap = () => {
  // Reuse a previously created encoder if one is cached in this environment.
  if (typeof window !== 'undefined' && window.OpenAiEncMap) {
    return window.OpenAiEncMap;
  }
  if (typeof global !== 'undefined' && global.OpenAiEncMap) {
    return global.OpenAiEncMap;
  }
  // Register the ChatML special tokens used by the chat message format.
  const enc = encoding_for_model('gpt-3.5-turbo', {
    '<|im_start|>': 100264,
    '<|im_end|>': 100265,
    '<|im_sep|>': 100266
  });
  // Cache on whichever global scopes are available.
  if (typeof window !== 'undefined') {
    window.OpenAiEncMap = enc;
  }
  if (typeof global !== 'undefined') {
    global.OpenAiEncMap = enc;
  }
  return enc;
};
/**
 * Convert internal ChatItemType messages to the OpenAI chat message format.
 * When `reserveId` is true the internal dataId is carried onto each message.
 */
export const adaptChatItem_openAI = ({
  messages,
  reserveId
}: {
  messages: ChatItemType[];
  reserveId: boolean;
}): MessageItemType[] => {
  // Internal role -> OpenAI role mapping.
  const map = {
    [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
    [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
    [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
  };
  return messages.map((item) => ({
    ...(reserveId && { dataId: item.dataId }),
    // Unknown roles fall back to System; missing values become ''.
    role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
    content: item.value || ''
  }));
};
/**
 * Estimate the total token count of a chat history using the cached
 * gpt-3.5-turbo encoder. Each message is counted as `role\ncontent`.
 */
export function countOpenAIToken({ messages }: { messages: ChatItemType[] }) {
  const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
  const token = adaptMessages.reduce((sum, item) => {
    const text = `${item.role}\n${item.content}`;
    /* use textLen as tokens if encode error */
    const tokens = (() => {
      try {
        const enc = getOpenAiEncMap();
        const encodeText = enc.encode(text);
        return encodeText.length + 3; // rough allowance for per-message format overhead
      } catch (error) {
        return text.length;
      }
    })();
    return sum + tokens;
  }, 0);
  return token;
}
/**
 * Truncate `text` to at most `length` tokens.
 * Falls back to a plain character slice if encoding throws.
 */
export const openAiSliceTextByToken = ({ text, length }: { text: string; length: number }) => {
  const enc = getOpenAiEncMap();
  try {
    const encodeText = enc.encode(text);
    // @dqbd/tiktoken decode() yields UTF-8 bytes; convert them back to a string.
    const decoder = new TextDecoder();
    return decoder.decode(enc.decode(encodeText.slice(0, length)));
  } catch (error) {
    return text.slice(0, length);
  }
};
/**
 * Validate an OpenAI API key by probing the billing subscription endpoint.
 * Resolves with a warning string when the key appears invalid (no
 * `access_until` in the response); rejects with the upstream error message
 * when the request itself fails.
 * NOTE(review): the probe URL is a hard-coded third-party proxy — confirm
 * this endpoint is intended and trusted before relying on the result.
 */
export const authOpenAiKey = async (key: string) => {
  return axios
    .get('https://ccdbwscohpmu.cloud.sealos.io/openai/v1/dashboard/billing/subscription', {
      headers: {
        Authorization: `Bearer ${key}`
      }
    })
    .then((res) => {
      // A missing access_until means the key could not be verified.
      if (!res.data.access_until) {
        return Promise.resolve('OpenAI Key 可能无效');
      }
    })
    .catch((err) => {
      console.log(err);
      return Promise.reject(err?.response?.data?.error?.message || 'OpenAI Key 可能无效');
    });
};