fix: gpt35 4k

author archer
date 2023-06-14 20:54:34 +08:00
parent e4aeee7be3
commit 07f8e18c10
16 changed files with 74 additions and 22 deletions

@@ -49,14 +49,20 @@ export default function App({ Component, pageProps }: AppProps) {
         />
         <link rel="icon" href="/favicon.ico" />
       </Head>
-      <Script src="/js/qrcode.min.js" strategy="lazyOnload"></Script>
-      <Script src="/js/pdf.js" strategy="lazyOnload"></Script>
-      <Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
+      <Script src="/js/qrcode.min.js" strategy="afterInteractive"></Script>
+      <Script src="/js/pdf.js" strategy="afterInteractive"></Script>
+      <Script src="/js/html2pdf.bundle.min.js" strategy="afterInteractive"></Script>
       {googleVerKey && (
-        <Script
-          src={`https://www.recaptcha.net/recaptcha/api.js?render=${googleVerKey}`}
-          strategy="lazyOnload"
-        ></Script>
+        <>
+          <Script
+            src={`https://www.recaptcha.net/recaptcha/api.js?render=${googleVerKey}`}
+            strategy="afterInteractive"
+          ></Script>
+          <Script
+            src={`https://www.google.com/recaptcha/api.js?render=${googleVerKey}`}
+            strategy="afterInteractive"
+          ></Script>
+        </>
       )}
       <Script src="/js/particles.js"></Script>
       <QueryClientProvider client={queryClient}>
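
For context: Next.js runs `afterInteractive` scripts right after hydration, while `lazyOnload` defers them to browser idle time, so the reCAPTCHA object becomes available much earlier with the new strategy. The second script tag adds www.google.com alongside www.recaptcha.net, the alternate domain Google documents for networks where google.com is unreachable. A minimal sketch of how the loaded reCAPTCHA v3 API is typically called; the `getRecaptchaToken` helper is hypothetical and not part of this commit:

// Hypothetical helper (not in this commit): request a reCAPTCHA v3 token once api.js has loaded.
declare const grecaptcha: {
  ready: (cb: () => void) => void;
  execute: (siteKey: string, options: { action: string }) => Promise<string>;
};

export function getRecaptchaToken(siteKey: string, action = 'login'): Promise<string> {
  return new Promise((resolve, reject) => {
    grecaptcha.ready(() => {
      grecaptcha.execute(siteKey, { action }).then(resolve, reject);
    });
  });
}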

@@ -78,7 +78,7 @@ export async function pushDataToKb({
   if (mode === TrainingModeEnum.qa) {
     // count token
-    const token = modelToolMap[OpenAiChatEnum.GPT35].countTokens({
+    const token = modelToolMap[OpenAiChatEnum.GPT3516k].countTokens({
       messages: [{ obj: 'System', value: item.q }]
     });
     if (token > modeMaxToken[TrainingModeEnum.qa]) {
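
The body of `countTokens` is not part of this diff. A rough sketch of the same length guard, assuming a cl100k_base tokenizer via the js-tiktoken package (the encoding used by both gpt-3.5-turbo 4k and 16k); names other than the enum values above are illustrative:

// Sketch only: approximate the QA token check shown above with js-tiktoken.
import { getEncoding } from 'js-tiktoken';

const enc = getEncoding('cl100k_base'); // encoding shared by gpt-3.5-turbo and gpt-3.5-turbo-16k

export function exceedsQaLimit(question: string, maxToken: number): boolean {
  // Ignores per-message chat overhead; good enough for a length guard.
  return enc.encode(question).length > maxToken;
}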

@@ -8,6 +8,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const chatModelList: ChatModelItemType[] = [];
   if (global.systemEnv.openAIKeys) {
+    chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT3516k]);
     chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
   }
   if (global.systemEnv.gpt4Key) {

@@ -31,12 +31,12 @@ const modeMap = {
     [TrainingModeEnum.qa]: {
       maxLen: 8000,
       slideLen: 3000,
-      price: ChatModelMap[OpenAiChatEnum.GPT35].price,
+      price: ChatModelMap[OpenAiChatEnum.GPT3516k].price,
       isPrompt: true
     },
     [TrainingModeEnum.index]: {
-      maxLen: 1400,
-      slideLen: 700,
+      maxLen: 1000,
+      slideLen: 500,
       price: embeddingPrice,
       isPrompt: false
     }
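
The index settings control how source text is cut into embedding chunks: maxLen bounds each chunk, and slideLen appears to be the overlap carried into the next chunk, so the lowered values mean shorter, more numerous chunks. The splitter itself is not in this diff; the sketch below only illustrates that sliding-window interpretation:

// Illustrative sliding-window splitter, assuming maxLen is the chunk size and
// slideLen the overlap kept between adjacent chunks (the real splitter is not shown here).
export function splitText(text: string, maxLen: number, slideLen: number): string[] {
  const chunks: string[] = [];
  let start = 0;
  while (start < text.length) {
    chunks.push(text.slice(start, start + maxLen));
    if (start + maxLen >= text.length) break;
    start += maxLen - slideLen; // step forward, keeping slideLen characters of overlap
  }
  return chunks;
}

// With the new index settings, splitText(raw, 1000, 500) yields chunks of up to
// 1000 characters that overlap the previous chunk by roughly 500 characters.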

@@ -114,7 +114,8 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
 | 计费项 | 价格: 元/ 1K tokens(包含上下文)|
 | --- | --- |
 | 知识库 - 索引 | 0.001 |
-| chatgpt - 对话 | 0.025 |
+| chatgpt - 对话 | 0.022 |
+| chatgpt16K - 对话 | 0.025 |
 | gpt4 - 对话 | 0.5 |
 | 文件拆分 | 0.025 |`}
       />
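
The prices in this table are 元 per 1K tokens with prompt context included, so a call is billed as the consumed tokens divided by 1,000 times the row's rate. For example, with the new 4k rate:

// Billing arithmetic implied by the table above (prices in 元 per 1K tokens).
const price4k = 0.022;            // chatgpt (4k) 对话
const usedTokens = 1500;          // prompt + completion tokens for one call
const cost = (usedTokens / 1000) * price4k; // = 0.033 元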