fix: chat page fails to load on some phones

archer
2023-04-08 13:21:03 +08:00
parent b7cd4dec89
commit 9a145f223f
5 changed files with 66 additions and 73 deletions

View File

@@ -2,7 +2,7 @@
Fast GPT lets you use your own OpenAI API key to quickly call the OpenAI API, including GPT-3 and its fine-tuning, as well as the latest gpt-3.5 API.
## Initialization
## Development
Copy .env.template to .env.local and fill in the core parameters
```
@@ -15,7 +15,6 @@ TOKEN_KEY=any string you like, used to generate and validate tokens
OPENAIKEY=your openai key
REDIS_URL=redis connection address
```
```bash
pnpm dev
```
@@ -32,48 +31,6 @@ docker push imageName:tag
# or pull the image directly, see below
```
#### Pull and run the image on the server
```yml
# docker-compose
version: "3.3"
services:
fast-gpt:
image: c121914yu/fast-gpt:latest
environment:
AXIOS_PROXY_HOST: 127.0.0.1
AXIOS_PROXY_PORT: 7890
MY_MAIL:
MAILE_CODE:
TOKEN_KEY:
MONGODB_URI:
OPENAIKEY:
REDIS_URL:
network_mode: host
restart: always
container_name: fast-gpt
```
```bash
#!/bin/bash
# Pull the latest image
docker-compose pull
docker-compose up -d
echo "Docker Compose image pull complete!"
# Remove old local images
images=$(docker images --format "{{.ID}} {{.Repository}}" | grep fast-gpt)
# Put the image IDs and names into an array
IFS=$'\n' read -rd '' -a image_array <<<"$images"
# Loop over the array and remove all old images
for ((i=1; i<${#image_array[@]}; i++))
do
image=${image_array[$i]}
image_id=${image%% *}
docker rmi $image_id
done
```
#### Software tutorial: installing docker
```bash
@@ -163,3 +120,45 @@ appendfsync everysec
# Add the index
FT.CREATE idx:model:data:hash ON HASH PREFIX 1 model:data: SCHEMA modelId TAG userId TAG status TAG q TEXT text TEXT vector VECTOR FLAT 6 DIM 1536 DISTANCE_METRIC COSINE TYPE FLOAT32
```
#### Pull and run the image on the server
```yml
# docker-compose
version: "3.3"
services:
fast-gpt:
image: c121914yu/fast-gpt:latest
environment:
AXIOS_PROXY_HOST: 127.0.0.1
AXIOS_PROXY_PORT: 7890
MY_MAIL:
MAILE_CODE:
TOKEN_KEY:
MONGODB_URI:
OPENAIKEY:
REDIS_URL:
network_mode: host
restart: always
container_name: fast-gpt
```
```bash
#!/bin/bash
# Pull the latest image
docker-compose pull
docker-compose up -d
echo "Docker Compose image pull complete!"
# Remove old local images
images=$(docker images --format "{{.ID}} {{.Repository}}" | grep fast-gpt)
# Put the image IDs and names into an array
IFS=$'\n' read -rd '' -a image_array <<<"$images"
# Loop over the array and remove all old images
for ((i=1; i<${#image_array[@]}; i++))
do
image=${image_array[$i]}
image_id=${image%% *}
docker rmi $image_id
done
```
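For reference, a minimal way to run the update script above on the server, assuming it is saved as update.sh (the filename is only an example) in the same directory as docker-compose.yml:
```bash
# run from the directory that contains docker-compose.yml
chmod +x update.sh
./update.sh
```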

View File

@@ -47,7 +47,7 @@ export default function App({ Component, pageProps }: AppProps) {
<meta name="description" content="Generated by Fast GPT" />
<meta
name="viewport"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0;"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0"
/>
<link rel="icon" href="/favicon.ico" />
</Head>

View File

@@ -1,13 +1,7 @@
import React, { useCallback, useState, useRef, useMemo, useEffect } from 'react';
import { useRouter } from 'next/router';
import Image from 'next/image';
import {
getInitChatSiteInfo,
getChatSiteId,
postGPT3SendPrompt,
delChatRecordByIndex,
postSaveChat
} from '@/api/chat';
import { getInitChatSiteInfo, getChatSiteId, delChatRecordByIndex, postSaveChat } from '@/api/chat';
import type { InitChatResponse } from '@/api/response/chat';
import { ChatSiteItemType } from '@/types/chat';
import {
@@ -33,12 +27,11 @@ import { useGlobalStore } from '@/store/global';
import { useChatStore } from '@/store/chat';
import { useCopyData } from '@/utils/tools';
import { streamFetch } from '@/api/fetch';
import SlideBar from './components/SlideBar';
import Empty from './components/Empty';
import Icon from '@/components/Icon';
import { encode } from 'gpt-token-utils';
import { modelList } from '@/constants/model';
const SlideBar = dynamic(() => import('./components/SlideBar'));
const Empty = dynamic(() => import('./components/Empty'));
const Markdown = dynamic(() => import('@/components/Markdown'));
const textareaMinH = '22px';
@@ -48,10 +41,12 @@ interface ChatType extends InitChatResponse {
}
const Chat = ({ chatId }: { chatId: string }) => {
const { toast } = useToast();
const router = useRouter();
const ChatBox = useRef<HTMLDivElement>(null);
const TextareaDom = useRef<HTMLTextAreaElement>(null);
const { toast } = useToast();
const router = useRouter();
// used to abort in-flight requests
const controller = useRef(new AbortController());
const [chatData, setChatData] = useState<ChatType>({
@@ -70,11 +65,11 @@ const Chat = ({ chatId }: { chatId: string }) => {
() => chatData.history[chatData.history.length - 1]?.status === 'loading',
[chatData.history]
);
const { isOpen: isOpenSlider, onClose: onCloseSlider, onOpen: onOpenSlider } = useDisclosure();
const { copyData } = useCopyData();
const { isPc, media } = useScreen();
const { setLoading } = useGlobalStore();
const { isOpen: isOpenSlider, onClose: onCloseSlider, onOpen: onOpenSlider } = useDisclosure();
const { pushChatHistory } = useChatStore();
// Scroll to the bottom
@@ -211,12 +206,11 @@ const Chat = ({ chatId }: { chatId: string }) => {
}
// Length check
const tokens = encode(val).length;
const model = modelList.find((item) => item.model === chatData.modelName);
if (model && tokens >= model.maxToken) {
if (model && val.length >= model.maxToken) {
toast({
title: 'A single input exceeds 4000 tokens',
title: 'A single input exceeds 4000 characters',
status: 'warning'
});
return;
@@ -314,14 +308,6 @@ const Chat = ({ chatId }: { chatId: string }) => {
[copyData]
);
useEffect(() => {
controller.current = new AbortController();
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
controller.current?.abort();
};
}, [chatId]);
// Initialize the chat window
useQuery(
['init', chatId],
@@ -359,6 +345,14 @@ const Chat = ({ chatId }: { chatId: string }) => {
}
);
// Update the stream abort controller
useEffect(() => {
controller.current = new AbortController();
return () => {
// eslint-disable-next-line react-hooks/exhaustive-deps
controller.current?.abort();
};
}, [chatId]);
return (
<Flex
h={'100%'}
@@ -546,7 +540,7 @@ const Chat = ({ chatId }: { chatId: string }) => {
export default Chat;
export async function getServerSideProps(context: any) {
const chatId = context.query?.chatId || '';
const chatId = context?.query?.chatId || 'noid';
return {
props: { chatId }

View File

@@ -127,7 +127,7 @@ const SelectFileModal = ({
tokens
</Box>
<Box mt={2}>
{encode(fileText).length} tokens {formatPrice(encode(fileText).length * 4)}
{encode(fileText).length} tokens {formatPrice(encode(fileText).length * 3)}
</Box>
<Flex w={'100%'} alignItems={'center'} my={4}>

View File

@@ -106,7 +106,7 @@ const SelectUrlModal = ({
QA tokens
</Box>
<Box mt={2}>
{encode(webText).length} tokens {formatPrice(encode(webText).length * 4)}
{encode(webText).length} tokens {formatPrice(encode(webText).length * 3)}
</Box>
<Flex w={'100%'} alignItems={'center'} my={4}>
<Box flex={'0 0 70px'}></Box>