feat: vision model (#489)

* mongo init

* perf: mongo connect

* perf: tts

perf: whisper and tts

perf: tts whisper permission

log

rebase (#488)

* perf: modal

* i18n

* perf: schema lean

* feat: vision model format

* perf: tts loading

* perf: static data

* perf: tts

* feat: image

* perf: image

* perf: upload image and title

* perf: image size

* doc

* perf: color

* doc

* speaking cannot select file

* doc
Archer
2023-11-18 15:42:35 +08:00
committed by GitHub
parent 70f3373246
commit c5664c7e90
58 changed files with 650 additions and 254 deletions

View File

@@ -36,6 +36,7 @@ weight: 520
"quoteMaxToken": 2000, // 最大引用内容长度
"maxTemperature": 1.2, // 最大温度值
"censor": false, // 是否开启敏感词过滤(商业版)
"vision": false, // 支持图片输入
"defaultSystemChatPrompt": ""
},
{
@@ -47,6 +48,7 @@ weight: 520
"quoteMaxToken": 8000,
"maxTemperature": 1.2,
"censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
@@ -58,6 +60,19 @@ weight: 520
"quoteMaxToken": 4000,
"maxTemperature": 1.2,
"censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
"model": "gpt-4-vision-preview",
"name": "GPT4-Vision",
"maxContext": 128000,
"maxResponse": 4000,
"price": 0,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"censor": false,
"vision": true,
"defaultSystemChatPrompt": ""
}
],

View File

@@ -0,0 +1,16 @@
---
title: 'V4.6.1'
description: 'FastGPT V4.6.1'
icon: 'upgrade'
draft: false
toc: true
weight: 835
---
## V4.6.1 功能介绍
1. 新增 - GPT4-v 模型支持
2. 新增 - whisper 语音输入
3. 优化 - TTS 流传输
4. 优化 - TTS 缓存

View File

@@ -24,7 +24,7 @@ export const simpleText = (text: string) => {
};
/*
replace {{variable}} to value
*/
export function replaceVariable(text: string, obj: Record<string, string | number>) {
for (const key in obj) {

View File

@@ -9,6 +9,7 @@ export type ChatModelItemType = LLMModelItemType & {
quoteMaxToken: number;
maxTemperature: number;
censor?: boolean;
vision?: boolean;
defaultSystemChatPrompt?: string;
};

View File

@@ -17,6 +17,7 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 2000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
@@ -28,6 +29,7 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 8000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
@@ -39,6 +41,19 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 4000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
model: 'gpt-4-vision-preview',
name: 'GPT4-Vision',
maxContext: 128000,
maxResponse: 4000,
price: 0,
quoteMaxToken: 100000,
maxTemperature: 1.2,
censor: false,
vision: true,
defaultSystemChatPrompt: ''
}
];

View File

@@ -5,12 +5,14 @@ import type {
ChatCompletionMessageParam,
ChatCompletionContentPart
} from 'openai/resources';
export type ChatCompletionContentPart = ChatCompletionContentPart;
export type ChatCompletionCreateParams = ChatCompletionCreateParams;
export type ChatMessageItemType = Omit<ChatCompletionMessageParam> & {
export type ChatMessageItemType = Omit<ChatCompletionMessageParam, 'name'> & {
name?: any;
dataId?: string;
content: any;
};
} & any;
export type ChatCompletion = ChatCompletion;
export type StreamChatType = Stream<ChatCompletionChunk>;

View File

@@ -54,3 +54,6 @@ export const ChatSourceMap = {
export const HUMAN_ICON = `/icon/human.svg`;
export const LOGO_ICON = `/icon/logo.svg`;
export const IMG_BLOCK_KEY = 'img-block';
export const FILE_BLOCK_KEY = 'file-block';

View File

@@ -0,0 +1,6 @@
import { IMG_BLOCK_KEY, FILE_BLOCK_KEY } from './constants';
export function chatContentReplaceBlock(content: string = '') {
const regex = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 'g');
return content.replace(regex, '').trim();
}
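
For orientation, a minimal usage sketch of the new helper (the input string below is illustrative, not part of this commit). It removes `img-block` fences so that title and summary code paths only see plain text:

```ts
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';

// Hypothetical chat content that contains an image block
const raw = 'What is in this image?\n```img-block\n{"src":"https://example.com/1.png"}\n```';

// The img-block fence is stripped and the result trimmed,
// leaving only the user's plain-text question.
chatContentReplaceBlock(raw); // => 'What is in this image?'
```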

View File

@@ -33,3 +33,4 @@ try {
export const MongoTTSBuffer: Model<TTSBufferSchemaType> =
models[collectionName] || model(collectionName, TTSBufferSchema);
MongoTTSBuffer.syncIndexes();

View File

@@ -5,12 +5,26 @@ export function getMongoImgUrl(id: string) {
return `${imageBaseUrl}${id}`;
}
export async function uploadMongoImg({ base64Img, userId }: { base64Img: string; userId: string }) {
export const maxImgSize = 1024 * 1024 * 12;
export async function uploadMongoImg({
base64Img,
teamId,
expiredTime
}: {
base64Img: string;
teamId: string;
expiredTime?: Date;
}) {
if (base64Img.length > maxImgSize) {
return Promise.reject('Image too large');
}
const base64Data = base64Img.split(',')[1];
const { _id } = await MongoImage.create({
userId,
binary: Buffer.from(base64Data, 'base64')
teamId,
binary: Buffer.from(base64Data, 'base64'),
expiredTime
});
return getMongoImgUrl(String(_id));
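
For reference, a hedged usage sketch of the updated signature (the helper name and argument values below are hypothetical): images are now owned by a team instead of a user, oversized uploads are rejected, and the optional expiredTime feeds the new TTL index on the image collection.

```ts
import { uploadMongoImg } from '@fastgpt/service/common/file/image/controller';
import { addDays } from 'date-fns';

// Hypothetical helper: store a pasted chat image that expires after 30 days.
async function saveChatImage(base64Img: string, teamId: string) {
  // Rejects with 'Image too large' when the base64 string exceeds maxImgSize.
  const src = await uploadMongoImg({
    base64Img,
    teamId,
    expiredTime: addDays(new Date(), 30)
  });
  return src; // relative URL built by getMongoImgUrl, i.e. `${imageBaseUrl}${_id}`
}
```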

View File

@@ -1,16 +1,27 @@
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { connectionMongo, type Model } from '../../mongo';
const { Schema, model, models } = connectionMongo;
const ImageSchema = new Schema({
userId: {
teamId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
ref: TeamCollectionName
},
binary: {
type: Buffer
},
expiredTime: {
type: Date
}
});
export const MongoImage: Model<{ userId: string; binary: Buffer }> =
try {
ImageSchema.index({ expiredTime: 1 }, { expireAfterSeconds: 60 });
} catch (error) {
console.log(error);
}
export const MongoImage: Model<{ teamId: string; binary: Buffer }> =
models['image'] || model('image', ImageSchema);
MongoImage.syncIndexes();

View File

@@ -67,3 +67,5 @@ try {
export const MongoApp: Model<AppType> =
models[appCollectionName] || model(appCollectionName, AppSchema);
MongoApp.syncIndexes();

View File

@@ -83,3 +83,5 @@ try {
export const MongoChatItem: Model<ChatItemType> =
models['chatItem'] || model('chatItem', ChatItemSchema);
MongoChatItem.syncIndexes();

View File

@@ -92,7 +92,7 @@ const ChatSchema = new Schema({
});
try {
ChatSchema.index({ userId: 1 });
ChatSchema.index({ tmbId: 1 });
ChatSchema.index({ updateTime: -1 });
ChatSchema.index({ appId: 1 });
} catch (error) {
@@ -101,3 +101,4 @@ try {
export const MongoChat: Model<ChatType> =
models[chatCollectionName] || model(chatCollectionName, ChatSchema);
MongoChat.syncIndexes();

View File

@@ -1,7 +1,8 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum, IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { countMessagesTokens, countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { adaptRole_Chat2Message } from '@fastgpt/global/core/chat/adapt';
import type { ChatCompletionContentPart } from '@fastgpt/global/core/ai/type.d';
/* slice chat context by tokens */
export function ChatContextFilter({
@@ -51,3 +52,101 @@ export function ChatContextFilter({
return [...systemPrompts, ...chats];
}
/**
Convert a chat string into vision-model content. Image and file blocks follow this markdown code-block rule:
@rule:
```img-block
{src:""}
{src:""}
```
```file-block
{name:"",src:""},
{name:"",src:""}
```
@example:
Whats in this image?
```img-block
{src:"https://1.png"}
```
@return
[
{ type: 'text', text: 'Whats in this image?' },
{
type: 'image_url',
image_url: {
url: 'https://1.png'
}
}
]
*/
export function formatStr2ChatContent(str: string) {
const content: ChatCompletionContentPart[] = [];
let lastIndex = 0;
const regex = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 'g');
let match;
while ((match = regex.exec(str)) !== null) {
// add previous text
if (match.index > lastIndex) {
const text = str.substring(lastIndex, match.index).trim();
if (text) {
content.push({ type: 'text', text });
}
}
const blockType = match[1].trim();
if (blockType === IMG_BLOCK_KEY) {
const blockContentLines = match[2].trim().split('\n');
const jsonLines = blockContentLines.map((item) => {
try {
return JSON.parse(item) as { src: string };
} catch (error) {
return { src: '' };
}
});
for (const item of jsonLines) {
if (!item.src) throw new Error("image block's content error");
}
content.push(
...jsonLines.map((item) => ({
type: 'image_url' as any,
image_url: {
url: item.src
}
}))
);
}
lastIndex = regex.lastIndex;
}
// add remaining text
if (lastIndex < str.length) {
const remainingText = str.substring(lastIndex).trim();
if (remainingText) {
content.push({ type: 'text', text: remainingText });
}
}
// Merge consecutive text parts: when adjacent items are both type=text, combine them
for (let i = 0; i < content.length - 1; i++) {
const currentContent = content[i];
const nextContent = content[i + 1];
if (currentContent.type === 'text' && nextContent.type === 'text') {
currentContent.text += nextContent.text;
content.splice(i + 1, 1);
i--;
}
}
if (content.length === 1 && content[0].type === 'text') {
return content[0].text;
}
return content ? content : null;
}
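
A hedged example of what the converter produces (input values are illustrative): text around an `img-block` becomes a text part and each src becomes an image_url part, matching the OpenAI vision content format, while plain strings without a block are returned unchanged:

```ts
const content = formatStr2ChatContent(
  'Describe this picture\n```img-block\n{"src":"https://example.com/cat.png"}\n```'
);
// => [
//   { type: 'text', text: 'Describe this picture' },
//   { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } }
// ]

// No img-block: the original string is returned as-is,
// so non-vision models keep receiving plain text.
formatStr2ChatContent('Hello'); // => 'Hello'
```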

View File

@@ -22,9 +22,9 @@ export async function findDatasetIdTreeByTopDatasetId(
}
export async function getCollectionWithDataset(collectionId: string) {
const data = (
await MongoDatasetCollection.findById(collectionId).populate('datasetId')
)?.toJSON() as CollectionWithDatasetType;
const data = (await MongoDatasetCollection.findById(collectionId)
.populate('datasetId')
.lean()) as CollectionWithDatasetType;
if (!data) {
return Promise.reject('Collection is not exist');
}

View File

@@ -76,3 +76,4 @@ try {
export const MongoDatasetData: Model<DatasetDataSchemaType> =
models[DatasetDataCollectionName] || model(DatasetDataCollectionName, DatasetDataSchema);
MongoDatasetData.syncIndexes();

View File

@@ -82,3 +82,4 @@ try {
export const MongoDataset: Model<DatasetSchemaType> =
models[DatasetCollectionName] || model(DatasetCollectionName, DatasetSchema);
MongoDataset.syncIndexes();

View File

@@ -104,3 +104,5 @@ try {
export const MongoDatasetTraining: Model<DatasetTrainingSchemaType> =
models[DatasetTrainingCollectionName] || model(DatasetTrainingCollectionName, TrainingDataSchema);
MongoDatasetTraining.syncIndexes();

View File

@@ -46,10 +46,11 @@ const PluginSchema = new Schema({
});
try {
PluginSchema.index({ userId: 1 });
PluginSchema.index({ tmbId: 1 });
} catch (error) {
console.log(error);
}
export const MongoPlugin: Model<PluginItemSchema> =
models[ModuleCollectionName] || model(ModuleCollectionName, PluginSchema);
MongoPlugin.syncIndexes();

View File

@@ -31,3 +31,4 @@ const PromotionRecordSchema = new Schema({
export const MongoPromotionRecord: Model<PromotionRecordType> =
models['promotionRecord'] || model('promotionRecord', PromotionRecordSchema);
MongoPromotionRecord.syncIndexes();

View File

@@ -70,3 +70,4 @@ const OpenApiSchema = new Schema(
export const MongoOpenApi: Model<OpenApiSchema> =
models['openapi'] || model('openapi', OpenApiSchema);
MongoOpenApi.syncIndexes();

View File

@@ -71,3 +71,5 @@ const OutLinkSchema = new Schema({
export const MongoOutLink: Model<SchemaType> =
models['outlinks'] || model('outlinks', OutLinkSchema);
MongoOutLink.syncIndexes();

View File

@@ -22,12 +22,12 @@ export async function authApp({
}
> {
const result = await parseHeaderCert(props);
const { userId, teamId, tmbId } = result;
const { teamId, tmbId } = result;
const { role } = await getTeamInfoByTmbId({ tmbId });
const { app, isOwner, canWrite } = await (async () => {
// get app
const app = (await MongoApp.findOne({ _id: appId, teamId }))?.toJSON();
const app = await MongoApp.findOne({ _id: appId, teamId }).lean();
if (!app) {
return Promise.reject(AppErrEnum.unAuthApp);
}

View File

@@ -24,9 +24,9 @@ export async function authChat({
const { chat, isOwner, canWrite } = await (async () => {
// get chat
const chat = (
await MongoChat.findOne({ chatId, teamId }).populate('appId')
)?.toJSON() as ChatWithAppSchema;
const chat = (await MongoChat.findOne({ chatId, teamId })
.populate('appId')
.lean()) as ChatWithAppSchema;
if (!chat) {
return Promise.reject('Chat is not exists');

View File

@@ -31,7 +31,7 @@ export async function authDataset({
const { role } = await getTeamInfoByTmbId({ tmbId });
const { dataset, isOwner, canWrite } = await (async () => {
const dataset = (await MongoDataset.findOne({ _id: datasetId, teamId }))?.toObject();
const dataset = await MongoDataset.findOne({ _id: datasetId, teamId }).lean();
if (!dataset) {
return Promise.reject(DatasetErrEnum.unAuthDataset);

View File

@@ -64,3 +64,4 @@ const UserSchema = new Schema({
export const MongoUser: Model<UserModelSchema> =
models[userCollectionName] || model(userCollectionName, UserSchema);
MongoUser.syncIndexes();

View File

@@ -59,3 +59,4 @@ try {
}
export const MongoBill: Model<BillType> = models['bill'] || model('bill', BillSchema);
MongoBill.syncIndexes();

View File

@@ -15,6 +15,7 @@
"quoteMaxToken": 2000,
"maxTemperature": 1.2,
"censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
@@ -26,6 +27,7 @@
"quoteMaxToken": 8000,
"maxTemperature": 1.2,
"censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
@@ -37,6 +39,19 @@
"quoteMaxToken": 4000,
"maxTemperature": 1.2,
"censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
"model": "gpt-4-vision-preview",
"name": "GPT4-Vision",
"maxContext": 128000,
"maxResponse": 4000,
"price": 0,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"censor": false,
"vision": true,
"defaultSystemChatPrompt": ""
}
],

Binary file not shown (new image, 46 KiB).

View File

@@ -191,6 +191,7 @@
"Update Success": "Update Success",
"Update Successful": "Update Successful",
"Update Time": "Update Time",
"Upload File Failed": "Upload File Failed",
"Username": "UserName",
"error": {
"unKnow": "There was an accident"

View File

@@ -191,6 +191,7 @@
"Update Success": "更新成功",
"Update Successful": "更新成功",
"Update Time": "更新时间",
"Upload File Failed": "上传文件失败",
"Username": "用户名",
"error": {
"unKnow": "出现了点意外~"

View File

@@ -1,7 +1,7 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useState, useMemo } from 'react';
import React, { useRef, useEffect, useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import MyTooltip from '../MyTooltip';
import MyIcon from '../Icon';
@@ -10,6 +10,23 @@ import { useRouter } from 'next/router';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgAndUpload } from '@/web/common/file/controller';
import { useToast } from '@/web/common/hooks/useToast';
import { customAlphabet } from 'nanoid';
import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
enum FileTypeEnum {
image = 'image',
file = 'file'
}
type FileItemType = {
id: string;
rawFile: File;
type: `${FileTypeEnum}`;
name: string;
icon: string; // img is base64
src?: string;
};
const MessageInput = ({
onChange,
@@ -17,16 +34,19 @@ const MessageInput = ({
onStop,
isChatting,
TextareaDom,
showFileSelector = false,
resetInputVal
}: {
onChange: (e: string) => void;
onSendMessage: (e: string) => void;
onStop: () => void;
isChatting: boolean;
showFileSelector?: boolean;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: string) => void;
}) => {
const { shareId } = useRouter().query as { shareId?: string };
const { toast } = useToast();
const {
isSpeaking,
isTransCription,
@@ -37,64 +57,106 @@ const MessageInput = ({
stream
} = useSpeech({ shareId });
const { isPc } = useSystemStore();
const canvasRef = useRef<HTMLCanvasElement>();
const canvasRef = useRef<HTMLCanvasElement>(null);
const { t } = useTranslation();
const textareaMinH = '22px';
const havInput = !!TextareaDom.current?.value;
const { toast } = useToast();
const [imgBase64Array, setImgBase64Array] = useState<string[]>([]);
const [fileList, setFileList] = useState<File[]>([]);
const [imgSrcArray, setImgSrcArray] = useState<string[]>([]);
const [fileList, setFileList] = useState<FileItemType[]>([]);
const havInput = !!TextareaDom.current?.value || fileList.length > 0;
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: '.jpg,.png',
multiple: true
fileType: 'image/*',
multiple: true,
maxCount: 10
});
useEffect(() => {
fileList.forEach((file) => {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = async () => {
setImgBase64Array((prev) => [...prev, reader.result as string]);
};
});
}, [fileList]);
const onSelectFile = useCallback((e: File[]) => {
if (!e || e.length === 0) {
const uploadFile = async (file: FileItemType) => {
if (file.type === FileTypeEnum.image) {
try {
const src = await compressImgAndUpload({
file: file.rawFile,
maxW: 1000,
maxH: 1000,
maxSize: 1024 * 1024 * 5,
// 30-day expiry.
expiredTime: addDays(new Date(), 30)
});
setFileList((state) =>
state.map((item) =>
item.id === file.id
? {
...item,
src: `${location.origin}${src}`
}
: item
)
);
} catch (error) {
setFileList((state) => state.filter((item) => item.id !== file.id));
toast({
status: 'error',
title: t('common.Upload File Failed')
});
}
}
};
const onSelectFile = useCallback(async (files: File[]) => {
if (!files || files.length === 0) {
return;
}
setFileList(e);
const loadFiles = await Promise.all(
files.map(
(file) =>
new Promise<FileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
const item = {
id: nanoid(),
rawFile: file,
type: FileTypeEnum.image,
name: file.name,
icon: reader.result as string
};
uploadFile(item);
resolve(item);
};
reader.onerror = () => {
reject(reader.error);
};
} else {
resolve({
id: nanoid(),
rawFile: file,
type: FileTypeEnum.file,
name: file.name,
icon: 'pdf'
});
}
})
)
);
setFileList((state) => [...state, ...loadFiles]);
}, []);
const handleSend = useCallback(async () => {
try {
for (const file of fileList) {
const src = await compressImgAndUpload({
file,
maxW: 1000,
maxH: 1000,
maxSize: 1024 * 1024 * 2
});
imgSrcArray.push(src);
}
} catch (err: any) {
toast({
title: typeof err === 'string' ? err : '文件上传异常',
status: 'warning'
});
}
const textareaValue = TextareaDom.current?.value || '';
const inputMessage =
imgSrcArray.length === 0
? textareaValue
: `\`\`\`img-block\n${JSON.stringify(imgSrcArray)}\n\`\`\`\n${textareaValue}`;
const images = fileList.filter((item) => item.type === FileTypeEnum.image);
const imagesText =
images.length === 0
? ''
: `\`\`\`${IMG_BLOCK_KEY}
${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
\`\`\`
`;
const inputMessage = `${imagesText}${textareaValue}`;
onSendMessage(inputMessage);
setImgBase64Array([]);
setImgSrcArray([]);
}, [TextareaDom, fileList, imgSrcArray, onSendMessage, toast]);
setFileList([]);
}, [TextareaDom, fileList, onSendMessage]);
useEffect(() => {
if (!stream) {
@@ -107,117 +169,139 @@ const MessageInput = ({
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
const renderCurve = () => {
renderAudioGraph(analyser, canvasRef.current as HTMLCanvasElement);
if (!canvasRef.current) return;
renderAudioGraph(analyser, canvasRef.current);
window.requestAnimationFrame(renderCurve);
};
renderCurve();
}, [renderAudioGraph, stream]);
return (
<>
<Box m={['0 auto', '10px auto']} w={'100%'} maxW={['auto', 'min(800px, 100%)']} px={[0, 5]}>
<Box
py={imgBase64Array.length > 0 ? '8px' : '18px'}
position={'relative'}
boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`}
{...(isPc
? {
border: '1px solid',
borderColor: 'rgba(0,0,0,0.12)'
}
: {
borderTop: '1px solid',
borderTopColor: 'rgba(0,0,0,0.15)'
})}
borderRadius={['none', 'md']}
backgroundColor={'white'}
<Box m={['0 auto', '10px auto']} w={'100%'} maxW={['auto', 'min(800px, 100%)']} px={[0, 5]}>
<Box
pt={fileList.length > 0 ? '10px' : ['14px', '18px']}
pb={['14px', '18px']}
position={'relative'}
boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`}
borderRadius={['none', 'md']}
bg={'white'}
{...(isPc
? {
border: '1px solid',
borderColor: 'rgba(0,0,0,0.12)'
}
: {
borderTop: '1px solid',
borderTopColor: 'rgba(0,0,0,0.15)'
})}
>
{/* translate loading */}
<Flex
position={'absolute'}
top={0}
bottom={0}
left={0}
right={0}
zIndex={10}
pl={5}
alignItems={'center'}
bg={'white'}
color={'myBlue.600'}
visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
>
{/* translate loading */}
<Box
position={'absolute'}
top={0}
bottom={0}
left={4}
right={['8px', '4px']}
zIndex={10}
display={'flex'}
alignItems={'center'}
bg={'white'}
pl={['5px', '10px']}
color="rgba(54,111,255,0.6)"
visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
>
<Spinner size={'sm'} mr={4} />
{t('chat.Converting to text')}
</Box>
{/* file uploader */}
<Flex
position={'absolute'}
alignItems={'center'}
left={['12px', '14px']}
bottom={['15px', '13px']}
h={['26px', '32px']}
zIndex={10}
cursor={'pointer'}
onClick={onOpenSelectFile}
>
<MyTooltip label={t('core.chat.Select File')}>
<MyIcon name={'core/chat/fileSelect'} />
</MyTooltip>
<File onSelect={onSelectFile} />
</Flex>
{/* file preview */}
<Flex w={'96%'} wrap={'wrap'} ml={4}>
{imgBase64Array.length > 0 &&
imgBase64Array.map((src, index) => (
<Box
key={index}
border={'1px solid rgba(0,0,0,0.12)'}
mr={2}
mb={2}
<Spinner size={'sm'} mr={4} />
{t('chat.Converting to text')}
</Flex>
{/* file preview */}
<Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
{fileList.map((item) => (
<Box
key={item.id}
border={'1px solid rgba(0,0,0,0.12)'}
mr={2}
mb={2}
rounded={'md'}
position={'relative'}
_hover={{
'.close-icon': { display: item.src ? 'block' : 'none' }
}}
>
{/* uploading */}
{!item.src && (
<Flex
position={'absolute'}
alignItems={'center'}
justifyContent={'center'}
rounded={'md'}
position={'relative'}
_hover={{
'.close-icon': { display: 'block' }
}}
color={'myBlue.600'}
top={0}
left={0}
bottom={0}
right={0}
bg={'rgba(255,255,255,0.8)'}
>
<MyIcon
name={'closeSolid'}
w={'16px'}
h={'16px'}
color={'myGray.700'}
cursor={'pointer'}
_hover={{ color: 'myBlue.600' }}
position={'absolute'}
right={-2}
top={-2}
onClick={() => {
setImgBase64Array((prev) => {
prev.splice(index, 1);
return [...prev];
});
}}
className="close-icon"
display={['', 'none']}
/>
<Image
alt={'img'}
src={src}
w={'80px'}
h={'80px'}
rounded={'md'}
objectFit={'cover'}
/>
</Box>
))}
</Flex>
<Spinner />
</Flex>
)}
<MyIcon
name={'closeSolid'}
w={'16px'}
h={'16px'}
color={'myGray.700'}
cursor={'pointer'}
_hover={{ color: 'myBlue.600' }}
position={'absolute'}
bg={'white'}
right={'-8px'}
top={'-8px'}
onClick={() => {
setFileList((state) => state.filter((file) => file.id !== item.id));
}}
className="close-icon"
display={['', 'none']}
/>
{item.type === FileTypeEnum.image && (
<Image
alt={'img'}
src={item.icon}
w={['50px', '70px']}
h={['50px', '70px']}
borderRadius={'md'}
objectFit={'contain'}
/>
)}
</Box>
))}
</Flex>
<Flex alignItems={'flex-end'} mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
{/* file selector */}
{showFileSelector && (
<Flex
h={'22px'}
alignItems={'center'}
justifyContent={'center'}
cursor={'pointer'}
transform={'translateY(1px)'}
onClick={() => {
if (isSpeaking) return;
onOpenSelectFile;
}}
>
<MyTooltip label={t('core.chat.Select File')}>
<MyIcon name={'core/chat/fileSelect'} />
</MyTooltip>
<File onSelect={onSelectFile} />
</Flex>
)}
{/* input area */}
<Textarea
ref={TextareaDom}
py={0}
pr={['45px', '55px']}
pl={['36px', '40px']}
mt={imgBase64Array.length > 0 ? 4 : 0}
pl={2}
pr={['30px', '48px']}
border={'none'}
_focusVisible={{
border: 'none'
@@ -255,29 +339,24 @@ const MessageInput = ({
const clipboardData = e.clipboardData;
if (clipboardData) {
const items = clipboardData.items;
const files: File[] = [];
for (let i = 0; i < items.length; i++) {
const item = items[i];
if (item.kind === 'file') {
const file = item.getAsFile();
files.push(file as File);
}
}
setFileList(files);
const files = Array.from(items)
.map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
.filter((item) => item) as File[];
onSelectFile(files);
}
}}
/>
<Flex
position={'absolute'}
alignItems={'center'}
right={['12px', '14px']}
bottom={['15px', '13px']}
position={'absolute'}
right={[2, 4]}
bottom={['10px', '12px']}
>
{/* voice-input */}
{!shareId && !havInput && !isChatting && (
<>
<canvas
ref={canvasRef as any}
ref={canvasRef}
style={{
height: '30px',
width: isSpeaking && !isTransCription ? '100px' : 0,
@@ -289,6 +368,7 @@ const MessageInput = ({
mr={2}
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['26px', '32px']}
w={['26px', '32px']}
borderRadius={'md'}
@@ -314,11 +394,14 @@ const MessageInput = ({
)}
{/* send and stop icon */}
{isSpeaking ? (
<Box color={'#5A646E'}>{speakingTimeString}</Box>
<Box color={'#5A646E'} w={'36px'} textAlign={'right'}>
{speakingTimeString}
</Box>
) : (
<Flex
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['28px', '32px']}
w={['28px', '32px']}
borderRadius={'md'}
@@ -356,9 +439,9 @@ const MessageInput = ({
</Flex>
)}
</Flex>
</Box>
</Flex>
</Box>
</>
</Box>
);
};

View File

@@ -17,7 +17,17 @@ import { useToast } from '@/web/common/hooks/useToast';
import { useAudioPlay } from '@/web/common/utils/voice';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useCopyData } from '@/web/common/hooks/useCopyData';
import { Box, Card, Flex, Input, Button, useTheme, BoxProps, FlexProps } from '@chakra-ui/react';
import {
Box,
Card,
Flex,
Input,
Button,
useTheme,
BoxProps,
FlexProps,
Image
} from '@chakra-ui/react';
import { feConfigs } from '@/web/common/system/staticData';
import { eventBus } from '@/web/common/utils/eventbus';
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
@@ -90,6 +100,7 @@ type Props = {
appAvatar?: string;
userAvatar?: string;
userGuideModule?: ModuleItemType;
showFileSelector?: boolean;
active?: boolean; // can use
onUpdateVariable?: (e: Record<string, any>) => void;
onStartChat?: (e: StartChatFnProps) => Promise<{
@@ -109,6 +120,7 @@ const ChatBox = (
appAvatar,
userAvatar,
userGuideModule,
showFileSelector,
active = true,
onUpdateVariable,
onStartChat,
@@ -795,6 +807,7 @@ const ChatBox = (
isChatting={isChatting}
TextareaDom={TextareaDom}
resetInputVal={resetInputVal}
showFileSelector={showFileSelector}
/>
) : null}
@@ -1109,11 +1122,11 @@ function ChatController({
{...controlIconStyle}
mr={1}
name={'core/chat/stopSpeech'}
_hover={{ color: '#E74694' }}
color={'#E74694'}
onClick={() => cancelAudio()}
/>
</MyTooltip>
{/* <MyIcon name={'loading'} w={'16px'} /> */}
<Image src="/icon/speaking.gif" w={'23px'} alt={''} />
</Flex>
) : (
<MyTooltip label={t('core.app.TTS')}>

View File

@@ -1,12 +1,25 @@
import { Box, Flex } from '@chakra-ui/react';
import MdImage from '../img/Image';
import { useMemo } from 'react';
const ImageBlock = ({ images }: { images: string }) => {
const formatData = useMemo(
() =>
images.split('\n').map((item) => {
try {
return JSON.parse(item) as { src: string };
} catch (error) {
return { src: '' };
}
}),
[images]
);
return (
<Flex w={'100%'} wrap={'wrap'}>
{JSON.parse(images).map((src: string) => {
<Flex alignItems={'center'} wrap={'wrap'} gap={4}>
{formatData.map(({ src }) => {
return (
<Box key={src} mr={2} mb={2} rounded={'md'} flex={'0 0 auto'} w={'100px'} h={'100px'}>
<Box key={src} rounded={'md'} flex={'0 0 auto'} w={'120px'}>
<MdImage src={src} />
</Box>
);

View File

@@ -27,6 +27,7 @@ const MdImage = ({ src }: { src?: string }) => {
borderRadius={'md'}
src={src}
alt={''}
maxH={'150px'}
fallbackSrc={'/imgs/errImg.png'}
fallbackStrategy={'onError'}
cursor={succeed ? 'pointer' : 'default'}
@@ -45,7 +46,6 @@ const MdImage = ({ src }: { src?: string }) => {
<Modal isOpen={isOpen} onClose={onClose}>
<ModalOverlay />
<ModalContent m={'auto'}>
<ModalCloseButton />
<Image
src={src}
alt={''}
@@ -55,6 +55,7 @@ const MdImage = ({ src }: { src?: string }) => {
objectFit={'contain'}
/>
</ModalContent>
<ModalCloseButton bg={'myWhite.500'} zIndex={999999} />
</Modal>
</Skeleton>
);

View File

@@ -16,6 +16,7 @@ import MyTooltip from '@/components/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
import { getGuideModule } from '@/global/core/app/modules/utils';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
export type ChatTestComponentRef = {
resetChatTest: () => void;
@@ -115,6 +116,7 @@ const ChatTest = (
userAvatar={userInfo?.avatar}
showMarkIcon
userGuideModule={getGuideModule(modules)}
showFileSelector={checkChatSupportSelectFileByModules(modules)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

View File

@@ -52,19 +52,19 @@ const UpdateInviteModal = () => {
return (
<MyModal
isOpen={inviteList.length > 0}
isOpen={inviteList && inviteList.length > 0}
title={
<>
<Box>{t('user.team.Processing invitations')}</Box>
<Box fontWeight={'normal'} fontSize={'sm'} color={'myGray.500'}>
{t('user.team.Processing invitations Tips', { amount: inviteList.length })}
{t('user.team.Processing invitations Tips', { amount: inviteList?.length })}
</Box>
</>
}
maxW={['90vw', '500px']}
>
<ModalBody>
{inviteList.map((item) => (
{inviteList?.map((item) => (
<Flex
key={item.teamId}
alignItems={'center'}

View File

@@ -95,7 +95,7 @@ function App({ Component, pageProps }: AppProps) {
<title>{feConfigs?.systemTitle || process.env.SYSTEM_NAME || 'GPT'}</title>
<meta
name="description"
content="FastGPT is a knowledge-based question answering system built on the LLM. It offers out-of-the-box data processing and model invocation capabilities. Moreover, it allows for workflow orchestration through Flow visualization, thereby enabling complex question and answer scenarios!"
content="FastGPT 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!"
/>
<meta
name="viewport"

View File

@@ -4,17 +4,18 @@ import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadMongoImg } from '@fastgpt/service/common/file/image/controller';
type Props = { base64Img: string };
type Props = { base64Img: string; expiredTime?: Date };
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { userId } = await authCert({ req, authToken: true });
const { base64Img } = req.body as Props;
const { teamId } = await authCert({ req, authToken: true });
const { base64Img, expiredTime } = req.body as Props;
const data = await uploadMongoImg({
userId,
base64Img
teamId,
base64Img,
expiredTime
});
jsonRes(res, { data });

View File

@@ -17,10 +17,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, role }),
type: 'dataset'
});
}).lean();
const data = datasets.map((item) => ({
...item.toJSON(),
...item,
vectorModel: getVectorModel(item.vectorModel),
agentModel: getQAModel(item.agentModel),
canWrite: String(item.tmbId) === tmbId,

View File

@@ -22,11 +22,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
...mongoRPermission({ teamId, tmbId, role }),
...(parentId !== undefined && { parentId: parentId || null }),
...(type && { type })
}).sort({ updateTime: -1 });
})
.sort({ updateTime: -1 })
.lean();
const data = await Promise.all(
datasets.map(async (item) => ({
...item.toJSON(),
...item,
vectorModel: getVectorModel(item.vectorModel),
agentModel: getQAModel(item.agentModel),
canWrite,

View File

@@ -59,6 +59,7 @@ import { useAppStore } from '@/web/core/app/store/useAppStore';
import PermissionIconText from '@/components/support/permission/IconText';
import QGSwitch from '../QGSwitch';
import TTSSelect from '../TTSSelect';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
const VariableEditModal = dynamic(() => import('@/components/core/module/VariableEditModal'));
const InfoModal = dynamic(() => import('../InfoModal'));
@@ -676,6 +677,7 @@ const ChatTest = ({ appId }: { appId: string }) => {
userAvatar={userInfo?.avatar}
showMarkIcon
userGuideModule={getGuideModule(modules)}
showFileSelector={checkChatSupportSelectFileByModules(modules)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

View File

@@ -1,7 +1,7 @@
import MyIcon from '@/components/Icon';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { Box, Button, Flex, ModalBody, useDisclosure } from '@chakra-ui/react';
import { Box, Button, Flex, ModalBody, useDisclosure, Image } from '@chakra-ui/react';
import React, { useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MySelect from '@/components/Select';
@@ -130,9 +130,9 @@ const TTSSelect = ({
<Flex mt={10} justifyContent={'end'}>
{audioPlaying ? (
<Flex>
<MyIcon name={'core/chat/speaking'} w={'16px'} />
<Image src="/icon/speaking.gif" w={'24px'} alt={''} />
<Button
ml={3}
ml={2}
variant={'gray'}
isLoading={audioLoading}
leftIcon={<MyIcon name={'core/chat/stopSpeech'} w={'16px'} />}

View File

@@ -7,6 +7,7 @@ import Avatar from '@/components/Avatar';
import ToolMenu from './ToolMenu';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { useRouter } from 'next/router';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
const ChatHeader = ({
history,
@@ -27,7 +28,10 @@ const ChatHeader = ({
const theme = useTheme();
const { isPc } = useSystemStore();
const title = useMemo(
() => history[history.length - 2]?.value?.slice(0, 8) || appName || '新对话',
() =>
chatContentReplaceBlock(history[history.length - 2]?.value)?.slice(0, 8) ||
appName ||
'新对话',
[appName, history]
);

View File

@@ -32,6 +32,8 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { useUserStore } from '@/web/support/user/useUserStore';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const router = useRouter();
@@ -78,7 +80,10 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
abortSignal: controller
});
const newTitle = prompts[0].content?.slice(0, 20) || '新对话';
const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
'新对话';
// update history
if (completionChatId !== chatId) {
@@ -363,6 +368,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
appAvatar={chatData.app.avatar}
userAvatar={userInfo?.avatar}
userGuideModule={chatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}

View File

@@ -20,6 +20,7 @@ import PageContainer from '@/components/PageContainer';
import ChatHeader from './components/ChatHeader';
import ChatHistorySlider from './components/ChatHistorySlider';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
const OutLink = ({
shareId,
@@ -254,6 +255,9 @@ const OutLink = ({
appAvatar={shareChatData.app.avatar}
userAvatar={shareChatData.userAvatar}
userGuideModule={shareChatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(
shareChatData.app.chatModels
)}
feedbackType={'user'}
onUpdateVariable={(e) => {
setShareChatData((state) => ({

View File

@@ -29,16 +29,16 @@ export async function generateQA(): Promise<any> {
error = false
} = await (async () => {
try {
const data = (
await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.qa,
lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
},
{
lockTime: new Date()
}
).select({
const data = await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.qa,
lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
},
{
lockTime: new Date()
}
)
.select({
_id: 1,
userId: 1,
teamId: 1,
@@ -50,7 +50,7 @@ export async function generateQA(): Promise<any> {
billId: 1,
prompt: 1
})
)?.toJSON();
.lean();
// task preemption
if (!data) {

View File

@@ -24,16 +24,16 @@ export async function generateVector(): Promise<any> {
error = false
} = await (async () => {
try {
const data = (
await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.chunk,
lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
},
{
lockTime: new Date()
}
).select({
const data = await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.chunk,
lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
},
{
lockTime: new Date()
}
)
.select({
_id: 1,
userId: 1,
teamId: 1,
@@ -46,7 +46,7 @@ export async function generateVector(): Promise<any> {
model: 1,
billId: 1
})
)?.toJSON();
.lean();
// task preemption
if (!data) {

View File

@@ -21,6 +21,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
@@ -106,6 +107,21 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
temperature = Math.max(temperature, 0.01);
const ai = getAIApi(user.openaiAccount, 480000);
const concatMessages = [
...(modelConstantsData.defaultSystemChatPrompt
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystemChatPrompt
}
]
: []),
...messages.map((item) => ({
...item,
content: modelConstantsData.vision ? formatStr2ChatContent(item.content) : item.content
}))
];
const response = await ai.chat.completions.create(
{
model,
@@ -113,17 +129,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
max_tokens,
stream,
seed: temperature < 0.3 ? 1 : undefined,
messages: [
...(modelConstantsData.defaultSystemChatPrompt
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystemChatPrompt
}
]
: []),
...messages
]
messages: concatMessages
},
{
headers: {

View File

@@ -4,6 +4,7 @@ import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { addLog } from '@fastgpt/service/common/mongo/controller';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
type Props = {
chatId: string;
@@ -51,12 +52,17 @@ export async function saveChat({
)
];
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';
if (chatHistory) {
promise.push(
MongoChat.updateOne(
{ chatId },
{
title: content[0].value.slice(0, 20),
title,
updateTime: new Date()
}
)
@@ -69,7 +75,7 @@ export async function saveChat({
tmbId,
appId,
variables,
title: content[0].value.slice(0, 20),
title,
source,
shareId
})

View File

@@ -1,8 +1,8 @@
import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
import { AxiosProgressEvent } from 'axios';
export const postUploadImg = (base64Img: string) =>
POST<string>('/common/file/uploadImage', { base64Img });
export const postUploadImg = (base64Img: string, expiredTime?: Date) =>
POST<string>('/common/file/uploadImage', { base64Img, expiredTime });
export const postUploadFiles = (
data: FormData,

View File

@@ -37,12 +37,14 @@ export const compressImgAndUpload = ({
file,
maxW = 200,
maxH = 200,
maxSize = 1024 * 100
maxSize = 1024 * 100, // 100kb
expiredTime
}: {
file: File;
maxW?: number;
maxH?: number;
maxSize?: number;
expiredTime?: Date;
}) =>
new Promise<string>((resolve, reject) => {
const reader = new FileReader();
@@ -87,7 +89,7 @@ export const compressImgAndUpload = ({
const src = await (async () => {
try {
const src = await postUploadImg(compressedDataUrl);
const src = await postUploadImg(compressedDataUrl, expiredTime);
return src;
} catch (error) {
return compressedDataUrl;

View File

@@ -3,9 +3,13 @@ import { Box } from '@chakra-ui/react';
import { useToast } from '@/web/common/hooks/useToast';
import { useTranslation } from 'next-i18next';
export const useSelectFile = (props?: { fileType?: string; multiple?: boolean }) => {
export const useSelectFile = (props?: {
fileType?: string;
multiple?: boolean;
maxCount?: number;
}) => {
const { t } = useTranslation();
const { fileType = '*', multiple = false } = props || {};
const { fileType = '*', multiple = false, maxCount = 10 } = props || {};
const { toast } = useToast();
const SelectFileDom = useRef<HTMLInputElement>(null);
@@ -19,7 +23,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
multiple={multiple}
onChange={(e) => {
if (!e.target.files || e.target.files?.length === 0) return;
if (e.target.files.length > 10) {
if (e.target.files.length > maxCount) {
return toast({
status: 'warning',
title: t('file.Select a maximum of 10 files')
@@ -30,7 +34,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
/>
</Box>
),
[fileType, multiple, t, toast]
[fileType, maxCount, multiple, t, toast]
);
const onOpen = useCallback(() => {

View File

@@ -1,4 +1,4 @@
import { useState, useCallback, useEffect, useMemo } from 'react';
import { useState, useCallback, useEffect, useMemo, useRef } from 'react';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { AppTTSConfigType } from '@/types/app';
@@ -14,6 +14,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
const [audio, setAudio] = useState<HTMLAudioElement>();
const [audioLoading, setAudioLoading] = useState(false);
const [audioPlaying, setAudioPlaying] = useState(false);
const audioController = useRef(new AbortController());
// Check whether the voice is supported
const hasAudio = useMemo(() => {
@@ -49,12 +50,15 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
return resolve({ buffer });
}
audioController.current = new AbortController();
/* request tts */
const response = await fetch('/api/core/chat/item/getSpeech', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
signal: audioController.current.signal,
body: JSON.stringify({
chatItemId,
ttsConfig,
@@ -120,6 +124,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
audio.src = '';
}
window.speechSynthesis?.cancel();
audioController.current?.abort();
setAudioPlaying(false);
}, [audio]);

View File

@@ -7,6 +7,7 @@ import type {
} from '@fastgpt/global/support/outLink/api.d';
import type { ChatSiteItemType } from '@fastgpt/global/core/chat/type.d';
import { HUMAN_ICON } from '@fastgpt/global/core/chat/constants';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
type State = {
shareChatData: ShareChatType;
@@ -64,6 +65,10 @@ export const useShareChatStore = create<State>()(
shareChatHistory: [],
saveChatResponse({ chatId, prompts, variables, shareId }) {
const chatHistory = get().shareChatHistory.find((item) => item.chatId === chatId);
const newTitle =
chatContentReplaceBlock(prompts[prompts.length - 2]?.value).slice(0, 20) ||
prompts[prompts.length - 1]?.value?.slice(0, 20) ||
'Chat';
const historyList = (() => {
if (chatHistory) {
@@ -71,7 +76,7 @@ export const useShareChatStore = create<State>()(
item.chatId === chatId
? {
...item,
title: prompts[prompts.length - 2]?.value,
title: newTitle,
updateTime: new Date(),
chats: chatHistory.chats.concat(prompts).slice(-30),
variables
@@ -82,7 +87,7 @@ export const useShareChatStore = create<State>()(
return get().shareChatHistory.concat({
chatId,
shareId,
title: prompts[prompts.length - 2]?.value,
title: newTitle,
updateTime: new Date(),
chats: prompts,
variables

View File

@@ -0,0 +1,21 @@
import { chatModelList } from '@/web/common/system/staticData';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
for (const model of models) {
const modelData = chatModelList.find((item) => item.model === model || item.name === model);
if (modelData?.vision) {
return true;
}
}
return false;
}
export function checkChatSupportSelectFileByModules(modules: ModuleItemType[] = []) {
const chatModules = modules.filter((item) => item.flowType === FlowNodeTypeEnum.chatNode);
const models: string[] = chatModules.map(
(item) => item.inputs.find((item) => item.key === 'model')?.value || ''
);
return checkChatSupportSelectFileByChatModels(models);
}
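
A small behavioural sketch, assuming the default config above where only gpt-4-vision-preview sets `vision: true` (the other model name is a placeholder):

```ts
import {
  checkChatSupportSelectFileByChatModels,
  checkChatSupportSelectFileByModules
} from '@/web/core/chat/utils';

// true: the list references a model whose config has `vision: true`
checkChatSupportSelectFileByChatModels(['gpt-4-vision-preview']);

// false: none of the configured models supports image input
checkChatSupportSelectFileByChatModels(['gpt-3.5-turbo']);

// In the app editor / chat test, the module list is scanned for chatNode model inputs:
// <ChatBox showFileSelector={checkChatSupportSelectFileByModules(modules)} ... />
```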

View File

@@ -83,7 +83,7 @@ const Button = defineStyleConfig({
_hover: {
color: 'myBlue.600',
bg: 'myWhite.400',
boxShadow: '0 0 5px rgba(0,0,0,0.2)'
boxShadow: '0 0 5px rgba(0,0,0,0.1)'
},
_active: {
color: 'myBlue.700'