feat: vision model (#489)

* mongo init

* perf: mongo connect

* perf: tts

perf: whisper and tts

perf: tts whisper permission

log

rebase (#488)

* perf: modal

* i18n

* perf: schema lean

* feat: vision model format

* perf: tts loading

* perf: static data

* perf: tts

* feat: image

* perf: image

* perf: upload image and title

* perf: image size

* doc

* perf: color

* doc

* fix: cannot select file while speaking

* doc
Author: Archer
Date: 2023-11-18 15:42:35 +08:00
Committed by: GitHub
Parent: 70f3373246
Commit: c5664c7e90
58 changed files with 650 additions and 254 deletions

View File

@@ -36,6 +36,7 @@ weight: 520
"quoteMaxToken": 2000, // 最大引用内容长度 "quoteMaxToken": 2000, // 最大引用内容长度
"maxTemperature": 1.2, // 最大温度值 "maxTemperature": 1.2, // 最大温度值
"censor": false, // 是否开启敏感词过滤(商业版) "censor": false, // 是否开启敏感词过滤(商业版)
"vision": false, // 支持图片输入
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
}, },
{ {
@@ -47,6 +48,7 @@ weight: 520
"quoteMaxToken": 8000, "quoteMaxToken": 8000,
"maxTemperature": 1.2, "maxTemperature": 1.2,
"censor": false, "censor": false,
"vision": false,
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
}, },
{ {
@@ -58,6 +60,19 @@ weight: 520
"quoteMaxToken": 4000, "quoteMaxToken": 4000,
"maxTemperature": 1.2, "maxTemperature": 1.2,
"censor": false, "censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
"model": "gpt-4-vision-preview",
"name": "GPT4-Vision",
"maxContext": 128000,
"maxResponse": 4000,
"price": 0,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"censor": false,
"vision": true,
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
} }
], ],

View File

@@ -0,0 +1,16 @@
---
title: 'V4.6.1'
description: 'FastGPT V4.6.1'
icon: 'upgrade'
draft: false
toc: true
weight: 835
---
## V4.6.1 Features
1. New - GPT4-v model support
2. New - whisper voice input
3. Optimized - TTS streaming
4. Optimized - TTS caching

View File

@@ -9,6 +9,7 @@ export type ChatModelItemType = LLMModelItemType & {
quoteMaxToken: number;
maxTemperature: number;
censor?: boolean;
vision?: boolean;
defaultSystemChatPrompt?: string;
};

View File

@@ -17,6 +17,7 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 2000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
@@ -28,6 +29,7 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 8000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
@@ -39,6 +41,19 @@ export const defaultChatModels: ChatModelItemType[] = [
quoteMaxToken: 4000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
model: 'gpt-4-vision-preview',
name: 'GPT4-Vision',
maxContext: 128000,
maxResponse: 4000,
price: 0,
quoteMaxToken: 100000,
maxTemperature: 1.2,
censor: false,
vision: true,
defaultSystemChatPrompt: ''
}
];

View File

@@ -5,12 +5,14 @@ import type {
ChatCompletionMessageParam,
ChatCompletionContentPart
} from 'openai/resources';
export type ChatCompletionContentPart = ChatCompletionContentPart;
export type ChatCompletionCreateParams = ChatCompletionCreateParams;
- export type ChatMessageItemType = Omit<ChatCompletionMessageParam> & {
+ export type ChatMessageItemType = Omit<ChatCompletionMessageParam, 'name'> & {
name?: any;
dataId?: string;
content: any;
- };
+ } & any;
export type ChatCompletion = ChatCompletion;
export type StreamChatType = Stream<ChatCompletionChunk>;

View File

@@ -54,3 +54,6 @@ export const ChatSourceMap = {
export const HUMAN_ICON = `/icon/human.svg`;
export const LOGO_ICON = `/icon/logo.svg`;
export const IMG_BLOCK_KEY = 'img-block';
export const FILE_BLOCK_KEY = 'file-block';

View File

@@ -0,0 +1,6 @@
import { IMG_BLOCK_KEY, FILE_BLOCK_KEY } from './constants';
export function chatContentReplaceBlock(content: string = '') {
const regex = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 'g');
return content.replace(regex, '').trim();
}
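A minimal usage sketch (not part of this commit; the sample string is illustrative): stripping the img-block so it does not leak into derived chat titles.

import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';

// A chat message carrying an image block plus plain text.
const raw = '```img-block\n{"src":"https://1.png"}\n```\nWhats in this image?';
// The img-block fence is removed and the result trimmed, so only the user text remains.
const title = chatContentReplaceBlock(raw).slice(0, 20); // "Whats in this image?"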

View File

@@ -33,3 +33,4 @@ try {
export const MongoTTSBuffer: Model<TTSBufferSchemaType> =
models[collectionName] || model(collectionName, TTSBufferSchema);
MongoTTSBuffer.syncIndexes();

View File

@@ -5,12 +5,26 @@ export function getMongoImgUrl(id: string) {
return `${imageBaseUrl}${id}`;
}
- export async function uploadMongoImg({ base64Img, userId }: { base64Img: string; userId: string }) {
+ export const maxImgSize = 1024 * 1024 * 12;
export async function uploadMongoImg({
base64Img,
teamId,
expiredTime
}: {
base64Img: string;
teamId: string;
expiredTime?: Date;
}) {
if (base64Img.length > maxImgSize) {
return Promise.reject('Image too large');
}
const base64Data = base64Img.split(',')[1];
const { _id } = await MongoImage.create({
- userId,
- binary: Buffer.from(base64Data, 'base64')
+ teamId,
+ binary: Buffer.from(base64Data, 'base64'),
expiredTime
});
return getMongoImgUrl(String(_id));
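For orientation, a hedged server-side sketch (the helper name and call site are assumptions, not code from this commit) of how a chat image could be stored with the new teamId scope and the 30-day expiry used by the chat input:

import { addDays } from 'date-fns';
import { uploadMongoImg } from '@fastgpt/service/common/file/image/controller';

async function saveChatImage(base64Img: string, teamId: string) {
  // Keep pasted chat images for 30 days; the TTL index on expiredTime removes them afterwards.
  return uploadMongoImg({
    base64Img,
    teamId,
    expiredTime: addDays(new Date(), 30)
  });
}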

View File

@@ -1,16 +1,27 @@
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { connectionMongo, type Model } from '../../mongo';
const { Schema, model, models } = connectionMongo;
const ImageSchema = new Schema({
- userId: {
+ teamId: {
type: Schema.Types.ObjectId,
- ref: 'user',
+ ref: TeamCollectionName
- required: true
},
binary: {
type: Buffer
},
expiredTime: {
type: Date
}
});
- export const MongoImage: Model<{ userId: string; binary: Buffer }> =
+ try {
ImageSchema.index({ expiredTime: 1 }, { expireAfterSeconds: 60 });
} catch (error) {
console.log(error);
}
export const MongoImage: Model<{ teamId: string; binary: Buffer }> =
models['image'] || model('image', ImageSchema);
MongoImage.syncIndexes();

View File

@@ -67,3 +67,5 @@ try {
export const MongoApp: Model<AppType> =
models[appCollectionName] || model(appCollectionName, AppSchema);
MongoApp.syncIndexes();

View File

@@ -83,3 +83,5 @@ try {
export const MongoChatItem: Model<ChatItemType> =
models['chatItem'] || model('chatItem', ChatItemSchema);
MongoChatItem.syncIndexes();

View File

@@ -92,7 +92,7 @@ const ChatSchema = new Schema({
});
try {
- ChatSchema.index({ userId: 1 });
+ ChatSchema.index({ tmbId: 1 });
ChatSchema.index({ updateTime: -1 });
ChatSchema.index({ appId: 1 });
} catch (error) {
@@ -101,3 +101,4 @@ try {
export const MongoChat: Model<ChatType> =
models[chatCollectionName] || model(chatCollectionName, ChatSchema);
MongoChat.syncIndexes();

View File

@@ -1,7 +1,8 @@
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
- import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+ import { ChatRoleEnum, IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { countMessagesTokens, countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { adaptRole_Chat2Message } from '@fastgpt/global/core/chat/adapt';
import type { ChatCompletionContentPart } from '@fastgpt/global/core/ai/type.d';
/* slice chat context by tokens */
export function ChatContextFilter({
@@ -51,3 +52,101 @@ export function ChatContextFilter({
return [...systemPrompts, ...chats];
}
/**
Convert a string into vision-model content. Image/file blocks are intercepted using the markdown code-block rule below:
@rule:
```img-block
{src:""}
{src:""}
```
```file-block
{name:"",src:""},
{name:"",src:""}
```
@example:
Whats in this image?
```img-block
{src:"https://1.png"}
```
@return
[
{ type: 'text', text: 'Whats in this image?' },
{
type: 'image_url',
image_url: {
url: 'https://1.png'
}
}
]
*/
export function formatStr2ChatContent(str: string) {
const content: ChatCompletionContentPart[] = [];
let lastIndex = 0;
const regex = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 'g');
let match;
while ((match = regex.exec(str)) !== null) {
// add previous text
if (match.index > lastIndex) {
const text = str.substring(lastIndex, match.index).trim();
if (text) {
content.push({ type: 'text', text });
}
}
const blockType = match[1].trim();
if (blockType === IMG_BLOCK_KEY) {
const blockContentLines = match[2].trim().split('\n');
const jsonLines = blockContentLines.map((item) => {
try {
return JSON.parse(item) as { src: string };
} catch (error) {
return { src: '' };
}
});
for (const item of jsonLines) {
if (!item.src) throw new Error("image block's content error");
}
content.push(
...jsonLines.map((item) => ({
type: 'image_url' as any,
image_url: {
url: item.src
}
}))
);
}
lastIndex = regex.lastIndex;
}
// add remaining text
if (lastIndex < str.length) {
const remainingText = str.substring(lastIndex).trim();
if (remainingText) {
content.push({ type: 'text', text: remainingText });
}
}
// Merge consecutive content parts when both are of type 'text'
for (let i = 0; i < content.length - 1; i++) {
const currentContent = content[i];
const nextContent = content[i + 1];
if (currentContent.type === 'text' && nextContent.type === 'text') {
currentContent.text += nextContent.text;
content.splice(i + 1, 1);
i--;
}
}
if (content.length === 1 && content[0].type === 'text') {
return content[0].text;
}
return content ? content : null;
}
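An illustrative call (the input string is an assumption, mirroring the docstring example) showing how a vision prompt is split into OpenAI content parts:

import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';

const parts = formatStr2ChatContent(
  'Whats in this image?\n```img-block\n{"src":"https://1.png"}\n```'
);
// parts === [
//   { type: 'text', text: 'Whats in this image?' },
//   { type: 'image_url', image_url: { url: 'https://1.png' } }
// ]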

View File

@@ -22,9 +22,9 @@ export async function findDatasetIdTreeByTopDatasetId(
}
export async function getCollectionWithDataset(collectionId: string) {
- const data = (
- await MongoDatasetCollection.findById(collectionId).populate('datasetId')
- )?.toJSON() as CollectionWithDatasetType;
+ const data = (await MongoDatasetCollection.findById(collectionId)
+ .populate('datasetId')
+ .lean()) as CollectionWithDatasetType;
if (!data) {
return Promise.reject('Collection is not exist');
}

View File

@@ -76,3 +76,4 @@ try {
export const MongoDatasetData: Model<DatasetDataSchemaType> =
models[DatasetDataCollectionName] || model(DatasetDataCollectionName, DatasetDataSchema);
MongoDatasetData.syncIndexes();

View File

@@ -82,3 +82,4 @@ try {
export const MongoDataset: Model<DatasetSchemaType> =
models[DatasetCollectionName] || model(DatasetCollectionName, DatasetSchema);
MongoDataset.syncIndexes();

View File

@@ -104,3 +104,5 @@ try {
export const MongoDatasetTraining: Model<DatasetTrainingSchemaType> =
models[DatasetTrainingCollectionName] || model(DatasetTrainingCollectionName, TrainingDataSchema);
MongoDatasetTraining.syncIndexes();

View File

@@ -46,10 +46,11 @@ const PluginSchema = new Schema({
});
try {
- PluginSchema.index({ userId: 1 });
+ PluginSchema.index({ tmbId: 1 });
} catch (error) {
console.log(error);
}
export const MongoPlugin: Model<PluginItemSchema> =
models[ModuleCollectionName] || model(ModuleCollectionName, PluginSchema);
MongoPlugin.syncIndexes();

View File

@@ -31,3 +31,4 @@ const PromotionRecordSchema = new Schema({
export const MongoPromotionRecord: Model<PromotionRecordType> =
models['promotionRecord'] || model('promotionRecord', PromotionRecordSchema);
MongoPromotionRecord.syncIndexes();

View File

@@ -70,3 +70,4 @@ const OpenApiSchema = new Schema(
export const MongoOpenApi: Model<OpenApiSchema> =
models['openapi'] || model('openapi', OpenApiSchema);
MongoOpenApi.syncIndexes();

View File

@@ -71,3 +71,5 @@ const OutLinkSchema = new Schema({
export const MongoOutLink: Model<SchemaType> =
models['outlinks'] || model('outlinks', OutLinkSchema);
MongoOutLink.syncIndexes();

View File

@@ -22,12 +22,12 @@ export async function authApp({
}
> {
const result = await parseHeaderCert(props);
- const { userId, teamId, tmbId } = result;
+ const { teamId, tmbId } = result;
const { role } = await getTeamInfoByTmbId({ tmbId });
const { app, isOwner, canWrite } = await (async () => {
// get app
- const app = (await MongoApp.findOne({ _id: appId, teamId }))?.toJSON();
+ const app = await MongoApp.findOne({ _id: appId, teamId }).lean();
if (!app) {
return Promise.reject(AppErrEnum.unAuthApp);
}

View File

@@ -24,9 +24,9 @@ export async function authChat({
const { chat, isOwner, canWrite } = await (async () => {
// get chat
- const chat = (
- await MongoChat.findOne({ chatId, teamId }).populate('appId')
- )?.toJSON() as ChatWithAppSchema;
+ const chat = (await MongoChat.findOne({ chatId, teamId })
+ .populate('appId')
+ .lean()) as ChatWithAppSchema;
if (!chat) {
return Promise.reject('Chat is not exists');

View File

@@ -31,7 +31,7 @@ export async function authDataset({
const { role } = await getTeamInfoByTmbId({ tmbId });
const { dataset, isOwner, canWrite } = await (async () => {
- const dataset = (await MongoDataset.findOne({ _id: datasetId, teamId }))?.toObject();
+ const dataset = await MongoDataset.findOne({ _id: datasetId, teamId }).lean();
if (!dataset) {
return Promise.reject(DatasetErrEnum.unAuthDataset);

View File

@@ -64,3 +64,4 @@ const UserSchema = new Schema({
export const MongoUser: Model<UserModelSchema> =
models[userCollectionName] || model(userCollectionName, UserSchema);
MongoUser.syncIndexes();

View File

@@ -59,3 +59,4 @@ try {
}
export const MongoBill: Model<BillType> = models['bill'] || model('bill', BillSchema);
MongoBill.syncIndexes();

View File

@@ -15,6 +15,7 @@
"quoteMaxToken": 2000, "quoteMaxToken": 2000,
"maxTemperature": 1.2, "maxTemperature": 1.2,
"censor": false, "censor": false,
"vision": false,
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
}, },
{ {
@@ -26,6 +27,7 @@
"quoteMaxToken": 8000, "quoteMaxToken": 8000,
"maxTemperature": 1.2, "maxTemperature": 1.2,
"censor": false, "censor": false,
"vision": false,
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
}, },
{ {
@@ -37,6 +39,19 @@
"quoteMaxToken": 4000, "quoteMaxToken": 4000,
"maxTemperature": 1.2, "maxTemperature": 1.2,
"censor": false, "censor": false,
"vision": false,
"defaultSystemChatPrompt": ""
},
{
"model": "gpt-4-vision-preview",
"name": "GPT4-Vision",
"maxContext": 128000,
"maxResponse": 4000,
"price": 0,
"quoteMaxToken": 100000,
"maxTemperature": 1.2,
"censor": false,
"vision": true,
"defaultSystemChatPrompt": "" "defaultSystemChatPrompt": ""
} }
], ],

Binary file not shown (new image, 46 KiB).

View File

@@ -191,6 +191,7 @@
"Update Success": "Update Success", "Update Success": "Update Success",
"Update Successful": "Update Successful", "Update Successful": "Update Successful",
"Update Time": "Update Time", "Update Time": "Update Time",
"Upload File Failed": "Upload File Failed",
"Username": "UserName", "Username": "UserName",
"error": { "error": {
"unKnow": "There was an accident" "unKnow": "There was an accident"

View File

@@ -191,6 +191,7 @@
"Update Success": "更新成功", "Update Success": "更新成功",
"Update Successful": "更新成功", "Update Successful": "更新成功",
"Update Time": "更新时间", "Update Time": "更新时间",
"Upload File Failed": "上传文件失败",
"Username": "用户名", "Username": "用户名",
"error": { "error": {
"unKnow": "出现了点意外~" "unKnow": "出现了点意外~"

View File

@@ -1,7 +1,7 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
- import React, { useRef, useEffect, useCallback, useState, useMemo } from 'react';
+ import React, { useRef, useEffect, useCallback, useState } from 'react';
import { useTranslation } from 'react-i18next';
import MyTooltip from '../MyTooltip';
import MyIcon from '../Icon';
@@ -10,6 +10,23 @@ import { useRouter } from 'next/router';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgAndUpload } from '@/web/common/file/controller';
import { useToast } from '@/web/common/hooks/useToast';
import { customAlphabet } from 'nanoid';
import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
enum FileTypeEnum {
image = 'image',
file = 'file'
}
type FileItemType = {
id: string;
rawFile: File;
type: `${FileTypeEnum}`;
name: string;
icon: string; // img is base64
src?: string;
};
const MessageInput = ({
onChange,
@@ -17,16 +34,19 @@ const MessageInput = ({
onStop,
isChatting,
TextareaDom,
showFileSelector = false,
resetInputVal
}: {
onChange: (e: string) => void;
onSendMessage: (e: string) => void;
onStop: () => void;
isChatting: boolean;
showFileSelector?: boolean;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: string) => void;
}) => {
const { shareId } = useRouter().query as { shareId?: string };
const { toast } = useToast();
const {
isSpeaking,
isTransCription,
@@ -37,64 +57,106 @@ const MessageInput = ({
stream
} = useSpeech({ shareId });
const { isPc } = useSystemStore();
- const canvasRef = useRef<HTMLCanvasElement>();
+ const canvasRef = useRef<HTMLCanvasElement>(null);
const { t } = useTranslation();
const textareaMinH = '22px';
- const havInput = !!TextareaDom.current?.value;
- const { toast } = useToast();
- const [imgBase64Array, setImgBase64Array] = useState<string[]>([]);
- const [fileList, setFileList] = useState<File[]>([]);
- const [imgSrcArray, setImgSrcArray] = useState<string[]>([]);
+ const [fileList, setFileList] = useState<FileItemType[]>([]);
+ const havInput = !!TextareaDom.current?.value || fileList.length > 0;
const { File, onOpen: onOpenSelectFile } = useSelectFile({
- fileType: '.jpg,.png',
- multiple: true
+ fileType: 'image/*',
+ multiple: true,
maxCount: 10
});
- useEffect(() => {
- fileList.forEach((file) => {
- const reader = new FileReader();
- reader.readAsDataURL(file);
- reader.onload = async () => {
- setImgBase64Array((prev) => [...prev, reader.result as string]);
+ const uploadFile = async (file: FileItemType) => {
+ if (file.type === FileTypeEnum.image) {
+ try {
+ const src = await compressImgAndUpload({
+ file: file.rawFile,
+ maxW: 1000,
maxH: 1000,
maxSize: 1024 * 1024 * 5,
// expires after 30 days.
expiredTime: addDays(new Date(), 30)
});
setFileList((state) =>
state.map((item) =>
item.id === file.id
? {
...item,
src: `${location.origin}${src}`
}
: item
)
);
} catch (error) {
setFileList((state) => state.filter((item) => item.id !== file.id));
toast({
status: 'error',
title: t('common.Upload File Failed')
});
}
}
};
- });
- }, [fileList]);
- const onSelectFile = useCallback((e: File[]) => {
- if (!e || e.length === 0) {
+ const onSelectFile = useCallback(async (files: File[]) => {
+ if (!files || files.length === 0) {
return;
}
- setFileList(e);
+ const loadFiles = await Promise.all(
files.map(
(file) =>
new Promise<FileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
const item = {
id: nanoid(),
rawFile: file,
type: FileTypeEnum.image,
name: file.name,
icon: reader.result as string
};
uploadFile(item);
resolve(item);
};
reader.onerror = () => {
reject(reader.error);
};
} else {
resolve({
id: nanoid(),
rawFile: file,
type: FileTypeEnum.file,
name: file.name,
icon: 'pdf'
});
}
})
)
);
setFileList((state) => [...state, ...loadFiles]);
}, []);
const handleSend = useCallback(async () => {
- try {
- for (const file of fileList) {
- const src = await compressImgAndUpload({
- file,
- maxW: 1000,
- maxH: 1000,
- maxSize: 1024 * 1024 * 2
- });
- imgSrcArray.push(src);
- }
- } catch (err: any) {
- toast({
- title: typeof err === 'string' ? err : '文件上传异常',
- status: 'warning'
- });
- }
const textareaValue = TextareaDom.current?.value || '';
- const inputMessage =
- imgSrcArray.length === 0
- ? textareaValue
- : `\`\`\`img-block\n${JSON.stringify(imgSrcArray)}\n\`\`\`\n${textareaValue}`;
+ const images = fileList.filter((item) => item.type === FileTypeEnum.image);
+ const imagesText =
+ images.length === 0
? ''
: `\`\`\`${IMG_BLOCK_KEY}
${images.map((img) => JSON.stringify({ src: img.src })).join('\n')}
\`\`\`
`;
const inputMessage = `${imagesText}${textareaValue}`;
onSendMessage(inputMessage);
- setImgBase64Array([]);
- setImgSrcArray([]);
- }, [TextareaDom, fileList, imgSrcArray, onSendMessage, toast]);
+ setFileList([]);
+ }, [TextareaDom, fileList, onSendMessage]);
useEffect(() => {
if (!stream) {
@@ -107,19 +169,22 @@ const MessageInput = ({
const source = audioContext.createMediaStreamSource(stream);
source.connect(analyser);
const renderCurve = () => {
- renderAudioGraph(analyser, canvasRef.current as HTMLCanvasElement);
+ if (!canvasRef.current) return;
renderAudioGraph(analyser, canvasRef.current);
window.requestAnimationFrame(renderCurve);
};
renderCurve();
}, [renderAudioGraph, stream]);
return (
<>
<Box m={['0 auto', '10px auto']} w={'100%'} maxW={['auto', 'min(800px, 100%)']} px={[0, 5]}>
<Box
- py={imgBase64Array.length > 0 ? '8px' : '18px'}
+ pt={fileList.length > 0 ? '10px' : ['14px', '18px']}
pb={['14px', '18px']}
position={'relative'}
boxShadow={isSpeaking ? `0 0 10px rgba(54,111,255,0.4)` : `0 0 10px rgba(0,0,0,0.2)`}
borderRadius={['none', 'md']}
bg={'white'}
{...(isPc
? {
border: '1px solid',
@@ -129,58 +194,56 @@ const MessageInput = ({
borderTop: '1px solid',
borderTopColor: 'rgba(0,0,0,0.15)'
})}
- borderRadius={['none', 'md']}
- backgroundColor={'white'}
>
{/* translate loading */}
- <Box
+ <Flex
position={'absolute'}
top={0}
bottom={0}
- left={4}
- right={['8px', '4px']}
+ left={0}
+ right={0}
zIndex={10}
- display={'flex'}
+ pl={5}
alignItems={'center'}
bg={'white'}
- pl={['5px', '10px']}
- color="rgba(54,111,255,0.6)"
+ color={'myBlue.600'}
visibility={isSpeaking && isTransCription ? 'visible' : 'hidden'}
>
<Spinner size={'sm'} mr={4} />
{t('chat.Converting to text')}
- </Box>
- {/* file uploader */}
- <Flex
- position={'absolute'}
- alignItems={'center'}
- left={['12px', '14px']}
- bottom={['15px', '13px']}
- h={['26px', '32px']}
- zIndex={10}
- cursor={'pointer'}
- onClick={onOpenSelectFile}
- >
- <MyTooltip label={t('core.chat.Select File')}>
- <MyIcon name={'core/chat/fileSelect'} />
- </MyTooltip>
- <File onSelect={onSelectFile} />
- </Flex>
</Flex>
{/* file preview */}
- <Flex w={'96%'} wrap={'wrap'} ml={4}>
+ <Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
- {imgBase64Array.length > 0 &&
- imgBase64Array.map((src, index) => (
+ {fileList.map((item) => (
<Box
- key={index}
+ key={item.id}
border={'1px solid rgba(0,0,0,0.12)'}
mr={2}
mb={2}
rounded={'md'}
position={'relative'}
_hover={{
- '.close-icon': { display: 'block' }
+ '.close-icon': { display: item.src ? 'block' : 'none' }
}}
>
{/* uploading */}
{!item.src && (
<Flex
position={'absolute'}
alignItems={'center'}
justifyContent={'center'}
rounded={'md'}
color={'myBlue.600'}
top={0}
left={0}
bottom={0}
right={0}
bg={'rgba(255,255,255,0.8)'}
>
<Spinner />
</Flex>
)}
<MyIcon
name={'closeSolid'}
w={'16px'}
@@ -189,35 +252,56 @@ const MessageInput = ({
cursor={'pointer'}
_hover={{ color: 'myBlue.600' }}
position={'absolute'}
- right={-2}
- top={-2}
+ bg={'white'}
+ right={'-8px'}
top={'-8px'}
onClick={() => {
- setImgBase64Array((prev) => {
- prev.splice(index, 1);
- return [...prev];
- });
+ setFileList((state) => state.filter((file) => file.id !== item.id));
}}
className="close-icon"
display={['', 'none']}
/>
{item.type === FileTypeEnum.image && (
<Image
alt={'img'}
- src={src}
- w={'80px'}
- h={'80px'}
- rounded={'md'}
- objectFit={'cover'}
+ src={item.icon}
+ w={['50px', '70px']}
+ h={['50px', '70px']}
+ borderRadius={'md'}
+ objectFit={'contain'}
/>
)}
</Box>
))}
</Flex>
<Flex alignItems={'flex-end'} mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
{/* file selector */}
{showFileSelector && (
<Flex
h={'22px'}
alignItems={'center'}
justifyContent={'center'}
cursor={'pointer'}
transform={'translateY(1px)'}
onClick={() => {
if (isSpeaking) return;
onOpenSelectFile();
}}
>
<MyTooltip label={t('core.chat.Select File')}>
<MyIcon name={'core/chat/fileSelect'} />
</MyTooltip>
<File onSelect={onSelectFile} />
</Flex>
)}
{/* input area */}
<Textarea
ref={TextareaDom}
py={0}
- pr={['45px', '55px']}
- pl={['36px', '40px']}
- mt={imgBase64Array.length > 0 ? 4 : 0}
+ pl={2}
+ pr={['30px', '48px']}
border={'none'}
_focusVisible={{
border: 'none'
@@ -255,29 +339,24 @@ const MessageInput = ({
const clipboardData = e.clipboardData;
if (clipboardData) {
const items = clipboardData.items;
- const files: File[] = [];
- for (let i = 0; i < items.length; i++) {
- const item = items[i];
- if (item.kind === 'file') {
- const file = item.getAsFile();
- files.push(file as File);
- }
- }
- setFileList(files);
+ const files = Array.from(items)
+ .map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
+ .filter((item) => item) as File[];
+ onSelectFile(files);
}
}}
/>
<Flex
- position={'absolute'}
alignItems={'center'}
- right={['12px', '14px']}
- bottom={['15px', '13px']}
+ position={'absolute'}
+ right={[2, 4]}
bottom={['10px', '12px']}
>
{/* voice-input */}
{!shareId && !havInput && !isChatting && (
<>
<canvas
- ref={canvasRef as any}
+ ref={canvasRef}
style={{
height: '30px',
width: isSpeaking && !isTransCription ? '100px' : 0,
@@ -289,6 +368,7 @@ const MessageInput = ({
mr={2}
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['26px', '32px']}
w={['26px', '32px']}
borderRadius={'md'}
@@ -314,11 +394,14 @@ const MessageInput = ({
)}
{/* send and stop icon */}
{isSpeaking ? (
- <Box color={'#5A646E'}>{speakingTimeString}</Box>
+ <Box color={'#5A646E'} w={'36px'} textAlign={'right'}>
{speakingTimeString}
</Box>
) : (
<Flex
alignItems={'center'}
justifyContent={'center'}
flexShrink={0}
h={['28px', '32px']}
w={['28px', '32px']}
borderRadius={'md'}
@@ -356,9 +439,9 @@ const MessageInput = ({
</Flex>
)}
</Flex>
</Flex>
</Box>
</Box>
</>
);
};

View File

@@ -17,7 +17,17 @@ import { useToast } from '@/web/common/hooks/useToast';
import { useAudioPlay } from '@/web/common/utils/voice';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useCopyData } from '@/web/common/hooks/useCopyData';
- import { Box, Card, Flex, Input, Button, useTheme, BoxProps, FlexProps } from '@chakra-ui/react';
+ import {
Box,
Card,
Flex,
Input,
Button,
useTheme,
BoxProps,
FlexProps,
Image
} from '@chakra-ui/react';
import { feConfigs } from '@/web/common/system/staticData';
import { eventBus } from '@/web/common/utils/eventbus';
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
@@ -90,6 +100,7 @@ type Props = {
appAvatar?: string;
userAvatar?: string;
userGuideModule?: ModuleItemType;
showFileSelector?: boolean;
active?: boolean; // can use
onUpdateVariable?: (e: Record<string, any>) => void;
onStartChat?: (e: StartChatFnProps) => Promise<{
@@ -109,6 +120,7 @@ const ChatBox = (
appAvatar,
userAvatar,
userGuideModule,
showFileSelector,
active = true,
onUpdateVariable,
onStartChat,
@@ -795,6 +807,7 @@ const ChatBox = (
isChatting={isChatting}
TextareaDom={TextareaDom}
resetInputVal={resetInputVal}
showFileSelector={showFileSelector}
/>
) : null}
@@ -1109,11 +1122,11 @@ function ChatController({
{...controlIconStyle}
mr={1}
name={'core/chat/stopSpeech'}
- _hover={{ color: '#E74694' }}
+ color={'#E74694'}
onClick={() => cancelAudio()}
/>
</MyTooltip>
- {/* <MyIcon name={'loading'} w={'16px'} /> */}
+ <Image src="/icon/speaking.gif" w={'23px'} alt={''} />
</Flex>
) : (
<MyTooltip label={t('core.app.TTS')}>

View File

@@ -1,12 +1,25 @@
import { Box, Flex } from '@chakra-ui/react';
import MdImage from '../img/Image';
import { useMemo } from 'react';
const ImageBlock = ({ images }: { images: string }) => {
const formatData = useMemo(
() =>
images.split('\n').map((item) => {
try {
return JSON.parse(item) as { src: string };
} catch (error) {
return { src: '' };
}
}),
[images]
);
return (
- <Flex w={'100%'} wrap={'wrap'}>
+ <Flex alignItems={'center'} wrap={'wrap'} gap={4}>
- {JSON.parse(images).map((src: string) => {
+ {formatData.map(({ src }) => {
return (
- <Box key={src} mr={2} mb={2} rounded={'md'} flex={'0 0 auto'} w={'100px'} h={'100px'}>
+ <Box key={src} rounded={'md'} flex={'0 0 auto'} w={'120px'}>
<MdImage src={src} />
</Box>
);

View File

@@ -27,6 +27,7 @@ const MdImage = ({ src }: { src?: string }) => {
borderRadius={'md'}
src={src}
alt={''}
maxH={'150px'}
fallbackSrc={'/imgs/errImg.png'}
fallbackStrategy={'onError'}
cursor={succeed ? 'pointer' : 'default'}
@@ -45,7 +46,6 @@ const MdImage = ({ src }: { src?: string }) => {
<Modal isOpen={isOpen} onClose={onClose}>
<ModalOverlay />
<ModalContent m={'auto'}>
- <ModalCloseButton />
<Image
src={src}
alt={''}
@@ -55,6 +55,7 @@ const MdImage = ({ src }: { src?: string }) => {
objectFit={'contain'}
/>
</ModalContent>
<ModalCloseButton bg={'myWhite.500'} zIndex={999999} />
</Modal>
</Skeleton>
);

View File

@@ -16,6 +16,7 @@ import MyTooltip from '@/components/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import ChatBox, { type ComponentRef, type StartChatFnProps } from '@/components/ChatBox';
import { getGuideModule } from '@/global/core/app/modules/utils';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
export type ChatTestComponentRef = {
resetChatTest: () => void;
@@ -115,6 +116,7 @@ const ChatTest = (
userAvatar={userInfo?.avatar}
showMarkIcon
userGuideModule={getGuideModule(modules)}
showFileSelector={checkChatSupportSelectFileByModules(modules)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

View File

@@ -52,19 +52,19 @@ const UpdateInviteModal = () => {
return (
<MyModal
- isOpen={inviteList.length > 0}
+ isOpen={inviteList && inviteList.length > 0}
title={
<>
<Box>{t('user.team.Processing invitations')}</Box>
<Box fontWeight={'normal'} fontSize={'sm'} color={'myGray.500'}>
- {t('user.team.Processing invitations Tips', { amount: inviteList.length })}
+ {t('user.team.Processing invitations Tips', { amount: inviteList?.length })}
</Box>
</>
}
maxW={['90vw', '500px']}
>
<ModalBody>
- {inviteList.map((item) => (
+ {inviteList?.map((item) => (
<Flex
key={item.teamId}
alignItems={'center'}

View File

@@ -95,7 +95,7 @@ function App({ Component, pageProps }: AppProps) {
<title>{feConfigs?.systemTitle || process.env.SYSTEM_NAME || 'GPT'}</title>
<meta
name="description"
- content="FastGPT is a knowledge-based question answering system built on the LLM. It offers out-of-the-box data processing and model invocation capabilities. Moreover, it allows for workflow orchestration through Flow visualization, thereby enabling complex question and answer scenarios!"
+ content="FastGPT 是一个大模型应用编排系统,提供开箱即用的数据处理、模型调用等能力,可以快速的构建知识库并通过 Flow 可视化进行工作流编排,实现复杂的知识库场景!"
/>
<meta
name="viewport"

View File

@@ -4,17 +4,18 @@ import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadMongoImg } from '@fastgpt/service/common/file/image/controller';
- type Props = { base64Img: string };
+ type Props = { base64Img: string; expiredTime?: Date };
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
- const { userId } = await authCert({ req, authToken: true });
- const { base64Img } = req.body as Props;
+ const { teamId } = await authCert({ req, authToken: true });
+ const { base64Img, expiredTime } = req.body as Props;
const data = await uploadMongoImg({
- userId,
- base64Img
+ teamId,
+ base64Img,
expiredTime
});
jsonRes(res, { data });

View File

@@ -17,10 +17,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, role }),
type: 'dataset'
- });
+ }).lean();
const data = datasets.map((item) => ({
- ...item.toJSON(),
+ ...item,
vectorModel: getVectorModel(item.vectorModel),
agentModel: getQAModel(item.agentModel),
canWrite: String(item.tmbId) === tmbId,

View File

@@ -22,11 +22,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
...mongoRPermission({ teamId, tmbId, role }),
...(parentId !== undefined && { parentId: parentId || null }),
...(type && { type })
- }).sort({ updateTime: -1 });
+ })
.sort({ updateTime: -1 })
.lean();
const data = await Promise.all(
datasets.map(async (item) => ({
- ...item.toJSON(),
+ ...item,
vectorModel: getVectorModel(item.vectorModel),
agentModel: getQAModel(item.agentModel),
canWrite,

View File

@@ -59,6 +59,7 @@ import { useAppStore } from '@/web/core/app/store/useAppStore';
import PermissionIconText from '@/components/support/permission/IconText';
import QGSwitch from '../QGSwitch';
import TTSSelect from '../TTSSelect';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
const VariableEditModal = dynamic(() => import('@/components/core/module/VariableEditModal'));
const InfoModal = dynamic(() => import('../InfoModal'));
@@ -676,6 +677,7 @@ const ChatTest = ({ appId }: { appId: string }) => {
userAvatar={userInfo?.avatar}
showMarkIcon
userGuideModule={getGuideModule(modules)}
showFileSelector={checkChatSupportSelectFileByModules(modules)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

View File

@@ -1,7 +1,7 @@
import MyIcon from '@/components/Icon';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
- import { Box, Button, Flex, ModalBody, useDisclosure } from '@chakra-ui/react';
+ import { Box, Button, Flex, ModalBody, useDisclosure, Image } from '@chakra-ui/react';
import React, { useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MySelect from '@/components/Select';
@@ -130,9 +130,9 @@ const TTSSelect = ({
<Flex mt={10} justifyContent={'end'}>
{audioPlaying ? (
<Flex>
- <MyIcon name={'core/chat/speaking'} w={'16px'} />
+ <Image src="/icon/speaking.gif" w={'24px'} alt={''} />
<Button
- ml={3}
+ ml={2}
variant={'gray'}
isLoading={audioLoading}
leftIcon={<MyIcon name={'core/chat/stopSpeech'} w={'16px'} />}

View File

@@ -7,6 +7,7 @@ import Avatar from '@/components/Avatar';
import ToolMenu from './ToolMenu';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';
import { useRouter } from 'next/router';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
const ChatHeader = ({
history,
@@ -27,7 +28,10 @@ const ChatHeader = ({
const theme = useTheme();
const { isPc } = useSystemStore();
const title = useMemo(
- () => history[history.length - 2]?.value?.slice(0, 8) || appName || '新对话',
+ () =>
chatContentReplaceBlock(history[history.length - 2]?.value)?.slice(0, 8) ||
appName ||
'新对话',
[appName, history]
);

View File

@@ -32,6 +32,8 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import { useUserStore } from '@/web/support/user/useUserStore';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const router = useRouter();
@@ -78,7 +80,10 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
abortSignal: controller
});
- const newTitle = prompts[0].content?.slice(0, 20) || '新对话';
+ const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
'新对话';
// update history
if (completionChatId !== chatId) {
@@ -363,6 +368,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
appAvatar={chatData.app.avatar}
userAvatar={userInfo?.avatar}
userGuideModule={chatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onUpdateVariable={(e) => {}}
onStartChat={startChat}

View File

@@ -20,6 +20,7 @@ import PageContainer from '@/components/PageContainer';
import ChatHeader from './components/ChatHeader';
import ChatHistorySlider from './components/ChatHistorySlider';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
const OutLink = ({
shareId,
@@ -254,6 +255,9 @@ const OutLink = ({
appAvatar={shareChatData.app.avatar}
userAvatar={shareChatData.userAvatar}
userGuideModule={shareChatData.app?.userGuideModule}
showFileSelector={checkChatSupportSelectFileByChatModels(
shareChatData.app.chatModels
)}
feedbackType={'user'}
onUpdateVariable={(e) => {
setShareChatData((state) => ({

View File

@@ -29,8 +29,7 @@ export async function generateQA(): Promise<any> {
error = false
} = await (async () => {
try {
- const data = (
- await MongoDatasetTraining.findOneAndUpdate(
+ const data = await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.qa,
lockTime: { $lte: new Date(Date.now() - 10 * 60 * 1000) }
@@ -38,7 +37,8 @@ export async function generateQA(): Promise<any> {
{
lockTime: new Date()
}
- ).select({
+ )
.select({
_id: 1,
userId: 1,
teamId: 1,
@@ -50,7 +50,7 @@ export async function generateQA(): Promise<any> {
billId: 1,
prompt: 1
})
- )?.toJSON();
+ .lean();
// task preemption
if (!data) {

View File

@@ -24,8 +24,7 @@ export async function generateVector(): Promise<any> {
error = false
} = await (async () => {
try {
- const data = (
- await MongoDatasetTraining.findOneAndUpdate(
+ const data = await MongoDatasetTraining.findOneAndUpdate(
{
mode: TrainingModeEnum.chunk,
lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
@@ -33,7 +32,8 @@ export async function generateVector(): Promise<any> {
{
lockTime: new Date()
}
- ).select({
+ )
.select({
_id: 1,
userId: 1,
teamId: 1,
@@ -46,7 +46,7 @@ export async function generateVector(): Promise<any> {
model: 1,
billId: 1
})
- )?.toJSON();
+ .lean();
// task preemption
if (!data) {

View File

@@ -21,6 +21,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
@@ -106,14 +107,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
temperature = Math.max(temperature, 0.01);
const ai = getAIApi(user.openaiAccount, 480000);
- const response = await ai.chat.completions.create(
- {
- model,
- temperature,
- max_tokens,
- stream,
- seed: temperature < 0.3 ? 1 : undefined,
- messages: [
+ const concatMessages = [
...(modelConstantsData.defaultSystemChatPrompt
? [
{
@@ -122,8 +116,20 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
]
: []),
- ...messages
- ]
+ ...messages.map((item) => ({
+ ...item,
content: modelConstantsData.vision ? formatStr2ChatContent(item.content) : item.content
}))
];
const response = await ai.chat.completions.create(
{
model,
temperature,
max_tokens,
stream,
seed: temperature < 0.3 ? 1 : undefined,
messages: concatMessages
},
{
headers: {

View File

@@ -4,6 +4,7 @@ import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { addLog } from '@fastgpt/service/common/mongo/controller';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
type Props = {
chatId: string;
@@ -51,12 +52,17 @@ export async function saveChat({
)
];
const title =
chatContentReplaceBlock(content[0].value).slice(0, 20) ||
content[1]?.value?.slice(0, 20) ||
'Chat';
if (chatHistory) {
promise.push(
MongoChat.updateOne(
{ chatId },
{
- title: content[0].value.slice(0, 20),
+ title,
updateTime: new Date()
}
)
@@ -69,7 +75,7 @@ export async function saveChat({
tmbId,
appId,
variables,
- title: content[0].value.slice(0, 20),
+ title,
source,
shareId
})

View File

@@ -1,8 +1,8 @@
import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
import { AxiosProgressEvent } from 'axios';
- export const postUploadImg = (base64Img: string) =>
- POST<string>('/common/file/uploadImage', { base64Img });
+ export const postUploadImg = (base64Img: string, expiredTime?: Date) =>
+ POST<string>('/common/file/uploadImage', { base64Img, expiredTime });
export const postUploadFiles = (
data: FormData,

View File

@@ -37,12 +37,14 @@ export const compressImgAndUpload = ({
file,
maxW = 200,
maxH = 200,
- maxSize = 1024 * 100
+ maxSize = 1024 * 100, // 100kb
expiredTime
}: {
file: File;
maxW?: number;
maxH?: number;
maxSize?: number;
expiredTime?: Date;
}) =>
new Promise<string>((resolve, reject) => {
const reader = new FileReader();
@@ -87,7 +89,7 @@ export const compressImgAndUpload = ({
const src = await (async () => {
try {
- const src = await postUploadImg(compressedDataUrl);
+ const src = await postUploadImg(compressedDataUrl, expiredTime);
return src;
} catch (error) {
return compressedDataUrl;

View File

@@ -3,9 +3,13 @@ import { Box } from '@chakra-ui/react';
import { useToast } from '@/web/common/hooks/useToast';
import { useTranslation } from 'next-i18next';
- export const useSelectFile = (props?: { fileType?: string; multiple?: boolean }) => {
+ export const useSelectFile = (props?: {
fileType?: string;
multiple?: boolean;
maxCount?: number;
}) => {
const { t } = useTranslation();
- const { fileType = '*', multiple = false } = props || {};
+ const { fileType = '*', multiple = false, maxCount = 10 } = props || {};
const { toast } = useToast();
const SelectFileDom = useRef<HTMLInputElement>(null);
@@ -19,7 +23,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
multiple={multiple}
onChange={(e) => {
if (!e.target.files || e.target.files?.length === 0) return;
- if (e.target.files.length > 10) {
+ if (e.target.files.length > maxCount) {
return toast({
status: 'warning',
title: t('file.Select a maximum of 10 files')
@@ -30,7 +34,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
/>
</Box>
),
- [fileType, multiple, t, toast]
+ [fileType, maxCount, multiple, t, toast]
);
const onOpen = useCallback(() => {

View File

@@ -1,4 +1,4 @@
- import { useState, useCallback, useEffect, useMemo } from 'react';
+ import { useState, useCallback, useEffect, useMemo, useRef } from 'react';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { AppTTSConfigType } from '@/types/app';
@@ -14,6 +14,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
const [audio, setAudio] = useState<HTMLAudioElement>();
const [audioLoading, setAudioLoading] = useState(false);
const [audioPlaying, setAudioPlaying] = useState(false);
const audioController = useRef(new AbortController());
// Check whether the voice is supported
const hasAudio = useMemo(() => {
@@ -49,12 +50,15 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
return resolve({ buffer });
}
audioController.current = new AbortController();
/* request tts */
const response = await fetch('/api/core/chat/item/getSpeech', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
signal: audioController.current.signal,
body: JSON.stringify({
chatItemId,
ttsConfig,
@@ -120,6 +124,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
audio.src = '';
}
window.speechSynthesis?.cancel();
audioController.current?.abort();
setAudioPlaying(false);
}, [audio]);

View File

@@ -7,6 +7,7 @@ import type {
} from '@fastgpt/global/support/outLink/api.d';
import type { ChatSiteItemType } from '@fastgpt/global/core/chat/type.d';
import { HUMAN_ICON } from '@fastgpt/global/core/chat/constants';
import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';
type State = {
shareChatData: ShareChatType;
@@ -64,6 +65,10 @@ export const useShareChatStore = create<State>()(
shareChatHistory: [],
saveChatResponse({ chatId, prompts, variables, shareId }) {
const chatHistory = get().shareChatHistory.find((item) => item.chatId === chatId);
const newTitle =
chatContentReplaceBlock(prompts[prompts.length - 2]?.value).slice(0, 20) ||
prompts[prompts.length - 1]?.value?.slice(0, 20) ||
'Chat';
const historyList = (() => {
if (chatHistory) {
@@ -71,7 +76,7 @@ export const useShareChatStore = create<State>()(
item.chatId === chatId
? {
...item,
- title: prompts[prompts.length - 2]?.value,
+ title: newTitle,
updateTime: new Date(),
chats: chatHistory.chats.concat(prompts).slice(-30),
variables
@@ -82,7 +87,7 @@ export const useShareChatStore = create<State>()(
return get().shareChatHistory.concat({
chatId,
shareId,
- title: prompts[prompts.length - 2]?.value,
+ title: newTitle,
updateTime: new Date(),
chats: prompts,
variables

View File

@@ -0,0 +1,21 @@
import { chatModelList } from '@/web/common/system/staticData';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
for (const model of models) {
const modelData = chatModelList.find((item) => item.model === model || item.name === model);
if (modelData?.vision) {
return true;
}
}
return false;
}
export function checkChatSupportSelectFileByModules(modules: ModuleItemType[] = []) {
const chatModules = modules.filter((item) => item.flowType === FlowNodeTypeEnum.chatNode);
const models: string[] = chatModules.map(
(item) => item.inputs.find((item) => item.key === 'model')?.value || ''
);
return checkChatSupportSelectFileByChatModels(models);
}
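A brief illustrative call (the model names are placeholders): the chat UI only shows the file selector when at least one configured model is marked vision: true in chatModelList.

import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';

// true as long as any listed model has vision enabled in chatModelList
const showFileSelector = checkChatSupportSelectFileByChatModels([
  'gpt-3.5-turbo',
  'gpt-4-vision-preview'
]);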

View File

@@ -83,7 +83,7 @@ const Button = defineStyleConfig({
_hover: {
color: 'myBlue.600',
bg: 'myWhite.400',
- boxShadow: '0 0 5px rgba(0,0,0,0.2)'
+ boxShadow: '0 0 5px rgba(0,0,0,0.1)'
},
_active: {
color: 'myBlue.700'