feat: vision model (#489)

* mongo init

* perf: mongo connect

* perf: tts

perf: whisper and tts

perf: tts whisper permission

log

rebase (#488)

* perf: modal

* i18n

* perf: schema lean

* feat: vision model format

* perf: tts loading

* perf: static data

* perf: tts

* feat: image

* perf: image

* perf: upload image and title

* perf: image size

* doc

* perf: color

* doc

* disable file selection while speaking

* doc
Author: Archer
Date: 2023-11-18 15:42:35 +08:00
Committed by: GitHub
Parent: 70f3373246
Commit: c5664c7e90
58 changed files with 650 additions and 254 deletions

View File

@@ -1,8 +1,8 @@
 import { GET, POST, PUT, DELETE } from '@/web/common/api/request';
 import { AxiosProgressEvent } from 'axios';

-export const postUploadImg = (base64Img: string) =>
-  POST<string>('/common/file/uploadImage', { base64Img });
+export const postUploadImg = (base64Img: string, expiredTime?: Date) =>
+  POST<string>('/common/file/uploadImage', { base64Img, expiredTime });

 export const postUploadFiles = (
   data: FormData,
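
A minimal caller sketch for the extended signature; the sample data and the 7-day expiry are illustrative, not part of this commit:

// Hypothetical usage: upload a base64 image and ask the server to treat it as temporary.
const base64Img = 'data:image/png;base64,iVBORw0KGgo=';
const expiredTime = new Date(Date.now() + 7 * 24 * 60 * 60 * 1000); // e.g. keep for 7 days
const imgUrl = await postUploadImg(base64Img, expiredTime);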

View File

@@ -37,12 +37,14 @@ export const compressImgAndUpload = ({
   file,
   maxW = 200,
   maxH = 200,
-  maxSize = 1024 * 100
+  maxSize = 1024 * 100, // 100kb
+  expiredTime
 }: {
   file: File;
   maxW?: number;
   maxH?: number;
   maxSize?: number;
+  expiredTime?: Date;
 }) =>
   new Promise<string>((resolve, reject) => {
     const reader = new FileReader();
@@ -87,7 +89,7 @@ export const compressImgAndUpload = ({
       const src = await (async () => {
         try {
-          const src = await postUploadImg(compressedDataUrl);
+          const src = await postUploadImg(compressedDataUrl, expiredTime);
           return src;
         } catch (error) {
           return compressedDataUrl;
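
For context, a sketch of how a caller might thread expiredTime through the helper; the size bounds, the one-hour expiry and the function name are assumptions for illustration:

// Hypothetical call site: compress a user-selected image, upload it, and let the
// server expire it after an hour. maxSize keeps the 100kb default shown above.
async function uploadTempImage(file: File) {
  const src = await compressImgAndUpload({
    file,
    maxW: 300, // illustrative bounds, not from this commit
    maxH: 300,
    expiredTime: new Date(Date.now() + 60 * 60 * 1000)
  });
  return src; // uploaded URL, or the compressed data URL if the upload failed
}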

View File

@@ -3,9 +3,13 @@ import { Box } from '@chakra-ui/react';
 import { useToast } from '@/web/common/hooks/useToast';
 import { useTranslation } from 'next-i18next';

-export const useSelectFile = (props?: { fileType?: string; multiple?: boolean }) => {
+export const useSelectFile = (props?: {
+  fileType?: string;
+  multiple?: boolean;
+  maxCount?: number;
+}) => {
   const { t } = useTranslation();
-  const { fileType = '*', multiple = false } = props || {};
+  const { fileType = '*', multiple = false, maxCount = 10 } = props || {};
   const { toast } = useToast();
   const SelectFileDom = useRef<HTMLInputElement>(null);
@@ -19,7 +23,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
           multiple={multiple}
           onChange={(e) => {
             if (!e.target.files || e.target.files?.length === 0) return;
-            if (e.target.files.length > 10) {
+            if (e.target.files.length > maxCount) {
               return toast({
                 status: 'warning',
                 title: t('file.Select a maximum of 10 files')
@@ -30,7 +34,7 @@ export const useSelectFile = (props?: { fileType?: string; multiple?: boolean })
         />
       </Box>
     ),
-    [fileType, multiple, t, toast]
+    [fileType, maxCount, multiple, t, toast]
   );

   const onOpen = useCallback(() => {
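
A hedged usage sketch of the extended hook; it assumes the hook returns the memoized File component and the onOpen trigger (the return statement is outside this hunk), and the onSelect prop is likewise assumed:

// Hypothetical component code: allow picking up to 4 images instead of the hard-coded 10.
const { File, onOpen } = useSelectFile({
  fileType: 'image/*',
  multiple: true,
  maxCount: 4
});
// Render <File onSelect={handleFiles} /> once and call onOpen() from the upload button.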

View File

@@ -1,4 +1,4 @@
-import { useState, useCallback, useEffect, useMemo } from 'react';
+import { useState, useCallback, useEffect, useMemo, useRef } from 'react';
 import { useToast } from '@/web/common/hooks/useToast';
 import { getErrText } from '@fastgpt/global/common/error/utils';
 import { AppTTSConfigType } from '@/types/app';
@@ -14,6 +14,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
   const [audio, setAudio] = useState<HTMLAudioElement>();
   const [audioLoading, setAudioLoading] = useState(false);
   const [audioPlaying, setAudioPlaying] = useState(false);
+  const audioController = useRef(new AbortController());

   // Check whether the voice is supported
   const hasAudio = useMemo(() => {
@@ -49,12 +50,15 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
           return resolve({ buffer });
         }

+        audioController.current = new AbortController();
+
         /* request tts */
         const response = await fetch('/api/core/chat/item/getSpeech', {
           method: 'POST',
           headers: {
             'Content-Type': 'application/json'
           },
+          signal: audioController.current.signal,
           body: JSON.stringify({
             chatItemId,
             ttsConfig,
@@ -120,6 +124,7 @@ export const useAudioPlay = (props?: { ttsConfig?: AppTTSConfigType }) => {
       audio.src = '';
     }
     window.speechSynthesis?.cancel();
+    audioController.current?.abort();
     setAudioPlaying(false);
   }, [audio]);
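
The stop path relies on the standard AbortController/fetch contract. A self-contained sketch of the same pattern with generic names (none of these identifiers are from the commit):

// One controller per request: abort() cancels the in-flight fetch, and a fresh
// controller is created before the next request, mirroring the hook above.
let controller = new AbortController();

async function requestSpeech(body: unknown): Promise<ArrayBuffer> {
  controller = new AbortController();
  const res = await fetch('/api/core/chat/item/getSpeech', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    signal: controller.signal,
    body: JSON.stringify(body)
  });
  return res.arrayBuffer();
}

function cancelSpeech() {
  controller.abort(); // rejects the pending fetch with an AbortError
}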

View File

@@ -7,6 +7,7 @@ import type {
 } from '@fastgpt/global/support/outLink/api.d';
 import type { ChatSiteItemType } from '@fastgpt/global/core/chat/type.d';
 import { HUMAN_ICON } from '@fastgpt/global/core/chat/constants';
+import { chatContentReplaceBlock } from '@fastgpt/global/core/chat/utils';

 type State = {
   shareChatData: ShareChatType;
@@ -64,6 +65,10 @@ export const useShareChatStore = create<State>()(
       shareChatHistory: [],
       saveChatResponse({ chatId, prompts, variables, shareId }) {
         const chatHistory = get().shareChatHistory.find((item) => item.chatId === chatId);
+        const newTitle =
+          chatContentReplaceBlock(prompts[prompts.length - 2]?.value).slice(0, 20) ||
+          prompts[prompts.length - 1]?.value?.slice(0, 20) ||
+          'Chat';

         const historyList = (() => {
           if (chatHistory) {
@@ -71,7 +76,7 @@ export const useShareChatStore = create<State>()(
               item.chatId === chatId
                 ? {
                     ...item,
-                    title: prompts[prompts.length - 2]?.value,
+                    title: newTitle,
                     updateTime: new Date(),
                     chats: chatHistory.chats.concat(prompts).slice(-30),
                     variables
@@ -82,7 +87,7 @@ export const useShareChatStore = create<State>()(
           return get().shareChatHistory.concat({
             chatId,
             shareId,
-            title: prompts[prompts.length - 2]?.value,
+            title: newTitle,
             updateTime: new Date(),
             chats: prompts,
             variables
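
To make the title fallback concrete, a small sketch with a stand-in for chatContentReplaceBlock (assumed here to strip embedded image markup; its real implementation is not part of this diff) and with prompts assumed to end in a [user, assistant] pair:

// Stand-in for the imported util, for illustration only.
const stripBlocks = (text = '') => text.replace(/!\[.*?\]\(.*?\)/g, '').trim();

const prompts = [
  { obj: 'Human', value: '![img](/api/system/img/abc.png)\nWhat is in this picture?' },
  { obj: 'AI', value: 'A cat sitting on a keyboard.' }
];

const newTitle =
  stripBlocks(prompts[prompts.length - 2]?.value).slice(0, 20) || // cleaned user text first
  prompts[prompts.length - 1]?.value?.slice(0, 20) ||             // otherwise the AI reply
  'Chat';                                                         // otherwise a default
// newTitle === 'What is in this pict'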

View File

@@ -0,0 +1,21 @@
+import { chatModelList } from '@/web/common/system/staticData';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
+import { ModuleItemType } from '@fastgpt/global/core/module/type.d';
+
+export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
+  for (const model of models) {
+    const modelData = chatModelList.find((item) => item.model === model || item.name === model);
+    if (modelData?.vision) {
+      return true;
+    }
+  }
+  return false;
+}
+
+export function checkChatSupportSelectFileByModules(modules: ModuleItemType[] = []) {
+  const chatModules = modules.filter((item) => item.flowType === FlowNodeTypeEnum.chatNode);
+  const models: string[] = chatModules.map(
+    (item) => item.inputs.find((item) => item.key === 'model')?.value || ''
+  );
+  return checkChatSupportSelectFileByChatModels(models);
+}
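
A hedged example of how these helpers might be called from the chat UI; the canSelectFile wrapper and the model name are placeholders, not code from this commit:

// Decide whether the chat input should offer image selection for a given app.
function canSelectFile(appModules: ModuleItemType[]) {
  return checkChatSupportSelectFileByModules(appModules);
}

// Or check a raw model list directly (the model name is only an example).
const visionEnabled = checkChatSupportSelectFileByChatModels(['gpt-4-vision-preview']);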

View File

@@ -83,7 +83,7 @@ const Button = defineStyleConfig({
     _hover: {
       color: 'myBlue.600',
       bg: 'myWhite.400',
-      boxShadow: '0 0 5px rgba(0,0,0,0.2)'
+      boxShadow: '0 0 5px rgba(0,0,0,0.1)'
     },
     _active: {
       color: 'myBlue.700'