mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
v4.6-3 (#463)
@@ -72,7 +72,7 @@
"@types/jsdom": "^21.1.1",
"@types/jsonwebtoken": "^9.0.3",
"@types/lodash": "^4.14.191",
"@types/multer": "^1.4.7",
"@types/multer": "^1.4.10",
"@types/node": "^20.8.5",
"@types/papaparse": "^5.3.7",
"@types/react": "18.0.28",
@@ -203,6 +203,9 @@
},
"input": {
"Repeat Value": "Repeat Value"
},
"speech": {
"error tip": "Speech Failed"
}
},
"core": {
@@ -226,8 +229,10 @@
},
"chat": {
"Audio Speech Error": "Audio Speech Error",
"Record": "Speech",
"Restart": "Restart",
"Send Message": "Send Message"
"Send Message": "Send Message",
"Stop Speak": "Stop Speak"
},
"dataset": {
"Choose Dataset": "Choose Dataset",
@@ -203,6 +203,9 @@
},
"input": {
"Repeat Value": "有重复的值"
},
"speech": {
"error tip": "语音转文字失败"
}
},
"core": {
@@ -226,8 +229,10 @@
},
"chat": {
"Audio Speech Error": "语音播报异常",
"Record": "语音输入",
"Restart": "重开对话",
"Send Message": "发送"
"Send Message": "发送",
"Stop Speak": "停止录音"
},
"dataset": {
"Choose Dataset": "关联知识库",
@@ -62,6 +62,7 @@ import styles from './index.module.scss';
import { postQuestionGuide } from '@/web/core/ai/api';
import { splitGuideModule } from '@/global/core/app/modules/utils';
import { AppTTSConfigType } from '@/types/app';
import { useSpeech } from '@/web/common/hooks/useSpeech';

const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);

@@ -149,6 +150,8 @@ const ChatBox = (
const [adminMarkData, setAdminMarkData] = useState<AdminMarkType & { chatItemId: string }>();
const [questionGuides, setQuestionGuide] = useState<string[]>([]);

const { isSpeaking, startSpeak, stopSpeak } = useSpeech();

const isChatting = useMemo(
() =>
chatHistory[chatHistory.length - 1] &&
@@ -857,8 +860,22 @@ const ChatBox = (
right={['12px', '14px']}
bottom={['15px', '13px']}
borderRadius={'md'}
bg={TextareaDom.current?.value ? 'myBlue.600' : ''}
// bg={TextareaDom.current?.value ? 'myBlue.600' : ''}
cursor={'pointer'}
lineHeight={1}
onClick={() => {
if (isChatting) {
return chatController.current?.abort('stop');
}
if (TextareaDom.current?.value) {
return handleSubmit((data) => sendPrompt(data, TextareaDom.current?.value))();
}
// speech
// if (isSpeaking) {
// return stopSpeak();
// }
// startSpeak();
}}
>
{isChatting ? (
<MyIcon
@@ -868,19 +885,14 @@ const ChatBox = (
cursor={'pointer'}
name={'stop'}
color={'gray.500'}
onClick={() => chatController.current?.abort('stop')}
/>
) : (
<MyTooltip label={t('core.chat.Send Message')}>
<MyIcon
name={'core/chat/sendFill'}
width={'16px'}
height={'16px'}
cursor={'pointer'}
color={TextareaDom.current?.value ? 'white' : 'myBlue.600'}
onClick={() => {
handleSubmit((data) => sendPrompt(data, TextareaDom.current?.value))();
}}
width={['16px', '22px']}
height={['16px', '22px']}
color={TextareaDom.current?.value ? 'myBlue.600' : 'myGray.400'}
/>
</MyTooltip>
)}
@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1699507042803" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2849" xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128"><path d="M512 628.50844445L512 628.50844445c106.79940741 0 194.18074075-87.38133333 194.18074075-194.18074075L706.18074075 201.31081482c0-106.79940741-87.38133333-194.18074075-194.18074075-194.18074074l0 0c-106.79940741 0-194.18074075 87.38133333-194.18074075 194.18074074l0 233.01688888C317.81925925 541.12711111 405.20059259 628.50844445 512 628.50844445z" p-id="2850"></path><path d="M857.39899259 488.21285925c3.2768-21.23851852-11.16539259-41.02068148-32.40391111-44.29748147-21.23851852-3.15543703-41.02068148 11.28675555-44.29748148 32.40391111C760.30862222 607.39128889 644.89244445 706.18074075 512 706.18074075c-132.89244445 0-248.42998518-98.91081482-268.6976-229.98281483-3.2768-21.23851852-23.18032592-35.68071111-44.29748148-32.4039111-21.23851852 3.2768-35.68071111 23.05896297-32.40391111 44.29748148 24.51531852 158.37866667 150.49007408 276.46482963 306.56284444 293.45564445L473.16385185 900.36148148l-116.50844444 0c-21.48124445 0-38.83614815 17.3549037-38.83614816 38.83614815s17.3549037 38.83614815 38.83614816 38.83614815l310.68918518 0c21.48124445 0 38.83614815-17.3549037 38.83614816-38.83614815s-17.3549037-38.83614815-38.83614816-38.83614815l-116.50844444 0 0-118.81434073C706.78755555 764.55632592 832.88367408 646.59152592 857.39899259 488.21285925z" p-id="2851"></path></svg>
@@ -0,0 +1,8 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1699507299637"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3033"
xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128">
<path
d="M512 0a512 512 0 0 1 512 512c0 282.769067-229.230933 512-512 512S0 794.769067 0 512 229.230933 0 512 0zM388.022613 314.88C347.62752 314.88 314.88 347.62752 314.88 388.022613v247.954774C314.88 676.37248 347.62752 709.12 388.022613 709.12h247.954774C676.37248 709.12 709.12 676.37248 709.12 635.977387V388.022613C709.12 347.62752 676.37248 314.88 635.977387 314.88H388.022613z"
p-id="3034"></path>
</svg>
@@ -106,7 +106,9 @@ const iconPaths = {
'support/permission/publicLight': () => import('./icons/support/permission/publicLight.svg'),
'core/app/ttsFill': () => import('./icons/core/app/ttsFill.svg'),
'common/playLight': () => import('./icons/common/playLight.svg'),
'core/chat/sendFill': () => import('./icons/core/chat/sendFill.svg')
'core/chat/sendFill': () => import('./icons/core/chat/sendFill.svg'),
'core/chat/recordFill': () => import('./icons/core/chat/recordFill.svg'),
'core/chat/stopSpeechFill': () => import('./icons/core/chat/stopSpeechFill.svg')
};

export type IconName = keyof typeof iconPaths;
@@ -67,7 +67,7 @@ const Layout = ({ children }: { children: JSX.Element }) => {
}, [loadGitStar, setScreenWidth]);

const { data: unread = 0 } = useQuery(['getUnreadCount'], getUnreadCount, {
enabled: !!userInfo && feConfigs.isPlus,
enabled: !!userInfo && !!feConfigs.isPlus,
refetchInterval: 10000
});
@@ -29,7 +29,7 @@ const TagTextarea = ({ defaultValues, onUpdate, ...props }: Props) => {
return;
}
if (tags.includes(value)) {
toast({
return toast({
status: 'warning',
title: t('common.input.Repeat Value')
});
@@ -88,11 +88,11 @@ function App({ Component, pageProps }: AppProps) {
setLastRoute(router.asPath);
};
}, [router.asPath]);
``;

return (
<>
<Head>
<title>{feConfigs?.systemTitle || 'AI'}</title>
<title>{feConfigs?.systemTitle || 'FastGPT'}</title>
<meta
name="description"
content="FastGPT is a knowledge-based question answering system built on the LLM. It offers out-of-the-box data processing and model invocation capabilities. Moreover, it allows for workflow orchestration through Flow visualization, thereby enabling complex question and answer scenarios!"
@@ -51,13 +51,13 @@ class UploadModel {
}

resolve({
...req.body,
files:
// @ts-ignore
req.files?.map((file) => ({
...file,
originalname: decodeURIComponent(file.originalname)
})) || [],
bucketName: req.body.bucketName,
metadata: (() => {
if (!req.body?.metadata) return {};
try {
@@ -80,6 +80,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
try {
await connectToDatabase();
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
console.log(req.body);

const { files, bucketName, metadata } = await upload.doUpload(req, res);
@@ -5,7 +5,7 @@ import { MongoUser } from '@fastgpt/service/support/user/schema';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import { PgClient, Pg } from '@fastgpt/service/common/pg';
import { PgClient } from '@fastgpt/service/common/pg';
import { addLog } from '@fastgpt/service/common/mongo/controller';
import { responseWriteController } from '@fastgpt/service/common/response';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
@@ -17,7 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
datasetId: string;
};

if (!datasetId || !Pg) {
if (!datasetId || !global.pgClient) {
throw new Error('缺少参数');
}

@@ -61,7 +61,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}

// connect pg
Pg.connect((err, client, done) => {
global.pgClient.connect((err, client, done) => {
if (err) {
console.error(err);
res.end('Error connecting to database');
projects/app/src/pages/api/v1/audio/transcriptions.ts (new file, 48 lines)
@@ -0,0 +1,48 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { getUploadModel } from '@fastgpt/service/common/file/upload/multer';
import fs from 'fs';
import { getAIApi } from '@fastgpt/service/core/ai/config';

const upload = getUploadModel({
  maxSize: 2
});

export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    const { teamId, tmbId } = await authCert({ req, authToken: true });

    const { files } = await upload.doUpload(req, res);

    const file = files[0];

    if (!file) {
      throw new Error('file not found');
    }

    const ai = getAIApi();

    const result = await ai.audio.transcriptions.create({
      file: fs.createReadStream(file.path),
      model: 'whisper-1'
    });

    jsonRes(res, {
      data: result.text
    });
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
});

export const config = {
  api: {
    bodyParser: false
  }
};
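A minimal browser-side sketch of calling the new route directly (illustrative only; the real client is the useSpeech hook below, which goes through the app's shared POST helper). The 'files' field name matches what upload.doUpload expects, and the response shape assumes the usual jsonRes envelope:

// Hypothetical helper, not part of the commit.
async function transcribe(blob: Blob): Promise<string> {
  const formData = new FormData();
  formData.append('files', blob, 'recording.webm'); // same field name the multer upload model reads

  const res = await fetch('/api/v1/audio/transcriptions', {
    method: 'POST',
    body: formData // the browser sets the multipart boundary itself
  });
  const json = await res.json();
  return json.data; // assumed jsonRes envelope: { code, message, data }
}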
projects/app/src/web/common/hooks/useSpeech.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
import { useEffect, useRef, useState } from 'react';
import { POST } from '../api/request';
import { useToast } from './useToast';
import { useTranslation } from 'react-i18next';
import { getErrText } from '@fastgpt/global/common/error/utils';

export const useSpeech = () => {
  const { t } = useTranslation();
  const mediaRecorder = useRef<MediaRecorder>();
  const { toast } = useToast();
  const [isSpeaking, setIsSpeaking] = useState(false);

  const startSpeak = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaRecorder.current = new MediaRecorder(stream);
      const chunks: Blob[] = [];

      mediaRecorder.current.ondataavailable = (e) => {
        chunks.push(e.data);
      };

      mediaRecorder.current.onstop = async () => {
        const formData = new FormData();
        const blob = new Blob(chunks, { type: 'audio/webm' });
        formData.append('files', blob, 'recording.webm');

        const link = document.createElement('a');
        link.href = URL.createObjectURL(blob);
        link.download = 'recording.webm';
        document.body.appendChild(link);
        link.click();
        link.remove();

        try {
          const result = await POST<string[]>('/v1/audio/transcriptions', formData, {
            timeout: 60000,
            headers: {
              'Content-Type': 'multipart/form-data; charset=utf-8'
            }
          });

          console.log(result, '===');
        } catch (error) {
          toast({
            status: 'warning',
            title: getErrText(error, t('common.speech.error tip'))
          });
        }
        setIsSpeaking(false);
      };

      mediaRecorder.current.start();

      setIsSpeaking(true);
    } catch (error) {}
  };

  const stopSpeak = () => {
    if (mediaRecorder.current) {
      mediaRecorder.current?.stop();
    }
  };

  return {
    startSpeak,
    stopSpeak,
    isSpeaking
  };
};
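A minimal consumer sketch for the hook (illustrative only; the actual consumer is the ChatBox component in the hunk above, where the startSpeak/stopSpeak wiring is still commented out):

// Hypothetical component, not part of the commit.
import React from 'react';
import { useSpeech } from '@/web/common/hooks/useSpeech';

const SpeechButton = () => {
  const { isSpeaking, startSpeak, stopSpeak } = useSpeech();

  return (
    <button onClick={() => (isSpeaking ? stopSpeak() : startSpeak())}>
      {isSpeaking ? 'Stop Speak' : 'Record'}
    </button>
  );
};

export default SpeechButton;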