This commit is contained in:
Archer
2023-11-10 11:14:08 +08:00
committed by GitHub
parent 0a0fe31d3c
commit d91551e6be
17 changed files with 257 additions and 26 deletions

View File

@@ -8,6 +8,7 @@
"format-doc": "zhlint --dir ./docSite *.md --fix"
},
"devDependencies": {
"@types/multer": "^1.4.10",
"husky": "^8.0.3",
"i18next": "^22.5.1",
"lint-staged": "^13.2.1",
@@ -24,6 +25,7 @@
"node": ">=18.0.0"
},
"dependencies": {
"multer": "1.4.5-lts.1",
"openai": "4.16.1"
}
}

View File

@@ -0,0 +1,71 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { customAlphabet } from 'nanoid';
import multer from 'multer';
import path from 'path';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
// 12-char lowercase-hex id generator — used to build collision-resistant
// on-disk filenames for uploaded files.
const nanoid = customAlphabet('1234567890abcdef', 12);

// Shape of one uploaded file as produced by multer's diskStorage engine.
type FileType = {
  fieldname: string; // form field name the file was posted under
  originalname: string; // client-supplied filename (URI-encoded by multer; decoded in doUpload)
  encoding: string;
  mimetype: string;
  filename: string; // generated on-disk name (nanoid + original extension)
  path: string; // path of the stored temp file on local disk
  size: number; // file size in bytes
};
/**
 * Build a multer-backed upload helper for Next.js API routes.
 *
 * @param maxSize - maximum allowed size of a single uploaded file, in MB
 *                  (default 500). Converted to bytes internally.
 * @returns an UploadModel instance whose `doUpload` runs the multer
 *          middleware and resolves with the parsed files and form fields.
 */
export function getUploadModel({ maxSize = 500 }: { maxSize?: number }) {
  maxSize *= 1024 * 1024; // MB -> bytes

  class UploadModel {
    uploader = multer({
      limits: {
        // BUGFIX: `fieldSize` only limits non-file form fields; `fileSize`
        // is the multer option that caps uploaded *file* size, which is what
        // `maxSize` is meant to enforce (e.g. the whisper endpoint passes 2MB).
        fileSize: maxSize
      },
      preservePath: true,
      storage: multer.diskStorage({
        filename: (_req, file, cb) => {
          // Store under a random name but keep the original extension so
          // downstream consumers can sniff the file type.
          const { ext } = path.parse(decodeURIComponent(file.originalname));
          cb(null, nanoid() + ext);
        }
      })
    }).any();

    /**
     * Run the multer middleware over the request and resolve with the parsed
     * payload. Rejects with the multer error on failure (e.g. size limit hit).
     */
    async doUpload(req: NextApiRequest, res: NextApiResponse) {
      return new Promise<{
        files: FileType[];
        metadata: Record<string, any>;
        bucketName?: `${BucketNameEnum}`;
      }>((resolve, reject) => {
        // @ts-ignore — multer expects Express req/res; Next's are compatible at runtime
        this.uploader(req, res, (error) => {
          if (error) {
            return reject(error);
          }

          resolve({
            ...req.body,
            files:
              // @ts-ignore — req.files is attached by multer at runtime
              req.files?.map((file) => ({
                ...file,
                // multer keeps the name URI-encoded; decode for callers
                originalname: decodeURIComponent(file.originalname)
              })) || [],
            // `metadata` arrives as a JSON string form field; parse defensively
            // and fall back to an empty object on malformed input.
            metadata: (() => {
              if (!req.body?.metadata) return {};
              try {
                return JSON.parse(req.body.metadata);
              } catch (error) {
                console.log(error);
                return {};
              }
            })()
          });
        });
      });
    }
  }

  return new UploadModel();
}

16
pnpm-lock.yaml generated
View File

@@ -8,10 +8,16 @@ importers:
.:
dependencies:
multer:
specifier: 1.4.5-lts.1
version: registry.npmmirror.com/multer@1.4.5-lts.1
openai:
specifier: 4.16.1
version: registry.npmmirror.com/openai@4.16.1(encoding@0.1.13)
devDependencies:
'@types/multer':
specifier: ^1.4.10
version: registry.npmmirror.com/@types/multer@1.4.10
husky:
specifier: ^8.0.3
version: registry.npmmirror.com/husky@8.0.3
@@ -307,8 +313,8 @@ importers:
specifier: ^4.14.191
version: registry.npmmirror.com/@types/lodash@4.14.200
'@types/multer':
specifier: ^1.4.7
version: registry.npmmirror.com/@types/multer@1.4.9
specifier: ^1.4.10
version: registry.npmmirror.com/@types/multer@1.4.10
'@types/node':
specifier: ^20.8.5
version: registry.npmmirror.com/@types/node@20.8.7
@@ -4522,10 +4528,10 @@ packages:
version: 0.7.33
dev: false
registry.npmmirror.com/@types/multer@1.4.9:
resolution: {integrity: sha512-9NSvPJ2E8bNTc8XtJq1Cimx2Wrn2Ah48F15B2Du/hM8a8CHLhVbJMlF3ZCqhvMdht7Sa+YdP0aKP7N4fxDcrrg==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@types/multer/-/multer-1.4.9.tgz}
registry.npmmirror.com/@types/multer@1.4.10:
resolution: {integrity: sha512-6l9mYMhUe8wbnz/67YIjc7ZJyQNZoKq7fRXVf7nMdgWgalD0KyzJ2ywI7hoATUSXSbTu9q2HBiEwzy0tNN1v2w==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/@types/multer/-/multer-1.4.10.tgz}
name: '@types/multer'
version: 1.4.9
version: 1.4.10
dependencies:
'@types/express': registry.npmmirror.com/@types/express@4.17.20
dev: true

View File

@@ -72,7 +72,7 @@
"@types/jsdom": "^21.1.1",
"@types/jsonwebtoken": "^9.0.3",
"@types/lodash": "^4.14.191",
"@types/multer": "^1.4.7",
"@types/multer": "^1.4.10",
"@types/node": "^20.8.5",
"@types/papaparse": "^5.3.7",
"@types/react": "18.0.28",

View File

@@ -203,6 +203,9 @@
},
"input": {
"Repeat Value": "Repeat Value"
},
"speech": {
"error tip": "Speech Failed"
}
},
"core": {
@@ -226,8 +229,10 @@
},
"chat": {
"Audio Speech Error": "Audio Speech Error",
"Record": "Speech",
"Restart": "Restart",
"Send Message": "Send Message"
"Send Message": "Send Message",
"Stop Speak": "Stop Speak"
},
"dataset": {
"Choose Dataset": "Choose Dataset",

View File

@@ -203,6 +203,9 @@
},
"input": {
"Repeat Value": "有重复的值"
},
"speech": {
"error tip": "语音转文字失败"
}
},
"core": {
@@ -226,8 +229,10 @@
},
"chat": {
"Audio Speech Error": "语音播报异常",
"Record": "语音输入",
"Restart": "重开对话",
"Send Message": "发送"
"Send Message": "发送",
"Stop Speak": "停止录音"
},
"dataset": {
"Choose Dataset": "关联知识库",

View File

@@ -62,6 +62,7 @@ import styles from './index.module.scss';
import { postQuestionGuide } from '@/web/core/ai/api';
import { splitGuideModule } from '@/global/core/app/modules/utils';
import { AppTTSConfigType } from '@/types/app';
import { useSpeech } from '@/web/common/hooks/useSpeech';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);
@@ -149,6 +150,8 @@ const ChatBox = (
const [adminMarkData, setAdminMarkData] = useState<AdminMarkType & { chatItemId: string }>();
const [questionGuides, setQuestionGuide] = useState<string[]>([]);
const { isSpeaking, startSpeak, stopSpeak } = useSpeech();
const isChatting = useMemo(
() =>
chatHistory[chatHistory.length - 1] &&
@@ -857,8 +860,22 @@ const ChatBox = (
right={['12px', '14px']}
bottom={['15px', '13px']}
borderRadius={'md'}
bg={TextareaDom.current?.value ? 'myBlue.600' : ''}
// bg={TextareaDom.current?.value ? 'myBlue.600' : ''}
cursor={'pointer'}
lineHeight={1}
onClick={() => {
if (isChatting) {
return chatController.current?.abort('stop');
}
if (TextareaDom.current?.value) {
return handleSubmit((data) => sendPrompt(data, TextareaDom.current?.value))();
}
// speech
// if (isSpeaking) {
// return stopSpeak();
// }
// startSpeak();
}}
>
{isChatting ? (
<MyIcon
@@ -868,19 +885,14 @@ const ChatBox = (
cursor={'pointer'}
name={'stop'}
color={'gray.500'}
onClick={() => chatController.current?.abort('stop')}
/>
) : (
<MyTooltip label={t('core.chat.Send Message')}>
<MyIcon
name={'core/chat/sendFill'}
width={'16px'}
height={'16px'}
cursor={'pointer'}
color={TextareaDom.current?.value ? 'white' : 'myBlue.600'}
onClick={() => {
handleSubmit((data) => sendPrompt(data, TextareaDom.current?.value))();
}}
width={['16px', '22px']}
height={['16px', '22px']}
color={TextareaDom.current?.value ? 'myBlue.600' : 'myGray.400'}
/>
</MyTooltip>
)}

View File

@@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1699507042803" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="2849" xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128"><path d="M512 628.50844445L512 628.50844445c106.79940741 0 194.18074075-87.38133333 194.18074075-194.18074075L706.18074075 201.31081482c0-106.79940741-87.38133333-194.18074075-194.18074075-194.18074074l0 0c-106.79940741 0-194.18074075 87.38133333-194.18074075 194.18074074l0 233.01688888C317.81925925 541.12711111 405.20059259 628.50844445 512 628.50844445z" p-id="2850"></path><path d="M857.39899259 488.21285925c3.2768-21.23851852-11.16539259-41.02068148-32.40391111-44.29748147-21.23851852-3.15543703-41.02068148 11.28675555-44.29748148 32.40391111C760.30862222 607.39128889 644.89244445 706.18074075 512 706.18074075c-132.89244445 0-248.42998518-98.91081482-268.6976-229.98281483-3.2768-21.23851852-23.18032592-35.68071111-44.29748148-32.4039111-21.23851852 3.2768-35.68071111 23.05896297-32.40391111 44.29748148 24.51531852 158.37866667 150.49007408 276.46482963 306.56284444 293.45564445L473.16385185 900.36148148l-116.50844444 0c-21.48124445 0-38.83614815 17.3549037-38.83614816 38.83614815s17.3549037 38.83614815 38.83614816 38.83614815l310.68918518 0c21.48124445 0 38.83614815-17.3549037 38.83614816-38.83614815s-17.3549037-38.83614815-38.83614816-38.83614815l-116.50844444 0 0-118.81434073C706.78755555 764.55632592 832.88367408 646.59152592 857.39899259 488.21285925z" p-id="2851"></path></svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

View File

@@ -0,0 +1,8 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1699507299637"
class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3033"
xmlns:xlink="http://www.w3.org/1999/xlink" width="128" height="128">
<path
d="M512 0a512 512 0 0 1 512 512c0 282.769067-229.230933 512-512 512S0 794.769067 0 512 229.230933 0 512 0zM388.022613 314.88C347.62752 314.88 314.88 347.62752 314.88 388.022613v247.954774C314.88 676.37248 347.62752 709.12 388.022613 709.12h247.954774C676.37248 709.12 709.12 676.37248 709.12 635.977387V388.022613C709.12 347.62752 676.37248 314.88 635.977387 314.88H388.022613z"
p-id="3034"></path>
</svg>

After

Width:  |  Height:  |  Size: 765 B

View File

@@ -106,7 +106,9 @@ const iconPaths = {
'support/permission/publicLight': () => import('./icons/support/permission/publicLight.svg'),
'core/app/ttsFill': () => import('./icons/core/app/ttsFill.svg'),
'common/playLight': () => import('./icons/common/playLight.svg'),
'core/chat/sendFill': () => import('./icons/core/chat/sendFill.svg')
'core/chat/sendFill': () => import('./icons/core/chat/sendFill.svg'),
'core/chat/recordFill': () => import('./icons/core/chat/recordFill.svg'),
'core/chat/stopSpeechFill': () => import('./icons/core/chat/stopSpeechFill.svg')
};
export type IconName = keyof typeof iconPaths;

View File

@@ -67,7 +67,7 @@ const Layout = ({ children }: { children: JSX.Element }) => {
}, [loadGitStar, setScreenWidth]);
const { data: unread = 0 } = useQuery(['getUnreadCount'], getUnreadCount, {
enabled: !!userInfo && feConfigs.isPlus,
enabled: !!userInfo && !!feConfigs.isPlus,
refetchInterval: 10000
});

View File

@@ -29,7 +29,7 @@ const TagTextarea = ({ defaultValues, onUpdate, ...props }: Props) => {
return;
}
if (tags.includes(value)) {
toast({
return toast({
status: 'warning',
title: t('common.input.Repeat Value')
});

View File

@@ -88,11 +88,11 @@ function App({ Component, pageProps }: AppProps) {
setLastRoute(router.asPath);
};
}, [router.asPath]);
``;
return (
<>
<Head>
<title>{feConfigs?.systemTitle || 'AI'}</title>
<title>{feConfigs?.systemTitle || 'FastGPT'}</title>
<meta
name="description"
content="FastGPT is a knowledge-based question answering system built on the LLM. It offers out-of-the-box data processing and model invocation capabilities. Moreover, it allows for workflow orchestration through Flow visualization, thereby enabling complex question and answer scenarios!"

View File

@@ -51,13 +51,13 @@ class UploadModel {
}
resolve({
...req.body,
files:
// @ts-ignore
req.files?.map((file) => ({
...file,
originalname: decodeURIComponent(file.originalname)
})) || [],
bucketName: req.body.bucketName,
metadata: (() => {
if (!req.body?.metadata) return {};
try {
@@ -80,6 +80,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
try {
await connectToDatabase();
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
console.log(req.body);
const { files, bucketName, metadata } = await upload.doUpload(req, res);

View File

@@ -5,7 +5,7 @@ import { MongoUser } from '@fastgpt/service/support/user/schema';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { findAllChildrenIds } from '../delete';
import QueryStream from 'pg-query-stream';
import { PgClient, Pg } from '@fastgpt/service/common/pg';
import { PgClient } from '@fastgpt/service/common/pg';
import { addLog } from '@fastgpt/service/common/mongo/controller';
import { responseWriteController } from '@fastgpt/service/common/response';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
@@ -17,7 +17,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
datasetId: string;
};
if (!datasetId || !Pg) {
if (!datasetId || !global.pgClient) {
throw new Error('缺少参数');
}
@@ -61,7 +61,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
// connect pg
Pg.connect((err, client, done) => {
global.pgClient.connect((err, client, done) => {
if (err) {
console.error(err);
res.end('Error connecting to database');

View File

@@ -0,0 +1,48 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { getUploadModel } from '@fastgpt/service/common/file/upload/multer';
import fs from 'fs';
import { getAIApi } from '@fastgpt/service/core/ai/config';
// Whisper clips are short; cap uploads at 2MB.
const upload = getUploadModel({
  maxSize: 2
});

/**
 * POST /v1/audio/transcriptions
 * Accepts a multipart audio upload and returns the whisper-1 transcription text.
 * Requires an authenticated user (token auth).
 */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  let filePath = '';
  try {
    await authCert({ req, authToken: true });

    const { files } = await upload.doUpload(req, res);
    const file = files[0];

    if (!file) {
      throw new Error('file not found');
    }
    filePath = file.path;

    const ai = getAIApi();

    const result = await ai.audio.transcriptions.create({
      file: fs.createReadStream(file.path),
      model: 'whisper-1'
    });

    jsonRes(res, {
      data: result.text
    });
  } catch (err) {
    console.log(err);
    jsonRes(res, {
      code: 500,
      error: err
    });
  } finally {
    // multer wrote the upload to a temp file on disk; remove it so repeated
    // transcriptions don't leak storage. Best-effort: ignore unlink errors.
    if (filePath) {
      fs.unlink(filePath, () => {});
    }
  }
});

// multer parses the multipart body itself — Next's body parser must be off.
export const config = {
  api: {
    bodyParser: false
  }
};

View File

@@ -0,0 +1,70 @@
import { useEffect, useRef, useState } from 'react';
import { POST } from '../api/request';
import { useToast } from './useToast';
import { useTranslation } from 'react-i18next';
import { getErrText } from '@fastgpt/global/common/error/utils';
/**
 * Microphone speech-to-text hook.
 *
 * `startSpeak` requests mic access and records; `stopSpeak` ends the
 * recording, which triggers an upload to the transcription endpoint.
 * `isSpeaking` reflects whether a recording is in progress.
 */
export const useSpeech = () => {
  const { t } = useTranslation();
  const mediaRecorder = useRef<MediaRecorder>();
  const { toast } = useToast();
  const [isSpeaking, setIsSpeaking] = useState(false);

  const startSpeak = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaRecorder.current = new MediaRecorder(stream);
      const chunks: Blob[] = [];

      mediaRecorder.current.ondataavailable = (e) => {
        chunks.push(e.data);
      };
      mediaRecorder.current.onstop = async () => {
        // Release the microphone as soon as recording ends; otherwise the
        // browser keeps the mic indicator on until the tab closes.
        stream.getTracks().forEach((track) => track.stop());

        const formData = new FormData();
        const blob = new Blob(chunks, { type: 'audio/webm' });
        formData.append('files', blob, 'recording.webm');

        try {
          // API returns the transcription text (`result.text`) as a string.
          const result = await POST<string>('/v1/audio/transcriptions', formData, {
            timeout: 60000,
            headers: {
              'Content-Type': 'multipart/form-data; charset=utf-8'
            }
          });
          // TODO(review): result is not yet surfaced to the caller — wire it
          // into the chat input once the feature is enabled.
          console.log(result);
        } catch (error) {
          toast({
            status: 'warning',
            title: getErrText(error, t('common.speech.error tip'))
          });
        }
        setIsSpeaking(false);
      };

      mediaRecorder.current.start();
      setIsSpeaking(true);
    } catch (error) {
      // Surface mic permission / device failures instead of failing silently.
      toast({
        status: 'warning',
        title: getErrText(error, t('common.speech.error tip'))
      });
    }
  };

  const stopSpeak = () => {
    mediaRecorder.current?.stop();
  };

  return {
    startSpeak,
    stopSpeak,
    isSpeaking
  };
};