commit bcf9491999
parent d0041a98b4
Author: Archer (committed by GitHub)
Date:   2023-10-11 17:18:43 +08:00

51 changed files with 852 additions and 460 deletions

View File

@@ -2,9 +2,9 @@ import { GET, POST, DELETE, PUT } from './request';
import type { AppSchema } from '@/types/mongoSchema';
import type { AppListItemType, AppUpdateParams } from '@/types/app';
import { RequestPaging } from '../types/index';
import type { Props as CreateAppProps } from '@/pages/api/app/create';
import { addDays } from 'date-fns';
import { GetAppChatLogsParams } from './request/app';
import type { CreateAppParams } from '@/types/app';
/**
* Get the model list
@@ -14,7 +14,7 @@ export const getMyModels = () => GET<AppListItemType[]>('/app/myApps');
/**
* Create a model
*/
export const postCreateApp = (data: CreateAppProps) => POST<string>('/app/create', data);
export const postCreateApp = (data: CreateAppParams) => POST<string>('/app/create', data);
/**
* Delete a model by ID

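Note: the client helper and the `/app/create` route now share one `CreateAppParams` type instead of importing the route's `Props`. A minimal usage sketch (hedged: `'basic'` assumes `AppTypeEnum.basic === 'basic'`, matching the template usage later in this commit):

```ts
import { postCreateApp } from '@/api/app';
import type { CreateAppParams } from '@/types/app';

// Create an app from a template; `type` and `modules` normally come from the
// selected entry in appTemplates, as CreateModal does below.
const params: CreateAppParams = {
  name: 'My app',
  avatar: '/imgs/module/AI.png',
  type: 'basic', // assumption: AppTypeEnum.basic === 'basic'
  modules: []
};
const appId = await postCreateApp(params); // resolves to the new app's id
```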
View File

@@ -1,5 +1,5 @@
import React, { useCallback, useMemo, useState } from 'react';
import { ModalBody, Box, useTheme } from '@chakra-ui/react';
import { ModalBody, Box, useTheme, Flex, Progress } from '@chakra-ui/react';
import { getDatasetDataItemById } from '@/api/core/dataset/data';
import { useLoading } from '@/hooks/useLoading';
import { useToast } from '@/hooks/useToast';
@@ -8,22 +8,19 @@ import { QuoteItemType } from '@/types/chat';
import MyIcon from '@/components/Icon';
import InputDataModal, { RawFileText } from '@/pages/kb/detail/components/InputDataModal';
import MyModal from '../MyModal';
import type { PgDataItemType } from '@/types/core/dataset/data';
import { useTranslation } from 'react-i18next';
import { useRouter } from 'next/router';
type SearchType = PgDataItemType & {
kb_id?: string;
};
const QuoteModal = ({
onUpdateQuote,
rawSearch = [],
onClose
}: {
onUpdateQuote: (quoteId: string, sourceText?: string) => Promise<void>;
rawSearch: SearchType[];
rawSearch: QuoteItemType[];
onClose: () => void;
}) => {
const { t } = useTranslation();
const theme = useTheme();
const router = useRouter();
const { toast } = useToast();
@@ -36,7 +33,7 @@ const QuoteModal = ({
* click edit, get new kbDataItem
*/
const onclickEdit = useCallback(
async (item: SearchType) => {
async (item: QuoteItemType) => {
if (!item.id) return;
try {
setIsLoading(true);
@@ -95,9 +92,30 @@ const QuoteModal = ({
_hover={{ '& .edit': { display: 'flex' } }}
overflow={'hidden'}
>
{item.source && !isShare && (
<RawFileText filename={item.source} fileId={item.file_id} />
{!isShare && (
<Flex alignItems={'center'} mb={1}>
<RawFileText
filename={item.source || t('common.Unknow') || 'Unknow'}
fileId={item.file_id}
/>
<Box flex={'1'} />
{item.score && (
<>
<Progress
mx={2}
w={['60px', '100px']}
value={item.score * 100}
size="sm"
borderRadius={'20px'}
colorScheme="gray"
border={theme.borders.base}
/>
<Box>{item.score.toFixed(4)}</Box>
</>
)}
</Flex>
)}
<Box>{item.q}</Box>
<Box>{item.a}</Box>
{item.id && !isShare && (

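Note: each quote now renders a relevance score. `item.score` comes from the reworked vector search later in this commit (an inner-product similarity, roughly 0 to 1 for normalized embeddings), so `score * 100` maps onto Chakra's 0-100 `Progress` scale. A self-contained sketch with clamping, in case a score ever falls outside that range (the clamp is an addition, not in the commit):

```tsx
import React from 'react';
import { Progress } from '@chakra-ui/react';

// Clamp before scaling, since <Progress> expects a 0-100 value.
const toPercent = (score: number) => Math.min(100, Math.max(0, score * 100));

export const ScoreBar = ({ score }: { score: number }) => (
  <Progress value={toPercent(score)} size="sm" borderRadius={'20px'} />
);
```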
View File

@@ -102,7 +102,7 @@ const Layout = ({ children }: { children: JSX.Element }) => {
</>
)}
</Box>
<Loading loading={loading} />
<Loading loading={loading} zIndex={9999} />
</>
);
};

View File

@@ -4,16 +4,18 @@ import { Spinner, Flex, Box } from '@chakra-ui/react';
const Loading = ({
fixed = true,
text = '',
bg = 'rgba(255,255,255,0.5)'
bg = 'rgba(255,255,255,0.5)',
zIndex = 1000
}: {
fixed?: boolean;
text?: string;
bg?: string;
zIndex?: number;
}) => {
return (
<Flex
position={fixed ? 'fixed' : 'absolute'}
zIndex={1000}
zIndex={zIndex}
bg={bg}
top={0}
left={0}

View File

@@ -59,5 +59,5 @@ export enum OutLinkTypeEnum {
apikey = 'apikey'
}
export const HUMAN_ICON = `/icon/human.png`;
export const HUMAN_ICON = `/icon/human.svg`;
export const LOGO_ICON = `/icon/logo.svg`;

View File

@@ -1,4 +1,4 @@
import { SystemInputEnum } from '../app';
import { AppTypeEnum, SystemInputEnum } from '../app';
import { TaskResponseKeyEnum } from '../chat';
import {
FlowModuleTypeEnum,
@@ -575,12 +575,17 @@ export const ModuleTemplatesFlat = [
];
// template
export const appTemplates: (AppItemType & { avatar: string; intro: string })[] = [
export const appTemplates: (AppItemType & {
avatar: string;
intro: string;
type: `${AppTypeEnum}`;
})[] = [
{
id: 'simpleChat',
avatar: '/imgs/module/AI.png',
name: '简单的对话',
intro: '一个极其简单的 AI 对话应用',
type: AppTypeEnum.basic,
modules: [
{
moduleId: 'userGuide',
@@ -797,6 +802,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
avatar: '/imgs/module/db.png',
name: '知识库 + 对话引导',
intro: '每次提问时进行一次知识库搜索,将搜索结果注入 LLM 模型进行参考回答',
type: AppTypeEnum.basic,
modules: [
{
moduleId: 'userGuide',
@@ -811,7 +817,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
key: 'welcomeText',
type: 'input',
label: '开场白',
value: '你好,我是 laf 助手,有什么可以帮助你的么?',
value: '你好,我是知识库助手,请不要忘记选择知识库噢~',
connected: true
}
],
@@ -1162,6 +1168,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
avatar: '/imgs/module/userGuide.png',
name: '对话引导 + 变量',
intro: '可以在对话开始发送一段提示,或者让用户填写一些内容,作为本次对话的变量',
type: AppTypeEnum.basic,
modules: [
{
moduleId: 'userGuide',
@@ -1174,27 +1181,15 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
inputs: [
{
key: 'welcomeText',
type: 'input',
type: 'hidden',
label: '开场白',
value: '你好,我可以为你翻译各种语言,请告诉我你需要翻译成什么语言?',
connected: true
}
],
outputs: []
},
{
moduleId: 'variable',
name: '全局变量',
flowType: 'variable',
position: {
x: 444.0369195277651,
y: 1008.5185781784537
},
inputs: [
},
{
key: 'variables',
type: 'systemInput',
label: '变量输入',
type: 'hidden',
label: '对话框变量',
value: [
{
id: '35c640eb-cf22-431f-bb57-3fc21643880e',
@@ -1227,6 +1222,13 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
}
],
connected: true
},
{
key: 'questionGuide',
type: 'switch',
label: '问题引导',
value: false,
connected: true
}
],
outputs: []
@@ -1275,7 +1277,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 10,
value: 2,
min: 0,
max: 50,
connected: true
@@ -1317,7 +1319,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
type: 'custom',
label: '对话模型',
value: 'gpt-3.5-turbo-16k',
list: [],
connected: true
},
{
@@ -1346,7 +1347,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
label: '回复上限',
value: 8000,
min: 100,
max: 16000,
max: 4000,
step: 50,
markList: [
{
@@ -1354,8 +1355,8 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: 100
},
{
label: '16000',
value: 16000
label: '4000',
value: 4000
}
],
connected: true
@@ -1364,11 +1365,28 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
key: 'systemPrompt',
type: 'textarea',
label: '系统提示词',
max: 300,
valueType: 'string',
description:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
placeholder:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。可使用变量,例如 {{language}}',
value: '请直接将我的问题翻译成{{language}},不需要回答问题。',
connected: true
},
{
key: 'quoteTemplate',
type: 'hidden',
label: '引用内容模板',
valueType: 'string',
value: '',
connected: true
},
{
key: 'quotePrompt',
type: 'hidden',
label: '引用内容提示词',
valueType: 'string',
value: '',
connected: true
},
@@ -1381,8 +1399,9 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
key: 'quoteQA',
type: 'target',
type: 'custom',
label: '引用内容',
description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
valueType: 'kb_quote',
connected: false
},
@@ -1406,8 +1425,9 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
{
key: 'answerText',
label: '模型回复',
description: '直接响应,无需配置',
type: 'hidden',
description: '将在 stream 回复完毕后触发',
valueType: 'string',
type: 'source',
targets: []
},
{
@@ -1417,6 +1437,14 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
valueType: 'boolean',
type: 'source',
targets: []
},
{
key: 'history',
label: '新的上下文',
description: '将本次回复内容拼接上历史记录,作为新的上下文返回',
valueType: 'chat_history',
type: 'source',
targets: []
}
]
}
@@ -1427,6 +1455,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
avatar: '/imgs/module/cq.png',
name: '问题分类 + 知识库',
intro: '先对用户的问题进行分类,再根据不同类型问题,执行不同的操作',
type: AppTypeEnum.advanced,
modules: [
{
moduleId: '7z5g5h',

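Note: every template now carries a `type` that `postCreateApp` forwards to the server. A sketch of the enum this implies (hedged: the real `AppTypeEnum` lives in `'@/constants/app'`; the string values are inferred from the `basic`/`advanced` usage above):

```ts
// Plausible shape of AppTypeEnum as the templates use it.
export enum AppTypeEnum {
  basic = 'basic',
  advanced = 'advanced'
}

// `${AppTypeEnum}` in the template type widens the enum to its string values,
// so the plain literals 'basic' | 'advanced' are also assignable.
type AppType = `${AppTypeEnum}`;
```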
View File

@@ -8,13 +8,17 @@ export const useLoading = (props?: { defaultLoading: boolean }) => {
({
loading,
fixed = true,
text = ''
text = '',
zIndex
}: {
loading?: boolean;
fixed?: boolean;
text?: string;
zIndex?: number;
}): JSX.Element | null => {
return isLoading || loading ? <LoadingComponent fixed={fixed} text={text} /> : null;
return isLoading || loading ? (
<LoadingComponent fixed={fixed} text={text} zIndex={zIndex} />
) : null;
},
[isLoading]
);

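Note: `zIndex` is threaded through three layers here: `Layout` passes `zIndex={9999}` to the `Loading` render prop returned by `useLoading`, which forwards it to the `Loading` component (default `1000`). A hedged usage sketch, assuming `useLoading()` exposes the `Loading` shown above:

```tsx
import React from 'react';
import { useLoading } from '@/hooks/useLoading';

const Page = ({ isFetching, isBooting }: { isFetching: boolean; isBooting: boolean }) => {
  const { Loading } = useLoading();
  return (
    <>
      <Loading loading={isFetching} /> {/* default stacking: zIndex 1000 */}
      <Loading loading={isBooting} zIndex={9999} /> {/* above modals, as Layout now does */}
    </>
  );
};
```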
View File

@@ -15,16 +15,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
await authUser({ req, authRoot: true });
console.log('add index');
await PgClient.query(
`
ALTER TABLE modeldata
ALTER COLUMN source TYPE VARCHAR(256),
ALTER COLUMN file_id TYPE VARCHAR(256);
CREATE INDEX IF NOT EXISTS modelData_fileId_index ON modeldata (file_id);
`
);
console.log('index success');
console.log('count rows');
// get distinct fileIds
const { rows } = await PgClient.query(`SELECT DISTINCT file_id
@@ -36,8 +26,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await init(rows.slice(i, i + limit), initFileIds);
console.log(i);
}
console.log('filter success');
console.log('start update');
for (let i = 0; i < initFileIds.length; i++) {
await PgClient.query(`UPDATE ${PgDatasetTableName}
@@ -49,9 +37,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { rows: emptyIds } = await PgClient.query(
`SELECT id FROM ${PgDatasetTableName} WHERE file_id IS NULL OR file_id=''`
);
console.log('filter success');
console.log(emptyIds.length);
await delay(5000);
console.log('start update');
async function start(start: number) {
for (let i = start; i < emptyIds.length; i += limit) {
@@ -65,12 +55,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
start(i);
}
// await PgClient.query(
// `UPDATE ${PgDatasetTableName}
// SET file_id = '${DatasetSpecialIdEnum.manual}'
// WHERE file_id IS NULL OR file_id = ''`
// );
console.log('update success');
jsonRes(res, {

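Note: the surviving update loop launches `limit` workers, each starting at a different offset and striding by `limit`, which bounds concurrency over `emptyIds` (the script fires them without awaiting; the sketch below awaits completion instead). A standalone version of the pattern, with a hypothetical `updateRow` helper:

```ts
// N striding workers over one array = at most N in-flight updates.
async function runStriped<T>(
  items: T[],
  limit: number,
  updateRow: (item: T) => Promise<void> // hypothetical per-row update
) {
  async function worker(offset: number) {
    for (let i = offset; i < items.length; i += limit) {
      await updateRow(items[i]);
    }
  }
  await Promise.all(Array.from({ length: limit }, (_, i) => worker(i)));
}
```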
View File

@@ -4,17 +4,17 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { App } from '@/service/models/app';
import { AppModuleItemType } from '@/types/app';
export type Props = {
name: string;
avatar?: string;
modules: AppModuleItemType[];
};
import type { CreateAppParams } from '@/types/app';
import { AppTypeEnum } from '@/constants/app';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { name, avatar, modules } = req.body as Props;
const {
name = 'APP',
avatar,
type = AppTypeEnum.advanced,
modules
} = req.body as CreateAppParams;
if (!name || !Array.isArray(modules)) {
throw new Error('缺少参数');
@@ -38,7 +38,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
avatar,
name,
userId,
modules
modules,
type
});
jsonRes(res, {

View File

@@ -9,7 +9,7 @@ import { authApp } from '@/service/utils/auth';
/* get my models */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { name, avatar, type, chat, share, intro, modules } = req.body as AppUpdateParams;
const { name, avatar, type, share, intro, modules } = req.body as AppUpdateParams;
const { appId } = req.query as { appId: string };
if (!appId) {
@@ -37,7 +37,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type,
avatar,
intro,
chat,
...(share && {
'share.isShare': share.isShare,
'share.isShareDetail': share.isShareDetail

View File

@@ -13,6 +13,7 @@ import { getVectorModel } from '@/service/utils/data';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';
import { authFileIdValid } from '@/service/dataset/auth';
export type Props = {
kbId: string;
@@ -72,6 +73,8 @@ export async function getVectorAndInsertDataset(
return Promise.reject('已经存在完全一致的数据');
}
await authFileIdValid(data.file_id);
const { vectors } = await getVector({
model: kb.vectorModel,
input: [q],

View File

@@ -1,16 +1,17 @@
/* push data to training queue */
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { connectToDatabase, TrainingData, KB } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { authKb } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { PgDatasetTableName, TrainingModeEnum } from '@/constants/plugin';
import { TrainingModeEnum } from '@/constants/plugin';
import { startQueue } from '@/service/utils/tools';
import { PgClient } from '@/service/pg';
import { getVectorModel } from '@/service/utils/data';
import { DatasetDataItemType } from '@/types/core/dataset/data';
import { countPromptTokens } from '@/utils/common/tiktoken';
import type { PushDataProps, PushDataResponse } from '@/api/core/dataset/data.d';
import { authFileIdValid } from '@/service/dataset/auth';
const modeMap = {
[TrainingModeEnum.index]: true,
@@ -80,69 +81,49 @@ export async function pushDataToKb({
[TrainingModeEnum.qa]: global.qaModel.maxToken * 0.8
};
// filter duplicate qa content
// filter repeat or equal content
const set = new Set();
const filterData: DatasetDataItemType[] = [];
const filterResult: Record<string, DatasetDataItemType[]> = {
success: [],
overToken: [],
fileIdInvalid: [],
error: []
};
data.forEach((item) => {
if (!item.q) return;
await Promise.all(
data.map(async (item) => {
if (!item.q) {
filterResult.error.push(item);
return;
}
const text = item.q + item.a;
const text = item.q + item.a;
// count q token
const token = countPromptTokens(item.q, 'system');
// count q token
const token = countPromptTokens(item.q, 'system');
if (token > modeMaxToken[mode]) {
return;
}
if (token > modeMaxToken[mode]) {
filterResult.overToken.push(item);
return;
}
if (!set.has(text)) {
filterData.push(item);
set.add(text);
}
});
try {
await authFileIdValid(item.file_id);
} catch (error) {
filterResult.fileIdInvalid.push(item);
return;
}
// deduplicate against the database
const insertData = (
await Promise.allSettled(
filterData.map(async (data) => {
let { q, a } = data;
if (mode !== TrainingModeEnum.index) {
return Promise.resolve(data);
}
if (!q) {
return Promise.reject('q为空');
}
q = q.replace(/\\n/g, '\n').trim().replace(/'/g, '"');
a = a.replace(/\\n/g, '\n').trim().replace(/'/g, '"');
// Exactly the same data, not push
try {
const { rows } = await PgClient.query(`
SELECT COUNT(*) > 0 AS exists
FROM ${PgDatasetTableName}
WHERE md5(q)=md5('${q}') AND md5(a)=md5('${a}') AND user_id='${userId}' AND kb_id='${kbId}'
`);
const exists = rows[0]?.exists || false;
if (exists) {
return Promise.reject('已经存在');
}
} catch (error) {
console.log(error);
}
return Promise.resolve(data);
})
)
)
.filter((item) => item.status === 'fulfilled')
.map<DatasetDataItemType>((item: any) => item.value);
if (!set.has(text)) {
filterResult.success.push(item);
set.add(text);
}
})
);
// insert the records
const insertRes = await TrainingData.insertMany(
insertData.map((item) => ({
filterResult.success.map((item) => ({
...item,
userId,
kbId,
@@ -154,9 +135,11 @@ export async function pushDataToKb({
);
insertRes.length > 0 && startQueue();
delete filterResult.success;
return {
insertLen: insertRes.length
insertLen: insertRes.length,
...filterResult
};
}

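Note: `pushDataToKb` no longer drops bad rows silently. Each item lands in exactly one bucket (`success`, `overToken`, `fileIdInvalid`, `error`); the successes are inserted, then `success` is deleted so the response carries only `insertLen` plus the rejected items. A sketch of the implied response shape (the real type is `PushDataResponse` in `@/api/core/dataset/data.d`):

```ts
import type { DatasetDataItemType } from '@/types/core/dataset/data';

// Plausible shape of what the route now returns.
type PushDataResult = {
  insertLen: number; // rows actually queued for training
  overToken: DatasetDataItemType[]; // q exceeded the mode's token budget
  fileIdInvalid: DatasetDataItemType[]; // authFileIdValid rejected file_id
  error: DatasetDataItemType[]; // item had no q
};
```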
View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { connectToDatabase, TrainingData } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { GridFSStorage } from '@/service/lib/gridfs';
import { PgClient } from '@/service/pg';
import { PgClient, updateDataFileId } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';
import { FileStatusEnum } from '@/constants/dataset';
import { strIsLink } from '@fastgpt/common/tools/str';
@@ -35,8 +35,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
.join(' ')}
${searchText ? `AND source ILIKE '%${searchText}%'` : ''}`;
const [{ rows }, { rowCount: total }] = await Promise.all([
PgClient.query(`SELECT file_id, COUNT(*) AS count
let [{ rows }, { rowCount: total }] = await Promise.all([
PgClient.query<{ file_id: string; count: number }>(`SELECT file_id, COUNT(*) AS count
FROM ${PgDatasetTableName}
where ${pgWhere}
GROUP BY file_id
@@ -49,6 +49,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
`)
]);
// If fileId is invalid, reset it to manual
await Promise.all(
rows.map((row) => {
if (!strIsLink(row.file_id) && row.file_id.length !== 24) {
return updateDataFileId({
oldFileId: row.file_id,
userId,
newFileId: DatasetSpecialIdEnum.manual
});
}
})
);
// just filter link or fileData
rows = rows.filter((row) => strIsLink(row.file_id) || row.file_id.length === 24);
// find files
const gridFs = new GridFSStorage('dataset', userId);
const collection = gridFs.Collection();
@@ -96,6 +111,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const data = await Promise.all([
getSpecialData(),
...rows.map(async (row) => {
if (!row.file_id) return null;
// link data
if (strIsLink(row.file_id)) {
const { rows } = await PgClient.select(PgDatasetTableName, {

View File

@@ -47,7 +47,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
data: response?.[2]?.rows || []
});
} catch (err) {
console.log(err);
jsonRes(res, {
code: 500,
error: err

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authBalanceByUid, authUser } from '@/service/utils/auth';
import { withNextCors } from '@/service/utils/tools';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { getAIApi } from '@fastgpt/core/ai/config';
import { pushGenerateVectorBill } from '@/service/common/bill/push';
type Props = {
@@ -54,29 +54,31 @@ export async function getVector({
}
// get the chat API client
const chatAPI = getAIChatApi();
const ai = getAIApi();
// convert the input into vectors
const result = await chatAPI
.createEmbedding(
const result = await ai.embeddings
.create(
{
model,
input
},
{
timeout: 60000,
...axiosConfig()
timeout: 60000
}
)
.then(async (res) => {
if (!res.data?.data?.[0]?.embedding) {
console.log(res.data);
if (!res.data) {
return Promise.reject('Embedding API 404');
}
if (!res?.data?.[0]?.embedding) {
console.log(res?.data);
// @ts-ignore
return Promise.reject(res.data?.err?.message || 'Embedding API Error');
}
return {
tokenLen: res.data.usage.total_tokens || 0,
vectors: await Promise.all(res.data.data.map((item) => unityDimensional(item.embedding)))
tokenLen: res.usage.total_tokens || 0,
vectors: await Promise.all(res.data.map((item) => unityDimensional(item.embedding)))
};
});

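Note: this file (and the chat/QA/extract modules below) migrates from the axios-based `getAIChatApi` to `getAIApi`, matching the `openai` v4 SDK surface: `ai.embeddings.create(...)` resolves to the response object directly, so the old `res.data.data` unwrapping becomes `res.data`. A minimal sketch of what `getAIApi` plausibly looks like (the real implementation is in `@fastgpt/core/ai/config`; the `key`/`baseUrl` fields and env var are assumptions):

```ts
import OpenAI from 'openai';

// Second argument = per-client request timeout in ms (480000 in the QA and
// chat modules below).
export const getAIApi = (props?: { key?: string; baseUrl?: string }, timeout = 60000) =>
  new OpenAI({
    apiKey: props?.key || process.env.CHAT_API_KEY || '',
    baseURL: props?.baseUrl || 'https://api.openai.com/v1',
    timeout
  });
```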
View File

@@ -5,7 +5,7 @@ import { User } from '@/service/models/user';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { UserUpdateParams } from '@/types/user';
import { axiosConfig, getAIChatApi, openaiBaseUrl } from '@fastgpt/core/ai/config';
import { getAIApi, openaiBaseUrl } from '@fastgpt/core/ai/config';
/* update user info */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -22,20 +22,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const baseUrl = openaiAccount?.baseUrl || openaiBaseUrl;
openaiAccount.baseUrl = baseUrl;
const chatAPI = getAIChatApi(openaiAccount);
const ai = getAIApi(openaiAccount);
const response = await chatAPI.createChatCompletion(
{
model: 'gpt-3.5-turbo',
max_tokens: 1,
messages: [{ role: 'user', content: 'hi' }]
},
{
...axiosConfig(openaiAccount)
}
);
if (response?.data?.choices?.[0]?.message?.content === undefined) {
throw new Error(JSON.stringify(response?.data));
const response = await ai.chat.completions.create({
model: 'gpt-3.5-turbo',
max_tokens: 1,
messages: [{ role: 'user', content: 'hi' }]
});
if (response?.choices?.[0]?.message?.content === undefined) {
throw new Error('Key response is empty');
}
}

View File

@@ -6,6 +6,7 @@ import dynamic from 'next/dynamic';
import { defaultApp } from '@/constants/model';
import { useToast } from '@/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { feConfigs } from '@/store/static';
import Tabs from '@/components/Tabs';
import SideTabs from '@/components/SideTabs';
@@ -52,7 +53,9 @@ const AppDetail = ({ currentTab }: { currentTab: `${TabEnum}` }) => {
const tabList = useMemo(
() => [
{ label: '简易配置', id: TabEnum.basicEdit, icon: 'overviewLight' },
{ label: '高级编排', id: TabEnum.adEdit, icon: 'settingLight' },
...(feConfigs?.hide_app_flow
? []
: [{ label: '高级编排', id: TabEnum.adEdit, icon: 'settingLight' }]),
{ label: '外部使用', id: TabEnum.outLink, icon: 'shareLight' },
{ label: '对话日志', id: TabEnum.logs, icon: 'logsLight' },
{ label: '立即对话', id: TabEnum.startChat, icon: 'chat' }

View File

@@ -21,6 +21,7 @@ import { useRouter } from 'next/router';
import { appTemplates } from '@/constants/flow/ModuleTemplate';
import { useGlobalStore } from '@/store/global';
import { useRequest } from '@/hooks/useRequest';
import { feConfigs } from '@/store/static';
import Avatar from '@/components/Avatar';
import MyTooltip from '@/components/MyTooltip';
import MyModal from '@/components/MyModal';
@@ -74,10 +75,15 @@ const CreateModal = ({ onClose, onSuccess }: { onClose: () => void; onSuccess: (
const { mutate: onclickCreate, isLoading: creating } = useRequest({
mutationFn: async (data: FormType) => {
const template = appTemplates.find((item) => item.id === data.templateId);
if (!template) {
return Promise.reject('模板不存在');
}
return postCreateApp({
avatar: data.avatar,
name: data.name,
modules: appTemplates.find((item) => item.id === data.templateId)?.modules || []
type: template.type,
modules: template.modules || []
});
},
onSuccess(id: string) {
@@ -118,48 +124,52 @@ const CreateModal = ({ onClose, onSuccess }: { onClose: () => void; onSuccess: (
})}
/>
</Flex>
<Box mt={[4, 7]} mb={[0, 3]} color={'myGray.800'} fontWeight={'bold'}>
</Box>
<Grid
userSelect={'none'}
gridTemplateColumns={['repeat(1,1fr)', 'repeat(2,1fr)']}
gridGap={[2, 4]}
>
{appTemplates.map((item) => (
<Card
key={item.id}
border={theme.borders.base}
p={3}
borderRadius={'md'}
cursor={'pointer'}
boxShadow={'sm'}
{...(getValues('templateId') === item.id
? {
bg: 'myWhite.600'
}
: {
_hover: {
boxShadow: 'md'
}
})}
onClick={() => {
setValue('templateId', item.id);
setRefresh((state) => !state);
}}
{!feConfigs?.hide_app_flow && (
<>
<Box mt={[4, 7]} mb={[0, 3]} color={'myGray.800'} fontWeight={'bold'}>
</Box>
<Grid
userSelect={'none'}
gridTemplateColumns={['repeat(1,1fr)', 'repeat(2,1fr)']}
gridGap={[2, 4]}
>
<Flex alignItems={'center'}>
<Avatar src={item.avatar} borderRadius={'md'} w={'20px'} />
<Box ml={3} fontWeight={'bold'}>
{item.name}
</Box>
</Flex>
<Box fontSize={'sm'} mt={4}>
{item.intro}
</Box>
</Card>
))}
</Grid>
{appTemplates.map((item) => (
<Card
key={item.id}
border={theme.borders.base}
p={3}
borderRadius={'md'}
cursor={'pointer'}
boxShadow={'sm'}
{...(getValues('templateId') === item.id
? {
bg: 'myWhite.600'
}
: {
_hover: {
boxShadow: 'md'
}
})}
onClick={() => {
setValue('templateId', item.id);
setRefresh((state) => !state);
}}
>
<Flex alignItems={'center'}>
<Avatar src={item.avatar} borderRadius={'md'} w={'20px'} />
<Box ml={3} fontWeight={'bold'}>
{item.name}
</Box>
</Flex>
<Box fontSize={'sm'} mt={4}>
{item.intro}
</Box>
</Card>
))}
</Grid>
</>
)}
</ModalBody>
<ModalFooter>

View File

@@ -263,6 +263,10 @@ export function RawFileText({ fileId, filename = '', ...props }: RawFileTextProp
const { setLoading } = useGlobalStore();
const hasFile = useMemo(() => fileId && !datasetSpecialIds.includes(fileId), [fileId]);
const formatName = useMemo(
() => (filename.startsWith('kb') ? t(filename) : filename),
[filename, t]
);
return (
<MyTooltip label={hasFile ? t('file.Click to view file') || '' : ''} shouldWrapChildren={false}>
@@ -293,7 +297,7 @@ export function RawFileText({ fileId, filename = '', ...props }: RawFileTextProp
: {})}
{...props}
>
{t(filename)}
{formatName}
</Box>
</MyTooltip>
);

View File

@@ -0,0 +1,22 @@
import { isSpecialFileId } from '@fastgpt/core/dataset/utils';
import { GridFSStorage } from '../lib/gridfs';
import { Types } from 'mongoose';
export async function authFileIdValid(fileId?: string) {
if (!fileId) return true;
if (isSpecialFileId(fileId)) return true;
try {
// find file
const gridFs = new GridFSStorage('dataset', '');
const collection = gridFs.Collection();
const file = await collection.findOne(
{ _id: new Types.ObjectId(fileId) },
{ projection: { _id: 1 } }
);
if (!file) {
return Promise.reject('Invalid fileId');
}
} catch (error) {
return Promise.reject('Invalid fileId');
}
}
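Note: a `file_id` is considered valid when it is a special id (e.g. manually entered data) or resolves to a real GridFS file; callers only care about rejection, so the mixed `true`/`undefined` returns are harmless in practice. A sketch of what `isSpecialFileId` plausibly checks (the real helper lives in `@fastgpt/core/dataset/utils`):

```ts
import { DatasetSpecialIdEnum } from '@fastgpt/core/dataset/constant';

// Assumption: special ids are exactly the DatasetSpecialIdEnum values
// ('manual' etc.) used elsewhere in this commit.
export const isSpecialFileId = (id: string) =>
  (Object.values(DatasetSpecialIdEnum) as string[]).includes(id);
```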

View File

@@ -17,19 +17,6 @@ export const TOKEN_ERROR_CODE: Record<number, string> = {
403: '登录状态无效,请重新登录'
};
export const openaiError: Record<string, string> = {
context_length_exceeded: '内容超长了,请重置对话',
Unauthorized: 'API-KEY 不合法',
rate_limit_reached: 'API被限制请稍后再试',
'Bad Request': 'Bad Request~ 可能内容太多了',
'Bad Gateway': '网关异常,请重试'
};
export const openaiAccountError: Record<string, string> = {
insufficient_quota: 'API 余额不足',
invalid_api_key: 'openai 账号异常',
account_deactivated: '账号已停用',
invalid_request_error: '无效请求'
};
export const proxyError: Record<string, boolean> = {
ECONNABORTED: true,
ECONNRESET: true

View File

@@ -4,7 +4,7 @@ import { TrainingModeEnum } from '@/constants/plugin';
import { ERROR_ENUM } from '../errorCode';
import { sendInform } from '@/pages/api/user/inform/send';
import { authBalanceByUid } from '../utils/auth';
import { axiosConfig, getAIChatApi } from '@fastgpt/core/ai/config';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
import { addLog } from '../utils/tools';
import { splitText2Chunks } from '@/utils/file';
@@ -58,8 +58,6 @@ export async function generateQA(): Promise<any> {
const startTime = Date.now();
const chatAPI = getAIChatApi();
// request LLM to get QA
const text = data.q;
const messages: ChatCompletionRequestMessage[] = [
@@ -73,19 +71,13 @@ export async function generateQA(): Promise<any> {
})
}
];
const { data: chatResponse } = await chatAPI.createChatCompletion(
{
model: global.qaModel.model,
temperature: 0.01,
messages,
stream: false
},
{
timeout: 480000,
...axiosConfig()
}
);
const ai = getAIApi(undefined, 480000);
const chatResponse = await ai.chat.completions.create({
model: global.qaModel.model,
temperature: 0.01,
messages,
stream: false
});
const answer = chatResponse.choices?.[0].message?.content;
const totalTokens = chatResponse.usage?.total_tokens || 0;

View File

@@ -23,7 +23,7 @@ const UserSchema = new Schema({
},
avatar: {
type: String,
default: '/icon/human.png'
default: '/icon/human.svg'
},
balance: {
type: Number,

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
@@ -105,27 +105,22 @@ async function functionCall({
required: ['type']
}
};
const chatAPI = getAIChatApi(user.openaiAccount);
const ai = getAIApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
model: cqModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
},
{
...axiosConfig(user.openaiAccount)
}
);
const response = await ai.chat.completions.create({
model: cqModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
});
try {
const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '');
const arg = JSON.parse(response.choices?.[0]?.message?.function_call?.arguments || '');
return {
arg,
tokens: response.data.usage?.total_tokens || 0
tokens: response.usage?.total_tokens || 0
};
} catch (error) {
console.log('Your model may not support function_call');
@@ -155,20 +150,14 @@ Human:${userChatInput}`
}
];
const chatAPI = getAIChatApi(user.openaiAccount);
const ai = getAIApi(user.openaiAccount, 480000);
const { data } = await chatAPI.createChatCompletion(
{
model: extractModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
},
{
timeout: 480000,
...axiosConfig(user.openaiAccount)
}
);
const data = await ai.chat.completions.create({
model: extractModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
});
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { FlowModuleTypeEnum } from '@/constants/flow';
@@ -126,30 +126,25 @@ async function functionCall({
}
};
const chatAPI = getAIChatApi(user.openaiAccount);
const ai = getAIApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
model: extractModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
},
{
...axiosConfig(user.openaiAccount)
}
);
const response = await ai.chat.completions.create({
model: extractModel.model,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
});
const arg: Record<string, any> = (() => {
try {
return JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');
return JSON.parse(response.choices?.[0]?.message?.function_call?.arguments || '{}');
} catch (error) {
return {};
}
})();
const tokens = response.data.usage?.total_tokens || 0;
const tokens = response.usage?.total_tokens || 0;
return {
tokens,
arg
@@ -181,20 +176,14 @@ Human: ${content}`
}
];
const chatAPI = getAIChatApi(user.openaiAccount);
const ai = getAIApi(user.openaiAccount, 480000);
const { data } = await chatAPI.createChatCompletion(
{
model: extractModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
},
{
timeout: 480000,
...axiosConfig(user.openaiAccount)
}
);
const data = await ai.chat.completions.create({
model: extractModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
});
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;

View File

@@ -3,9 +3,9 @@ import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { SSEParseData, parseStreamChunk } from '@/utils/sse';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/core/ai/type';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
@@ -20,9 +20,7 @@ import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { Readable } from 'stream';
import { responseWrite, responseWriteController } from '@/service/common/stream';
import { addLog } from '@/service/utils/tools';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
@@ -106,32 +104,25 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
// FastGPT temperature range: 1~10
temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
temperature = Math.max(temperature, 0.01);
const chatAPI = getAIChatApi(user.openaiAccount);
const ai = getAIApi(user.openaiAccount, 480000);
const response = await chatAPI.createChatCompletion(
{
model,
temperature,
max_tokens,
messages: [
...(modelConstantsData.defaultSystem
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystem
}
]
: []),
...messages
],
stream
},
{
timeout: 480000,
responseType: stream ? 'stream' : 'json',
...axiosConfig(user.openaiAccount)
}
);
const response = await ai.chat.completions.create({
model,
temperature,
max_tokens,
messages: [
...(modelConstantsData.defaultSystem
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystem
}
]
: []),
...messages
],
stream
});
const { answerText, totalTokens, completeMessages } = await (async () => {
if (stream) {
@@ -139,7 +130,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const { answer } = await streamResponse({
res,
detail,
response
stream: response
});
// count tokens
const completeMessages = filterMessages.concat({
@@ -159,8 +150,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
completeMessages
};
} else {
const answer = response.data.choices?.[0].message?.content || '';
const totalTokens = response.data.usage?.total_tokens || 0;
const unStreamResponse = response as ChatCompletion;
const answer = unStreamResponse.choices?.[0].message?.content || '';
const totalTokens = unStreamResponse.usage?.total_tokens || 0;
const completeMessages = filterMessages.concat({
obj: ChatRoleEnum.AI,
@@ -208,7 +200,7 @@ function filterQuote({
obj: ChatRoleEnum.System,
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
...item,
index: `${index + 1}`
index: index + 1
})
}))
});
@@ -340,59 +332,40 @@ function targetResponse({
async function streamResponse({
res,
detail,
response
stream
}: {
res: NextApiResponse;
detail: boolean;
response: any;
stream: StreamChatType;
}) {
return new Promise<{ answer: string }>((resolve, reject) => {
const stream = response.data as Readable;
let answer = '';
const parseData = new SSEParseData();
const write = responseWriteController({
res,
readStream: stream
});
stream.on('data', (data) => {
if (res.closed) {
stream.destroy();
return resolve({ answer });
}
const parse = parseStreamChunk(data);
parse.forEach((item) => {
const { data } = parseData.parse(item);
if (!data || data === '[DONE]') return;
const content: string = data?.choices?.[0]?.delta?.content || '';
if (data.error) {
addLog.error(`SSE response`, data.error);
} else {
answer += content;
responseWrite({
write,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
text: content
})
});
}
});
});
stream.on('end', () => {
resolve({ answer });
});
stream.on('close', () => {
resolve({ answer });
});
stream.on('error', (err) => {
reject(err);
});
const write = responseWriteController({
res,
readStream: stream
});
let answer = '';
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
break;
}
const content = part.choices[0]?.delta?.content || '';
answer += content;
responseWrite({
write,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
text: content
})
});
}
if (!answer) {
return Promise.reject('Chat API is error or undefined');
}
return { answer };
}
function getHistoryPreview(completeMessages: ChatItemType[]) {

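Note: the stream handler drops the hand-rolled SSE plumbing (`SSEParseData`, `parseStreamChunk`, `Readable` event listeners) in favor of the v4 SDK's async-iterable stream. The core pattern, as a reduced sketch (assuming `ai`, `model`, `messages`, and the Next.js `res` are in scope):

```ts
const stream = await ai.chat.completions.create({ model, messages, stream: true });

let answer = '';
for await (const part of stream) {
  if (res.closed) {
    // The v4 SDK exposes an AbortController on the stream, so a disconnected
    // client cancels the upstream request instead of leaking it.
    stream.controller?.abort();
    break;
  }
  answer += part.choices[0]?.delta?.content || '';
}
```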
View File

@@ -46,7 +46,9 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
const res: any = await PgClient.query(
`BEGIN;
SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
select kb_id,id,q,a,source,file_id from ${PgDatasetTableName} where kb_id IN (${kbList
select id, kb_id, q, a, source, file_id, (vector <#> '[${
vectors[0]
}]') * -1 AS score from ${PgDatasetTableName} where kb_id IN (${kbList
.map((item) => `'${item.kbId}'`)
.join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
vectors[0]

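Note: the search now returns a `score` column with each row. pgvector's `<#>` operator yields the negative inner product (smaller means more similar, which suits index ordering), so multiplying by `-1` recovers the similarity that `QuoteModal` renders above. A reduced sketch of the query (assuming `PgClient`, `PgDatasetTableName`, and `vectors` as in the file):

```ts
const { rows } = await PgClient.query(`
  SELECT id, kb_id, q, a, source, file_id,
         (vector <#> '[${vectors[0]}]') * -1 AS score
  FROM ${PgDatasetTableName}
  ORDER BY vector <#> '[${vectors[0]}]'
  LIMIT 10;
`);
```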
View File

@@ -3,6 +3,7 @@ import type { QueryResultRow } from 'pg';
import { PgDatasetTableName } from '@/constants/plugin';
import { addLog } from './utils/tools';
import type { DatasetDataItemType } from '@/types/core/dataset/data';
import { DatasetSpecialIdEnum, datasetSpecialIdMap } from '@fastgpt/core/dataset/constant';
export const connectPg = async (): Promise<Pool> => {
if (global.pgClient) {
@@ -179,8 +180,13 @@ export const insertData2Dataset = ({
values: data.map((item) => [
{ key: 'user_id', value: userId },
{ key: 'kb_id', value: kbId },
{ key: 'source', value: item.source?.slice(0, 200)?.trim() || '' },
{ key: 'file_id', value: item.file_id?.slice(0, 200)?.trim() || '' },
{
key: 'source',
value:
item.source?.slice(0, 200)?.trim() ||
datasetSpecialIdMap[DatasetSpecialIdEnum.manual].sourceName
},
{ key: 'file_id', value: item.file_id?.slice(0, 200)?.trim() || DatasetSpecialIdEnum.manual },
{ key: 'q', value: item.q.replace(/'/g, '"') },
{ key: 'a', value: item.a.replace(/'/g, '"') },
{ key: 'vector', value: `[${item.vector}]` }
@@ -188,6 +194,25 @@ export const insertData2Dataset = ({
});
};
/**
* Update data file_id
*/
export const updateDataFileId = async ({
oldFileId,
userId,
newFileId = DatasetSpecialIdEnum.manual
}: {
oldFileId: string;
userId: string;
newFileId?: string;
}) => {
await PgClient.update(PgDatasetTableName, {
where: [['file_id', oldFileId], 'AND', ['user_id', userId]],
values: [{ key: 'file_id', value: newFileId }]
});
return newFileId;
};
export async function initPg() {
try {
await connectPg();
@@ -203,10 +228,6 @@ export async function initPg() {
q TEXT NOT NULL,
a TEXT
);
CREATE INDEX IF NOT EXISTS modelData_userId_index ON ${PgDatasetTableName} USING HASH (user_id);
CREATE INDEX IF NOT EXISTS modelData_kb_id_index ON ${PgDatasetTableName} (kb_id);
CREATE INDEX IF NOT EXISTS modelData_fileId_index ON ${PgDatasetTableName} (file_id);
CREATE INDEX IF NOT EXISTS idx_model_data_md5_q_a_user_id_kb_id ON ${PgDatasetTableName} (md5(q), md5(a), user_id, kb_id);
`);
console.log('init pg successful');
} catch (error) {

View File

@@ -1,12 +1,6 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { NextApiResponse } from 'next';
import {
openaiError,
openaiAccountError,
proxyError,
ERROR_RESPONSE,
ERROR_ENUM
} from './errorCode';
import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from './errorCode';
import { clearCookie, sseResponse, addLog } from './utils/tools';
export interface ResponseType<T = any> {
@@ -47,10 +41,8 @@ export const jsonRes = <T = any>(
msg = '网络连接异常';
} else if (error?.response?.data?.error?.message) {
msg = error?.response?.data?.error?.message;
} else if (openaiAccountError[error?.response?.data?.error?.code]) {
msg = openaiAccountError[error?.response?.data?.error?.code];
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
} else if (error?.error?.message) {
msg = error?.error?.message;
}
addLog.error(`response error: ${msg}`, error);
@@ -88,10 +80,8 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
msg = '网络连接异常';
} else if (error?.response?.data?.error?.message) {
msg = error?.response?.data?.error?.message;
} else if (openaiAccountError[error?.response?.data?.error?.code]) {
msg = openaiAccountError[error?.response?.data?.error?.code];
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
} else if (error?.error?.message) {
msg = error?.error?.message;
}
addLog.error(`sse error: ${msg}`, error);

View File

@@ -22,12 +22,17 @@ export type AppListItemType = {
intro: string;
};
export type CreateAppParams = {
name?: string;
avatar?: string;
type?: `${AppTypeEnum}`;
modules: AppSchema['modules'];
};
export interface AppUpdateParams {
name?: string;
type?: `${AppTypeEnum}`;
avatar?: string;
intro?: string;
chat?: AppSchema['chat'];
share?: AppSchema['share'];
modules?: AppSchema['modules'];
}

View File

@@ -45,6 +45,7 @@ export type ShareChatType = InitShareChatResponse & {
export type QuoteItemType = PgDataItemType & {
kb_id: string;
score?: number;
};
// response data

View File

@@ -3,7 +3,7 @@ import type { NextApiResponse } from 'next';
import { RunningModuleItemType } from '@/types/app';
import { UserModelSchema } from '@/types/mongoSchema';
export type MessageItemType = ChatCompletionRequestMessage & { dataId?: string };
export type MessageItemType = ChatCompletionRequestMessage & { dataId?: string; content: string };
// module dispatch props type
export type ModuleDispatchProps<T> = {

View File

@@ -29,6 +29,7 @@ export type FeConfigsType = {
show_pay?: boolean;
show_openai_account?: boolean;
show_promotion?: boolean;
hide_app_flow?: boolean;
openAPIUrl?: string;
systemTitle?: string;
authorText?: string;

View File

@@ -25,7 +25,7 @@ export const adaptBill = (bill: BillSchema): UserBillType => {
};
export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[] => {
const roleMap: Record<`${ChatCompletionRequestMessageRoleEnum}`, `${ChatRoleEnum}`> = {
const roleMap = {
[ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
[ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
[ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System,

View File

@@ -1,7 +1,7 @@
/*
replace {{variable}} to value
*/
export function replaceVariable(text: string, obj: Record<string, string>) {
export function replaceVariable(text: string, obj: Record<string, string | number>) {
for (const key in obj) {
const val = obj[key];
if (typeof val !== 'string') continue;

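Note: the signature widens to `string | number` because the quote template now interpolates a numeric `index`, but the visible context line still skips non-strings; presumably the body changed beyond this hunk. A sketch of a body consistent with the new signature (the regex replacement form is an assumption):

```ts
export function replaceVariable(text: string, obj: Record<string, string | number>) {
  for (const key in obj) {
    const val = obj[key];
    // Accept numbers as well, coercing via String().
    if (typeof val !== 'string' && typeof val !== 'number') continue;
    text = text.replace(new RegExp(`{{(${key})}}`, 'g'), String(val));
  }
  return text;
}
```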
View File

@@ -11,7 +11,14 @@ export const splitText2Chunks = ({ text, maxLen }: { text: string; maxLen: numbe
const overlapLen = Math.floor(maxLen * 0.25); // Overlap length
try {
const splitTexts = text.split(/(?<=[。!?;.!?;\n])/g);
const tempMarker = 'SPLIT_HERE';
text = text.replace(/\n{3,}/g, '\n');
text = text.replace(/\s/g, ' ');
text = text.replace('\n\n', '');
const splitTexts = text
.replace(/([。!?;]|\.\s|!\s|\?\s|;\s|\n)/g, `$1${tempMarker}`)
.split(tempMarker)
.filter((part) => part);
const chunks: string[] = [];
let preChunk = '';

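Note: the splitter previously used a lookbehind regex (`(?<=[。!?;.!?;\n])`), which some JavaScript engines still lack; the new version appends an explicit marker after each sentence terminator and splits on it. A self-contained sketch:

```ts
const tempMarker = 'SPLIT_HERE';

// Insert the marker after every terminator, then split on it; the captured
// terminator ($1) stays attached to its sentence.
const splitTexts = 'First. Second! Third?'
  .replace(/([。!?;]|\.\s|!\s|\?\s|;\s|\n)/g, `$1${tempMarker}`)
  .split(tempMarker)
  .filter((part) => part);
// => ['First. ', 'Second! ', 'Third?']
```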
View File

@@ -9,7 +9,7 @@ export async function chunksUpload({
mode,
chunks,
prompt,
rate = 50,
rate = 150,
onUploading
}: {
kbId: string;