Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-30 02:12:38 +00:00)
4.8.9 test fix (#2291)
* perf: read file icon
* perf: icon
* fix: i18n
* perf: hide pro api
* perf: upload expired time
* perf: upload file frequency limit
* perf: upload file ux
* perf: input file tip
* perf: qa custom chunk size
* feat: dataset openapi
* fix: auth dataset list
* fix: openapi doc
* perf: zero temperature change to 0.01
* perf: read file prompt
* perf: read file prompt
* perf: free plan tip
* feat: cron job usage
@@ -430,10 +430,6 @@ const PlanUsage = () => {
       {isFreeTeam ? (
         <>
           <Flex mt="2" color={'#485264'} fontSize="sm">
             <Box>{t('common:support.wallet.Plan reset time')}:</Box>
             <Box ml={2}>{formatTime2YMD(standardPlan?.expiredTime)}</Box>
           </Flex>
           <Box mt="2" color={'#485264'} fontSize="sm">
             {t('common:info.free_plan')}
           </Box>
@@ -7,8 +7,21 @@ import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
 import { NextAPI } from '@/service/middleware/entry';
 import { createFileToken } from '@fastgpt/service/support/permission/controller';
 import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
+import { addLog } from '@fastgpt/service/common/system/log';
+import { authFrequencyLimit } from '@/service/common/frequencyLimit/api';
+import { addSeconds } from 'date-fns';
+
+const authUploadLimit = (tmbId: string) => {
+  if (!global.feConfigs.uploadFileMaxAmount) return;
+  return authFrequencyLimit({
+    eventId: `${tmbId}-uploadfile`,
+    maxAmount: global.feConfigs.uploadFileMaxAmount * 2,
+    expiredTime: addSeconds(new Date(), 30) // 30s
+  });
+};
+
 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   const start = Date.now();
   /* Creates the multer uploader */
   const upload = getUploadModel({
     maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
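The new authUploadLimit helper rate-limits uploads per team member, keyed on `${tmbId}-uploadfile` with a 30-second window, and (per the next hunk) runs before the multipart stream is consumed, so over-limit clients are rejected cheaply. The backing authFrequencyLimit lives in a service module that is not part of this diff; below is a minimal in-memory sketch of the call shape it implies. The storage and rejection behavior are assumptions, not the actual implementation.

```ts
import { addSeconds } from 'date-fns';

// Hypothetical in-memory stand-in for authFrequencyLimit; the real module
// at '@/service/common/frequencyLimit/api' is not shown in this diff.
type LimitEvent = { amount: number; expiredTime: Date };
const events = new Map<string, LimitEvent>();

async function authFrequencyLimitSketch(params: {
  eventId: string;
  maxAmount: number;
  expiredTime: Date;
}): Promise<void> {
  const now = new Date();
  const current = events.get(params.eventId);

  // No record, or the previous window expired: start a fresh window.
  if (!current || current.expiredTime < now) {
    events.set(params.eventId, { amount: 1, expiredTime: params.expiredTime });
    return;
  }

  // Budget exhausted inside the window: reject, which would surface in
  // the route's catch branch.
  if (current.amount >= params.maxAmount) {
    throw new Error('Upload too frequent');
  }
  current.amount += 1;
}

// Usage mirroring the diff: at most uploadFileMaxAmount * 2 uploads per
// 30-second window, keyed per team member (amounts here are made up).
async function demo() {
  await authFrequencyLimitSketch({
    eventId: 'tmb-123-uploadfile',
    maxAmount: 10 * 2,
    expiredTime: addSeconds(new Date(), 30)
  });
}
```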
@@ -16,9 +29,13 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   const filePaths: string[] = [];

   try {
+    const { teamId, tmbId } = await authCert({ req, authToken: true });
+
+    await authUploadLimit(tmbId);
+
     const { file, bucketName, metadata } = await upload.doUpload(req, res);

-    const { teamId, tmbId } = await authCert({ req, authToken: true });
+    addLog.info(`Upload file success ${file.originalname}, cost ${Date.now() - start}ms`);

     if (!bucketName) {
       throw new Error('bucketName is empty');
@@ -34,22 +51,15 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       metadata: metadata
     });

-    jsonRes<{
-      fileId: string;
-      previewUrl: string;
-    }>(res, {
-      data: {
-        fileId,
-        previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken(
-          {
-            bucketName,
-            teamId,
-            tmbId,
-            fileId
-          }
-        )}`
-      }
-    });
+    return {
+      fileId,
+      previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken({
+        bucketName,
+        teamId,
+        tmbId,
+        fileId
+      })}`
+    };
   } catch (error) {
     jsonRes(res, {
       code: 500,
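The success path no longer calls jsonRes directly: the handler returns `{ fileId, previewUrl }` and presumably lets the NextAPI wrapper serialize it. A sketch of that wrapper pattern under that assumption; the actual middleware in '@/service/middleware/entry' is not shown in this diff.

```ts
import type { NextApiRequest, NextApiResponse } from 'next';

// Assumed shape of the NextAPI entry middleware: run the handler, send its
// return value as the JSON `data` payload, and turn thrown errors into a
// 500 response (matching the route's catch branch above).
type Handler = (req: NextApiRequest, res: NextApiResponse) => Promise<unknown>;

export const nextApiSketch =
  (handler: Handler) => async (req: NextApiRequest, res: NextApiResponse) => {
    try {
      const data = await handler(req, res); // e.g. { fileId, previewUrl }
      res.status(200).json({ code: 200, data });
    } catch (error) {
      res.status(500).json({ code: 500, error: String(error) });
    }
  };
```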
@@ -36,6 +36,7 @@ async function handler(req: ApiRequestProps<GetDatasetListBody>) {
   return await authDataset({
     req,
     authToken: true,
+    authApiKey: true,
     per: ReadPermissionVal,
     datasetId: parentId
   });
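With authApiKey: true, the dataset list route accepts an API key in addition to a login token (the "feat: dataset openapi" item in the commit message). A hypothetical client call follows; the host, route path, key format, and payload shape are assumptions for illustration, not confirmed by this diff.

```ts
// Hypothetical OpenAPI-style call to the dataset list handler.
async function listDatasets(parentId: string | null) {
  const res = await fetch('https://your-fastgpt-host/api/core/dataset/list', {
    method: 'POST',
    headers: {
      // API-key auth, now accepted because the handler sets authApiKey: true
      Authorization: 'Bearer fastgpt-xxxxxx',
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({ parentId })
  });
  return res.json();
}
```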
@@ -37,11 +37,12 @@ type DatasetImportContextType = {
   setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
 } & TrainingFiledType;

-type ChunkSizeFieldType = 'embeddingChunkSize';
+type ChunkSizeFieldType = 'embeddingChunkSize' | 'qaChunkSize';
 export type ImportFormType = {
   mode: TrainingModeEnum;
   way: ImportProcessWayEnum;
   embeddingChunkSize: number;
+  qaChunkSize: number;
   customSplitChar: string;
   qaPrompt: string;
   webSelector: string;
@@ -147,7 +148,6 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
     steps
   });

-  // -----
   const vectorModel = datasetDetail.vectorModel;
   const agentModel = datasetDetail.agentModel;

@@ -156,6 +156,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       mode: TrainingModeEnum.chunk,
       way: ImportProcessWayEnum.auto,
       embeddingChunkSize: vectorModel?.defaultToken || 512,
+      qaChunkSize: Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7),
       customSplitChar: '',
       qaPrompt: Prompt_AgentQA.description,
       webSelector: ''
@@ -168,6 +169,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
   const mode = processParamsForm.watch('mode');
   const way = processParamsForm.watch('way');
   const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
+  const qaChunkSize = processParamsForm.watch('qaChunkSize');
   const customSplitChar = processParamsForm.watch('customSplitChar');

   const modeStaticParams: Record<TrainingModeEnum, TrainingFiledType> = {
@@ -180,7 +182,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       showChunkInput: false,
       showPromptInput: false,
       charsPointsPrice: agentModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.Auto mode Estimated Price Tips', {
         price: agentModel.charsPointsPrice
       }),
       uploadRate: 100
@@ -195,22 +197,22 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       showChunkInput: true,
       showPromptInput: false,
       charsPointsPrice: vectorModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.Embedding Estimated Price Tips', {
         price: vectorModel.charsPointsPrice
       }),
       uploadRate: 150
     },
     [TrainingModeEnum.qa]: {
-      chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
+      chunkSizeField: 'qaChunkSize' as ChunkSizeFieldType,
       chunkOverlapRatio: 0,
-      maxChunkSize: 4096,
-      minChunkSize: 512,
-      autoChunkSize: agentModel.maxContext * 0.55 || 6000,
-      chunkSize: embeddingChunkSize || agentModel.maxContext * 0.55 || 6000,
+      maxChunkSize: Math.min(agentModel.maxResponse * 4, agentModel.maxContext * 0.7),
+      minChunkSize: 4000,
+      autoChunkSize: Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7),
+      chunkSize: qaChunkSize,
       showChunkInput: true,
       showPromptInput: true,
       charsPointsPrice: agentModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.QA Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.QA Estimated Price Tips', {
         price: agentModel?.charsPointsPrice
       }),
       uploadRate: 30
@@ -228,7 +230,6 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       customSplitChar
     }
   };

-  const chunkSize = wayStaticPrams[way].chunkSize;

   const contextValue = {
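To make the new QA bounds concrete, here is a quick worked example. The model limits are hypothetical, chosen only to show what the formulas from the hunks above produce.

```ts
// Hypothetical model limits, for illustration only.
const agentModel = { maxResponse: 4000, maxContext: 16000 };

// Default/auto QA chunk size: one model response, capped at 70% of context.
const autoChunkSize = Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7); // min(4000, 11200) = 4000

// Slider bounds for QA mode after this change.
const minChunkSize = 4000;
const maxChunkSize = Math.min(agentModel.maxResponse * 4, agentModel.maxContext * 0.7); // min(16000, 11200) = 11200

console.log({ autoChunkSize, minChunkSize, maxChunkSize });
```

So a QA chunk can never exceed roughly four model responses or 70% of the context window, whichever is smaller, replacing the previous fixed 512/4096 bounds.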
@@ -42,7 +42,8 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
     showChunkInput,
     showPromptInput,
     maxChunkSize,
-    priceTip
+    priceTip,
+    chunkSize
   } = useContextSelector(DatasetImportContext, (v) => v);
   const { getValues, setValue, register, watch } = processParamsForm;
   const { toast } = useToast();
@@ -74,8 +75,15 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
   );

   return (
-    <Box h={'100%'} display={['block', 'flex']} gap={5} fontSize={'sm'}>
-      <Box flex={'1 0 0'} minW={['auto', '540px']} maxW={'600px'}>
+    <Box h={'100%'} display={['block', 'flex']} fontSize={'sm'}>
+      <Box
+        flex={'1 0 0'}
+        minW={['auto', '540px']}
+        maxW={'600px'}
+        h={['auto', '100%']}
+        overflow={'auto'}
+        pr={[0, 3]}
+      >
         <Flex alignItems={'center'}>
           <MyIcon name={'common/settingLight'} w={'20px'} />
           <Box fontSize={'md'}>{t('common:core.dataset.import.Data process params')}</Box>
@@ -138,7 +146,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
             }}
           >
             <MyTooltip
-              label={t('core.dataset.import.Chunk Range', {
+              label={t('common:core.dataset.import.Chunk Range', {
                 min: minChunkSize,
                 max: maxChunkSize
               })}
@@ -148,6 +156,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
               step={100}
               min={minChunkSize}
               max={maxChunkSize}
+              value={chunkSize}
               onChange={(e) => {
                 setValue(chunkSizeField, +e);
               }}
@@ -279,7 +288,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
           </Button>
         </Flex>
       </Box>
-      <Box flex={'1 0 0'} w={['auto', '0']}>
+      <Box flex={'1 0 0'} w={['auto', '0']} h={['auto', '100%']} overflow={'auto'} pl={[0, 3]}>
         <Preview showPreviewChunks={showPreviewChunks} />
       </Box>