4.8.9 test fix (#2291)

* perf: read file icon

* perf: icon

* fix: i18n

* perf: hide pro api

* perf: upload expired time

* perf: upload file frequency limit

* perf: upload file ux

* perf: input file tip

* perf: qa custom chunk size

* feat: dataset openapi

* fix: auth dataset list

* fix: openapi doc

* perf: zero temperature change to 0.01

* perf: read file prompt

* perf: read file prompt

* perf: free plan tip

* feat: cron job usage
Authored by Archer on 2024-08-08 10:07:24 +08:00, committed by GitHub
parent 7b388b287a
commit 3ba9c21828
42 changed files with 822 additions and 813 deletions
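
Of the items above, only the QA chunk-size, i18n-namespace, and layout changes appear in the diffs shown below. For the "zero temperature change to 0.01" item, the rough idea is to clamp a temperature of 0 to a small positive value before the model call; the sketch below is only an illustration with made-up names, not code from this commit:

```ts
// Hypothetical sketch of the "zero temperature change to 0.01" item.
// The type and function names are illustrative only; they do not come from this commit.
type CompletionParams = {
  model: string;
  temperature?: number;
};

function normalizeTemperature(params: CompletionParams): CompletionParams {
  if (params.temperature === undefined) return params;
  // Treat temperature === 0 as "almost deterministic" (0.01) instead of exactly 0.
  return { ...params, temperature: Math.max(params.temperature, 0.01) };
}
```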

View File

@@ -37,11 +37,12 @@ type DatasetImportContextType = {
   setSources: React.Dispatch<React.SetStateAction<ImportSourceItemType[]>>;
 } & TrainingFiledType;
 
-type ChunkSizeFieldType = 'embeddingChunkSize';
+type ChunkSizeFieldType = 'embeddingChunkSize' | 'qaChunkSize';
 export type ImportFormType = {
   mode: TrainingModeEnum;
   way: ImportProcessWayEnum;
   embeddingChunkSize: number;
+  qaChunkSize: number;
   customSplitChar: string;
   qaPrompt: string;
   webSelector: string;
@@ -147,7 +148,6 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
     steps
   });
 
-  // -----
   const vectorModel = datasetDetail.vectorModel;
   const agentModel = datasetDetail.agentModel;
 
@@ -156,6 +156,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       mode: TrainingModeEnum.chunk,
       way: ImportProcessWayEnum.auto,
       embeddingChunkSize: vectorModel?.defaultToken || 512,
+      qaChunkSize: Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7),
       customSplitChar: '',
       qaPrompt: Prompt_AgentQA.description,
       webSelector: ''
@@ -168,6 +169,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
   const mode = processParamsForm.watch('mode');
   const way = processParamsForm.watch('way');
   const embeddingChunkSize = processParamsForm.watch('embeddingChunkSize');
+  const qaChunkSize = processParamsForm.watch('qaChunkSize');
   const customSplitChar = processParamsForm.watch('customSplitChar');
 
   const modeStaticParams: Record<TrainingModeEnum, TrainingFiledType> = {
@@ -180,7 +182,7 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
       showChunkInput: false,
       showPromptInput: false,
       charsPointsPrice: agentModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.Auto mode Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.Auto mode Estimated Price Tips', {
        price: agentModel.charsPointsPrice
      }),
      uploadRate: 100
@@ -195,22 +197,22 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
      showChunkInput: true,
      showPromptInput: false,
      charsPointsPrice: vectorModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.Embedding Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.Embedding Estimated Price Tips', {
        price: vectorModel.charsPointsPrice
      }),
      uploadRate: 150
    },
    [TrainingModeEnum.qa]: {
-      chunkSizeField: 'embeddingChunkSize' as ChunkSizeFieldType,
+      chunkSizeField: 'qaChunkSize' as ChunkSizeFieldType,
      chunkOverlapRatio: 0,
-      maxChunkSize: 4096,
-      minChunkSize: 512,
-      autoChunkSize: agentModel.maxContext * 0.55 || 6000,
-      chunkSize: embeddingChunkSize || agentModel.maxContext * 0.55 || 6000,
+      maxChunkSize: Math.min(agentModel.maxResponse * 4, agentModel.maxContext * 0.7),
+      minChunkSize: 4000,
+      autoChunkSize: Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7),
+      chunkSize: qaChunkSize,
      showChunkInput: true,
      showPromptInput: true,
      charsPointsPrice: agentModel.charsPointsPrice,
-      priceTip: t('core.dataset.import.QA Estimated Price Tips', {
+      priceTip: t('common:core.dataset.import.QA Estimated Price Tips', {
        price: agentModel?.charsPointsPrice
      }),
      uploadRate: 30
@@ -228,7 +230,6 @@ const DatasetImportContextProvider = ({ children }: { children: React.ReactNode
      customSplitChar
    }
  };
 
-  const chunkSize = wayStaticPrams[way].chunkSize;
 
  const contextValue = {
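
The substance of this file's change: QA training now gets its own chunk-size form field, and its bounds are derived from the agent model instead of the fixed 512–4096 range. A standalone sketch of that derivation, with the model shape reduced to the two fields the diff actually reads:

```ts
// Reduced model shape; the real FastGPT model config carries more fields.
type AgentModel = {
  maxResponse: number; // max tokens the model can generate in one reply
  maxContext: number;  // total context window
};

// Mirrors the bounds introduced in the hunk above for TrainingModeEnum.qa.
function qaChunkParams(agentModel: AgentModel) {
  return {
    // default (and auto) size: one full response, capped at 70% of the context window
    autoChunkSize: Math.min(agentModel.maxResponse * 1, agentModel.maxContext * 0.7),
    // upper bound: four responses' worth, same 70% cap
    maxChunkSize: Math.min(agentModel.maxResponse * 4, agentModel.maxContext * 0.7),
    // lower bound is now a flat 4000 instead of 512
    minChunkSize: 4000
  };
}

// Example: maxResponse = 4096, maxContext = 16000
// => autoChunkSize = 4096, maxChunkSize = 11200, minChunkSize = 4000
qaChunkParams({ maxResponse: 4096, maxContext: 16000 });
```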

View File

@@ -42,7 +42,8 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
     showChunkInput,
     showPromptInput,
     maxChunkSize,
-    priceTip
+    priceTip,
+    chunkSize
   } = useContextSelector(DatasetImportContext, (v) => v);
   const { getValues, setValue, register, watch } = processParamsForm;
   const { toast } = useToast();
@@ -74,8 +75,15 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
   );
 
   return (
-    <Box h={'100%'} display={['block', 'flex']} gap={5} fontSize={'sm'}>
-      <Box flex={'1 0 0'} minW={['auto', '540px']} maxW={'600px'}>
+    <Box h={'100%'} display={['block', 'flex']} fontSize={'sm'}>
+      <Box
+        flex={'1 0 0'}
+        minW={['auto', '540px']}
+        maxW={'600px'}
+        h={['auto', '100%']}
+        overflow={'auto'}
+        pr={[0, 3]}
+      >
         <Flex alignItems={'center'}>
           <MyIcon name={'common/settingLight'} w={'20px'} />
           <Box fontSize={'md'}>{t('common:core.dataset.import.Data process params')}</Box>
@@ -138,7 +146,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
             }}
           >
             <MyTooltip
-              label={t('core.dataset.import.Chunk Range', {
+              label={t('common:core.dataset.import.Chunk Range', {
                min: minChunkSize,
                max: maxChunkSize
              })}
@@ -148,6 +156,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
                step={100}
                min={minChunkSize}
                max={maxChunkSize}
+               value={chunkSize}
                onChange={(e) => {
                  setValue(chunkSizeField, +e);
                }}
@@ -279,7 +288,7 @@ function DataProcess({ showPreviewChunks = true }: { showPreviewChunks: boolean
           </Button>
         </Flex>
       </Box>
 
-      <Box flex={'1 0 0'} w={['auto', '0']}>
+      <Box flex={'1 0 0'} w={['auto', '0']} h={['auto', '100%']} overflow={'auto'} pl={[0, 3]}>
        <Preview showPreviewChunks={showPreviewChunks} />
      </Box>
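
To tie the two files together: DataProcess now reads chunkSize from the import context and passes it as value, so the number input is controlled and follows whichever field (embeddingChunkSize or qaChunkSize) the current training mode uses. A minimal sketch of that wiring, using a plain <input> in place of the Chakra number input and a simplified stand-in for the context values:

```tsx
import React from 'react';

type ChunkSizeField = 'embeddingChunkSize' | 'qaChunkSize';

// Simplified stand-in for the values DataProcess pulls from DatasetImportContext.
type ChunkSizeInputProps = {
  chunkSize: number;
  chunkSizeField: ChunkSizeField;
  minChunkSize: number;
  maxChunkSize: number;
  setValue: (field: ChunkSizeField, value: number) => void; // react-hook-form style setter
};

export function ChunkSizeInput(props: ChunkSizeInputProps) {
  const { chunkSize, chunkSizeField, minChunkSize, maxChunkSize, setValue } = props;
  return (
    <input
      type="number"
      step={100}
      min={minChunkSize}
      max={maxChunkSize}
      // Controlled value: switching training mode swaps chunkSizeField, and the
      // displayed number immediately reflects that mode's stored size.
      value={chunkSize}
      onChange={(e) => setValue(chunkSizeField, +e.target.value)}
    />
  );
}
```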