Mirror of https://github.com/labring/FastGPT.git
v4.6-4 (#473)
ImportData component:

@@ -34,10 +34,10 @@ const ImportData = ({
   const theme = useTheme();
   const { datasetDetail } = useDatasetStore();
   const [importType, setImportType] = useState<`${ImportTypeEnum}`>(ImportTypeEnum.chunk);
+  const vectorModel = datasetDetail.vectorModel;
+  const agentModel = datasetDetail.agentModel;

   const typeMap = useMemo(() => {
-    const vectorModel = datasetDetail.vectorModel;
-    const qaModel = qaModelList[0];
     const map = {
       [ImportTypeEnum.chunk]: {
         defaultChunkLen: vectorModel?.defaultToken || 500,
@@ -45,8 +45,8 @@ const ImportData = ({
         mode: TrainingModeEnum.chunk
       },
       [ImportTypeEnum.qa]: {
-        defaultChunkLen: qaModel?.maxContext * 0.5 || 8000,
-        unitPrice: qaModel?.price || 3,
+        defaultChunkLen: agentModel?.maxContext * 0.6 || 9000,
+        unitPrice: agentModel?.price || 3,
         mode: TrainingModeEnum.qa
       },
       [ImportTypeEnum.csv]: {
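One detail worth calling out in the `defaultChunkLen` expressions: when the model record is missing, `agentModel?.maxContext` is `undefined`, the multiplication yields `NaN`, and `NaN` is falsy, so `||` supplies the fallback. A standalone sketch of that behaviour (the `AgentModel` shape here is illustrative, not FastGPT's actual type):

```ts
// Illustrative shape, not FastGPT's real type.
type AgentModel = { maxContext: number; price: number };

function qaDefaultChunkLen(agentModel?: AgentModel): number {
  // If agentModel is undefined, `agentModel?.maxContext` is undefined,
  // `undefined * 0.6` evaluates to NaN, and NaN is falsy, so || picks 9000.
  const scaled = (agentModel?.maxContext as number) * 0.6;
  return scaled || 9000;
}

console.log(qaDefaultChunkLen({ maxContext: 16000, price: 3 })); // 9600
console.log(qaDefaultChunkLen()); // 9000
```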
@@ -56,7 +56,13 @@ const ImportData = ({
       }
     };
     return map[importType];
-  }, [datasetDetail.vectorModel, importType]);
+  }, [
+    agentModel?.maxContext,
+    agentModel?.price,
+    importType,
+    vectorModel?.defaultToken,
+    vectorModel?.price
+  ]);

   const TitleStyle: BoxProps = {
     fontWeight: 'bold',
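The dependency-array rewrite is the substance of this hunk: the old array listed only `datasetDetail.vectorModel` and `importType`, so the memo never recomputed when the QA model changed. The new array enumerates every primitive field the callback actually reads, which is the form `react-hooks/exhaustive-deps` expects, and it also keeps the memo stable across re-renders that merely produce fresh object references with unchanged values. A minimal sketch of the pattern (model shapes and the hook name are illustrative):

```ts
import { useMemo } from 'react';

// Illustrative shapes; the real map also carries mode and a csv entry.
type VectorModel = { defaultToken: number; price: number };
type AgentModel = { maxContext: number; price: number };

function useTypeMap(
  importType: 'chunk' | 'qa',
  vectorModel?: VectorModel,
  agentModel?: AgentModel
) {
  return useMemo(() => {
    const map = {
      chunk: {
        defaultChunkLen: vectorModel?.defaultToken || 500,
        unitPrice: vectorModel?.price || 0.2
      },
      qa: {
        defaultChunkLen: (agentModel?.maxContext as number) * 0.6 || 9000,
        unitPrice: agentModel?.price || 3
      }
    };
    return map[importType];
    // Depending on the primitive fields, not the objects, means the memo
    // only recomputes when one of these values actually changes.
  }, [
    agentModel?.maxContext,
    agentModel?.price,
    importType,
    vectorModel?.defaultToken,
    vectorModel?.price
  ]);
}
```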
@@ -104,8 +110,10 @@ const ImportData = ({

       <Provider
         {...typeMap}
+        vectorModel={vectorModel.model}
+        agentModel={agentModel.model}
+        datasetId={datasetDetail._id}
         importType={importType}
-        datasetId={datasetId}
         parentId={parentId}
         onUploadSuccess={uploadSuccess}
       >
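Two things change in the JSX: the two model ids are threaded into Provider, and `datasetId` now comes from the store's `datasetDetail._id` rather than the page-level `datasetId` prop.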
Provider component:

@@ -90,6 +90,8 @@ const Provider = ({
   parentId,
   unitPrice,
   mode,
+  vectorModel,
+  agentModel,
   defaultChunkLen = 500,
   importType,
   onUploadSuccess,
@@ -99,6 +101,8 @@ const Provider = ({
   parentId: string;
   unitPrice: number;
   mode: `${TrainingModeEnum}`;
+  vectorModel: string;
+  agentModel: string;
   defaultChunkLen: number;
   importType: `${ImportTypeEnum}`;
   onUploadSuccess: () => void;
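Provider now receives the two model identifiers as plain strings (the `.model` ids passed from ImportData above) rather than reading the model objects itself. A condensed sketch of the widened contract, with the enums reduced to string unions for self-containment (the real types come from FastGPT's constants module):

```ts
// String-union stand-ins for TrainingModeEnum / ImportTypeEnum.
type TrainingMode = 'chunk' | 'qa';
type ImportType = 'chunk' | 'qa' | 'csv';

type ProviderProps = {
  parentId: string;
  unitPrice: number;
  mode: TrainingMode;
  vectorModel: string; // embedding model id, e.g. vectorModel.model
  agentModel: string; // LLM id, forwarded to bill creation
  defaultChunkLen?: number; // the component defaults this to 500
  importType: ImportType;
  onUploadSuccess: () => void;
};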
@@ -132,7 +136,9 @@ const Provider = ({
     const chunks = file.chunks;
     // create training bill
     const billId = await postCreateTrainingBill({
-      name: t('dataset.collections.Create Training Data', { filename: file.filename })
+      name: t('dataset.collections.Create Training Data', { filename: file.filename }),
+      vectorModel,
+      agentModel
     });
     // create a file collection and training bill
     const collectionId = await postDatasetCollection({
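With the ids in scope, the training bill records which embedding model and which LLM the run will be charged against. A hedged sketch of the request shape implied by this call site; the authoritative type lives in FastGPT's API layer:

```ts
// Assumed payload shape for postCreateTrainingBill after this change.
type CreateTrainingBillParams = {
  name: string;
  vectorModel: string; // embedding model the chunks will be indexed with
  agentModel: string; // LLM used when the training mode is 'qa'
};
```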
QAImport component:

@@ -13,8 +13,8 @@ const fileExtension = '.txt, .doc, .docx, .pdf, .md';

 const QAImport = () => {
   const { datasetDetail } = useDatasetStore();
-  const vectorModel = datasetDetail.vectorModel;
-  const unitPrice = vectorModel?.price || 0.2;
+  const agentModel = datasetDetail.agentModel;
+  const unitPrice = agentModel?.price || 3;

   const {
     successChunks,
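The QA screen's pricing fix mirrors the rest of the commit: QA generation is billed against the agent (LLM) model rather than the embedding model, hence the fallback unit price moving from 0.2 to 3.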