Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 21:13:50 +00:00)

Commit: 4.6.3-alpha1 (#529)
packages/global/common/string/time.ts (new file, +3)

@@ -0,0 +1,3 @@
+import dayjs from 'dayjs';
+
+export const formatTime2YMDHM = (time: Date) => dayjs(time).format('YYYY-MM-DD HH:mm');
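A quick sketch of how the new helper might be used; only formatTime2YMDHM itself comes from the diff above, the call site and sample date are illustrative.

```ts
import dayjs from 'dayjs';

// The helper added above: format a Date as 'YYYY-MM-DD HH:mm'.
export const formatTime2YMDHM = (time: Date) => dayjs(time).format('YYYY-MM-DD HH:mm');

// Illustrative call site: format a record's update time for display.
const updateTime = new Date('2023-11-30T09:30:00Z');
console.log(formatTime2YMDHM(updateTime)); // e.g. "2023-11-30 17:30" (local timezone)
```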
@@ -77,7 +77,7 @@ export const appModules2Form = ({
       );
       defaultAppForm.aiSettings.quotePrompt = findInputValueByKey(
         module.inputs,
-        ModuleInputKeyEnum.aiChatQuoteTemplate
+        ModuleInputKeyEnum.aiChatQuotePrompt
       );
     } else if (module.flowType === FlowNodeTypeEnum.datasetSearchNode) {
       defaultAppForm.dataset.datasets = findInputValueByKey(
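The hunk above corrects which module input feeds aiSettings.quotePrompt: it now reads ModuleInputKeyEnum.aiChatQuotePrompt instead of the quote-template key. A minimal sketch of the lookup it relies on, assuming findInputValueByKey simply scans a module's inputs by key; the helper body and the literal key strings are assumptions, not the project's actual implementation.

```ts
// Assumed shape of a flow-node input; the real input type has more fields.
type ModuleInputItem = { key: string; value?: unknown };

// Assumed implementation: return the value of the input whose key matches.
const findInputValueByKey = (inputs: ModuleInputItem[], key: string) =>
  inputs.find((input) => input.key === key)?.value;

// With the fix, the quote prompt is read from the quote-prompt key rather than
// the quote-template key (string literals stand in for ModuleInputKeyEnum values).
const inputs: ModuleInputItem[] = [
  { key: 'quoteTemplate', value: '{{q}}\n{{a}}' },
  { key: 'quotePrompt', value: 'Answer strictly from the quoted material.' }
];
console.log(findInputValueByKey(inputs, 'quotePrompt')); // 'Answer strictly from the quoted material.'
```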
packages/global/core/chat/type.d.ts (vendored, 2 lines changed)

@@ -4,6 +4,7 @@ import { ChatRoleEnum, ChatSourceEnum } from './constants';
 import { FlowNodeTypeEnum } from '../module/node/constant';
 import { ModuleOutputKeyEnum } from '../module/constants';
 import { AppSchema } from '../app/type';
+import { DatasetSearchModeEnum } from '../dataset/constant';

 export type ChatSchema = {
   _id: string;
@@ -94,6 +95,7 @@ export type moduleDispatchResType = {
   // dataset search
   similarity?: number;
   limit?: number;
+  searchMode?: `${DatasetSearchModeEnum}`;

   // cq
   cqList?: ClassifyQuestionAgentItemType[];
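The two hunks above import DatasetSearchModeEnum and add an optional searchMode to moduleDispatchResType, so a dataset-search step can report which mode it ran in. A hedged sketch of a result object carrying the new field; the local type is an illustrative subset, not the full moduleDispatchResType, and the values are made up.

```ts
// Illustrative subset of moduleDispatchResType: only the dataset-search fields.
type DatasetSearchDispatchRes = {
  similarity?: number;
  limit?: number;
  searchMode?: 'embedding' | 'embeddingReRank'; // mirrors `${DatasetSearchModeEnum}`
};

const res: DatasetSearchDispatchRes = {
  similarity: 0.75,
  limit: 5,
  searchMode: 'embedding' // the mode used for this search, now surfaced in the response detail
};
console.log(res);
```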
@@ -1,5 +1,6 @@
 export const PgDatasetTableName = 'modeldata';

+/* ------------ dataset -------------- */
 export enum DatasetTypeEnum {
   folder = 'folder',
   dataset = 'dataset'
@@ -14,28 +15,45 @@ export const DatasetTypeMap = {
   }
 };

+/* ------------ collection -------------- */
 export enum DatasetCollectionTypeEnum {
-  file = 'file',
   folder = 'folder',
+  file = 'file',
   link = 'link',
   virtual = 'virtual'
 }

 export const DatasetCollectionTypeMap = {
-  [DatasetCollectionTypeEnum.file]: {
-    name: 'dataset.file'
-  },
   [DatasetCollectionTypeEnum.folder]: {
-    name: 'dataset.folder'
+    name: 'core.dataset.folder'
   },
+  [DatasetCollectionTypeEnum.file]: {
+    name: 'core.dataset.file'
+  },
   [DatasetCollectionTypeEnum.link]: {
-    name: 'dataset.link'
+    name: 'core.dataset.link'
   },
   [DatasetCollectionTypeEnum.virtual]: {
-    name: 'dataset.Virtual File'
+    name: 'core.dataset.Virtual File'
   }
 };
+export enum DatasetCollectionTrainingModeEnum {
+  manual = 'manual',
+  chunk = 'chunk',
+  qa = 'qa'
+}
+export const DatasetCollectionTrainingTypeMap = {
+  [DatasetCollectionTrainingModeEnum.manual]: {
+    label: 'core.dataset.collection.training.type manual'
+  },
+  [DatasetCollectionTrainingModeEnum.chunk]: {
+    label: 'core.dataset.collection.training.type chunk'
+  },
+  [DatasetCollectionTrainingModeEnum.qa]: {
+    label: 'core.dataset.collection.training.type qa'
+  }
+};

+/* ------------ data -------------- */
 export enum DatasetDataIndexTypeEnum {
   chunk = 'chunk',
   qa = 'qa',
@@ -61,31 +79,22 @@ export const DatasetDataIndexTypeMap = {
   }
 };

+/* ------------ training -------------- */
 export enum TrainingModeEnum {
-  'chunk' = 'chunk',
-  'qa' = 'qa'
-  // 'hypothetical' = 'hypothetical',
-  // 'summary' = 'summary',
-  // 'multipleIndex' = 'multipleIndex'
+  chunk = 'chunk',
+  qa = 'qa'
 }

 export const TrainingTypeMap = {
   [TrainingModeEnum.chunk]: {
-    name: 'chunk'
+    label: 'core.dataset.training.type chunk'
   },
   [TrainingModeEnum.qa]: {
-    name: 'qa'
+    label: 'core.dataset.training.type qa'
   }
-  // [TrainingModeEnum.hypothetical]: {
-  //   name: 'hypothetical'
-  // },
-  // [TrainingModeEnum.summary]: {
-  //   name: 'summary'
-  // },
-  // [TrainingModeEnum.multipleIndex]: {
-  //   name: 'multipleIndex'
-  // }
 };

+/* ------------ search -------------- */
 export enum DatasetSearchModeEnum {
   embedding = 'embedding',
   embeddingReRank = 'embeddingReRank',
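The constant hunks above reorder DatasetCollectionTypeEnum, switch the collection and training maps from hard-coded names to core.dataset.* / core.dataset.training.* i18n label keys, and introduce DatasetCollectionTrainingModeEnum with its own label map. A minimal sketch of how such label keys are typically resolved, assuming a translate function; the `t` stub is an assumption, not FastGPT's i18n helper.

```ts
// Enum and map copied in shape from the diff above.
enum TrainingModeEnum {
  chunk = 'chunk',
  qa = 'qa'
}

const TrainingTypeMap: Record<TrainingModeEnum, { label: string }> = {
  [TrainingModeEnum.chunk]: { label: 'core.dataset.training.type chunk' },
  [TrainingModeEnum.qa]: { label: 'core.dataset.training.type qa' }
};

// Stand-in for an i18n lookup; the real app would call its translation hook here.
const t = (key: string) => key;

const trainingModeLabel = (mode: TrainingModeEnum) => t(TrainingTypeMap[mode].label);

console.log(trainingModeLabel(TrainingModeEnum.qa)); // 'core.dataset.training.type qa'
```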
packages/global/core/dataset/controller.d.ts (vendored, 1 line changed)

@@ -5,6 +5,7 @@ export type CreateDatasetDataProps = {
   tmbId: string;
   datasetId: string;
   collectionId: string;
+  chunkIndex?: number;
   q: string;
   a?: string;
   indexes?: Omit<DatasetDataIndexItemType, 'dataId'>[];
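CreateDatasetDataProps gains an optional chunkIndex so inserted data can remember its position in the source document. A sketch of a payload in the new shape; the local types are trimmed-down stand-ins and the ID values are fake.

```ts
// Trimmed-down stand-ins for the real types; only the fields from the hunk above.
type DatasetDataIndexItemType = { dataId: string; text: string };

type CreateDatasetDataProps = {
  tmbId: string;
  datasetId: string;
  collectionId: string;
  chunkIndex?: number; // new: position of the chunk within its source
  q: string;
  a?: string;
  indexes?: Omit<DatasetDataIndexItemType, 'dataId'>[];
};

const props: CreateDatasetDataProps = {
  tmbId: 'tmb_123',
  datasetId: 'ds_456',
  collectionId: 'col_789',
  chunkIndex: 3,
  q: 'What does the dataset module do?',
  a: 'It stores chunked knowledge-base content for retrieval.',
  indexes: [{ text: 'What does the dataset module do?' }]
};
```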
packages/global/core/dataset/type.d.ts (vendored, 15 lines changed)

@@ -27,19 +27,18 @@ export type DatasetSchemaType = {

 export type DatasetCollectionSchemaType = {
   _id: string;
   userId: string;
   teamId: string;
   tmbId: string;
   datasetId: string;
   parentId?: string;
   name: string;
   type: `${DatasetCollectionTypeEnum}`;
   createTime: Date;
   updateTime: Date;
-  metadata: {
-    fileId?: string;
-    rawLink?: string;
-    pgCollectionId?: string;
-  };
+  trainingType: `${TrainingModeEnum}`;
+  chunkSize: number;
+  fileId?: string;
+  rawLink?: string;
 };

 export type DatasetDataIndexItemType = {
@@ -57,6 +56,8 @@ export type DatasetDataSchemaType = {
-  collectionId: string;
   datasetId: string;
+  collectionId: string;
+  chunkIndex: number;
+  updateTime: Date;
   q: string; // large chunks or question
   a: string; // answer or custom content
   fullTextToken: string;
@@ -78,6 +79,7 @@ export type DatasetTrainingSchemaType = {
   prompt: string;
   q: string;
   a: string;
+  chunkIndex: number;
   indexes: Omit<DatasetDataIndexItemType, 'dataId'>[];
 };

@@ -101,6 +103,7 @@ export type DatasetCollectionItemType = CollectionWithDatasetType & {
   canWrite: boolean;
   sourceName: string;
+  sourceId?: string;
   file?: DatasetFileSchema;
 };

 /* ================= data ===================== */
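The schema hunks above flatten the collection metadata (fileId and rawLink become top-level fields, pgCollectionId disappears) and record trainingType and chunkSize per collection; data and training records also pick up a chunkIndex. A sketch of a collection object in the flattened shape; the local type is an illustrative subset of DatasetCollectionSchemaType and the values are made up.

```ts
// Illustrative subset of DatasetCollectionSchemaType after the flattening.
type DatasetCollection = {
  _id: string;
  teamId: string;
  tmbId: string;
  datasetId: string;
  parentId?: string;
  name: string;
  type: 'folder' | 'file' | 'link' | 'virtual';
  createTime: Date;
  updateTime: Date;
  trainingType: 'chunk' | 'qa';
  chunkSize: number;
  fileId?: string; // was metadata.fileId
  rawLink?: string; // was metadata.rawLink
};

const collection: DatasetCollection = {
  _id: 'col_789',
  teamId: 'team_1',
  tmbId: 'tmb_123',
  datasetId: 'ds_456',
  name: 'product-docs.pdf',
  type: 'file',
  createTime: new Date(),
  updateTime: new Date(),
  trainingType: 'chunk',
  chunkSize: 512,
  fileId: 'file_abc'
};
```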