Revert "sub plan page (#885)" (#886)

This reverts commit 443ad37b6a.
Author: Archer
Date: 2024-02-23 17:48:15 +08:00 (committed by GitHub)
Parent: 443ad37b6a
Commit: fd9b6291af

246 changed files with 4281 additions and 6286 deletions


@@ -6,7 +6,8 @@ export type LLMModelItemType = {
quoteMaxToken: number;
maxTemperature: number;
charsPointsPrice: number; // 1k chars=n points
inputPrice: number;
outputPrice: number;
censor?: boolean;
vision?: boolean;
@@ -26,7 +27,8 @@ export type VectorModelItemType = {
model: string;
name: string;
defaultToken: number;
charsPointsPrice: number;
inputPrice: number;
outputPrice: number;
maxToken: number;
weight: number;
hidden?: boolean;
@@ -36,7 +38,8 @@ export type VectorModelItemType = {
export type ReRankModelItemType = {
model: string;
name: string;
charsPointsPrice: number;
inputPrice: number;
outputPrice?: number;
requestUrl?: string;
requestAuth?: string;
};
@@ -44,12 +47,14 @@ export type ReRankModelItemType = {
export type AudioSpeechModelType = {
model: string;
name: string;
charsPointsPrice: number;
inputPrice: number;
outputPrice?: number;
voices: { label: string; value: string; bufferId: string }[];
};
export type WhisperModelType = {
model: string;
name: string;
charsPointsPrice: number; // 60s = n points
inputPrice: number;
outputPrice?: number;
};
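Note: the hunk above reverts the unified charsPointsPrice field back to separate inputPrice/outputPrice fields on the model item types. As a hedged sketch (field values and the import path are illustrative assumptions, not part of this commit), a chat model entry under the restored typing could look like this:

// Sketch only: partial LLMModelItemType entry using the restored
// inputPrice/outputPrice fields instead of a single charsPointsPrice.
// The import path is assumed; adjust it to the real module location.
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';

const exampleChatModel: Partial<LLMModelItemType> = {
  model: 'gpt-3.5-turbo',
  name: 'GPT-3.5 Turbo',
  maxResponse: 16000,
  quoteMaxToken: 13000,
  maxTemperature: 1.2,
  inputPrice: 0,   // restored input-side pricing field
  outputPrice: 0,  // restored output-side pricing field
  censor: false,
  vision: false,
  datasetProcess: true
};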


@@ -8,7 +8,8 @@ export const defaultQAModels: LLMModelItemType[] = [
maxResponse: 16000,
quoteMaxToken: 13000,
maxTemperature: 1.2,
charsPointsPrice: 0,
inputPrice: 0,
outputPrice: 0,
censor: false,
vision: false,
datasetProcess: true,
@@ -25,7 +26,8 @@ export const defaultVectorModels: VectorModelItemType[] = [
{
model: 'text-embedding-ada-002',
name: 'Embedding-2',
charsPointsPrice: 0,
inputPrice: 0,
outputPrice: 0,
defaultToken: 500,
maxToken: 3000,
weight: 100


@@ -17,7 +17,6 @@ export interface AppUpdateParams {
intro?: string;
modules?: AppSchema['modules'];
permission?: AppSchema['permission'];
teamTags?: AppSchema['teamTags'];
}
export type FormatForm2ModulesProps = {


@@ -5,7 +5,7 @@ import type { AIChatModuleProps, DatasetModuleProps } from '../module/node/type.
import { VariableInputEnum } from '../module/constants';
import { SelectedDatasetType } from '../module/api';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { TeamTagsSchema as TeamTagsSchemaType } from '@fastgpt/global/support/user/team/type.d';
export interface AppSchema {
_id: string;
userId: string;
@@ -20,7 +20,6 @@ export interface AppSchema {
modules: ModuleItemType[];
permission: `${PermissionTypeEnum}`;
inited?: boolean;
teamTags: [string];
}
export type AppListItemType = {


@@ -27,8 +27,7 @@ export enum ChatSourceEnum {
test = 'test',
online = 'online',
share = 'share',
api = 'api',
team = 'team'
api = 'api'
}
export const ChatSourceMap = {
[ChatSourceEnum.test]: {
@@ -42,9 +41,6 @@ export const ChatSourceMap = {
},
[ChatSourceEnum.api]: {
name: 'core.chat.logs.api'
},
[ChatSourceEnum.team]: {
name: 'core.chat.logs.team'
}
};
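The team chat source and its map entry are removed by this revert; the remaining sources resolve their i18n label keys the same way. A minimal usage sketch (the variable name is illustrative):

// Look up the i18n label key for a chat record's source.
const apiSourceLabelKey = ChatSourceMap[ChatSourceEnum.api].name; // 'core.chat.logs.api'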


@@ -4,7 +4,6 @@ import { ChatRoleEnum, ChatSourceEnum, ChatStatusEnum } from './constants';
import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleOutputKeyEnum } from '../module/constants';
import { AppSchema } from '../app/type';
import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';
export type ChatSchema = {
@@ -26,22 +25,6 @@ export type ChatSchema = {
metadata?: Record<string, any>;
};
export type teamInfoType = {
avatar: string;
balance: number;
createTime: string;
maxSize: number;
name: string;
ownerId: string;
tagsUrl: string;
_id: string;
}
export type chatAppListSchema = {
apps: Array<AppType>,
teamInfo: teamInfoSchema
}
export type ChatWithAppSchema = Omit<ChatSchema, 'appId'> & {
appId: AppSchema;
};
@@ -105,15 +88,15 @@ export type ChatHistoryItemType = HistoryItemType & {
export type moduleDispatchResType = {
// common
moduleLogo?: string;
price?: number;
runningTime?: number;
query?: string;
textOutput?: string;
// bill
inputTokens?: number;
outputTokens?: number;
charsLength?: number;
model?: string;
query?: string;
contextTotalLen?: number;
totalPoints?: number;
textOutput?: string;
// chat
temperature?: number;
@@ -128,7 +111,6 @@ export type moduleDispatchResType = {
searchUsingReRank?: boolean;
extensionModel?: string;
extensionResult?: string;
extensionCharsLength?: number;
// cq
cqList?: ClassifyQuestionAgentItemType[];
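For reference, a hedged sketch of a response object under the reverted moduleDispatchResType, using only optional fields visible in this hunk (the values and the import path are illustrative assumptions):

// Sketch only: per-module response using the restored billing fields
// (price, inputTokens, outputTokens) instead of charsLength/totalPoints.
// The import path is assumed.
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';

const exampleDispatchRes: moduleDispatchResType = {
  moduleLogo: '/imgs/module/AI.png', // illustrative path
  price: 0,
  runningTime: 1.2,
  inputTokens: 320,
  outputTokens: 85,
  model: 'gpt-3.5-turbo',
  query: 'user question here',
  textOutput: 'model answer here'
};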


@@ -71,6 +71,30 @@ export const DatasetCollectionSyncResultMap = {
};
/* ------------ data -------------- */
export enum DatasetDataIndexTypeEnum {
chunk = 'chunk',
qa = 'qa',
summary = 'summary',
hypothetical = 'hypothetical',
custom = 'custom'
}
export const DatasetDataIndexTypeMap = {
[DatasetDataIndexTypeEnum.chunk]: {
name: 'dataset.data.indexes.chunk'
},
[DatasetDataIndexTypeEnum.summary]: {
name: 'dataset.data.indexes.summary'
},
[DatasetDataIndexTypeEnum.hypothetical]: {
name: 'dataset.data.indexes.hypothetical'
},
[DatasetDataIndexTypeEnum.qa]: {
name: 'dataset.data.indexes.qa'
},
[DatasetDataIndexTypeEnum.custom]: {
name: 'dataset.data.indexes.custom'
}
};
/* ------------ training -------------- */
export enum TrainingModeEnum {


@@ -3,6 +3,7 @@ import { PermissionTypeEnum } from '../../support/permission/constant';
import { PushDatasetDataChunkProps } from './api';
import {
DatasetCollectionTypeEnum,
DatasetDataIndexTypeEnum,
DatasetStatusEnum,
DatasetTypeEnum,
SearchScoreTypeEnum,
@@ -63,6 +64,7 @@ export type DatasetCollectionSchemaType = {
export type DatasetDataIndexItemType = {
defaultIndex: boolean;
dataId: string; // pg data id
type: `${DatasetDataIndexTypeEnum}`;
text: string;
};
export type DatasetDataSchemaType = {
@@ -140,7 +142,6 @@ export type DatasetCollectionItemType = CollectionWithDatasetType & {
/* ================= data ===================== */
export type DatasetDataItemType = {
id: string;
teamId: string;
datasetId: string;
collectionId: string;
sourceName: string;
@@ -172,7 +173,7 @@ export type DatasetFileSchema = {
/* ============= search =============== */
export type SearchDataResponseItemType = Omit<
DatasetDataItemType,
'teamId' | 'indexes' | 'isOwner' | 'canWrite'
'indexes' | 'isOwner' | 'canWrite'
> & {
score: { type: `${SearchScoreTypeEnum}`; value: number; index: number }[];
// score: number;


@@ -1,4 +1,4 @@
import { TrainingModeEnum, DatasetCollectionTypeEnum } from './constants';
import { TrainingModeEnum, DatasetCollectionTypeEnum, DatasetDataIndexTypeEnum } from './constants';
import { getFileIcon } from '../../common/file/icon';
import { strIsLink } from '../../common/string/tools';
@@ -41,6 +41,7 @@ export function getDefaultIndex(props?: { q?: string; a?: string; dataId?: strin
const qaStr = `${q}\n${a}`.trim();
return {
defaultIndex: true,
type: a ? DatasetDataIndexTypeEnum.qa : DatasetDataIndexTypeEnum.chunk,
text: a ? qaStr : q,
dataId
};
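A hedged usage sketch of the restored behaviour: with the type field back on the default index, a question/answer pair is tagged as a qa index, while a plain chunk falls back to chunk (the inputs are illustrative):

// With both q and a, the default index is typed 'qa' and concatenates them.
getDefaultIndex({ q: 'What is FastGPT?', a: 'An LLM application platform.' });
// -> { defaultIndex: true, type: 'qa', text: 'What is FastGPT?\nAn LLM application platform.', dataId: undefined }

// With only q, the default index is typed 'chunk'.
getDefaultIndex({ q: 'Plain chunk text' });
// -> { defaultIndex: true, type: 'chunk', text: 'Plain chunk text', dataId: undefined }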


@@ -89,10 +89,9 @@ export enum ModuleInputKeyEnum {
export enum ModuleOutputKeyEnum {
// common
responseData = 'responseData',
moduleDispatchBills = 'moduleDispatchBills',
userChatInput = 'userChatInput',
finish = 'finish',
responseData = 'responseData',
history = 'history',
answerText = 'answerText', // answer module text key
success = 'success',


@@ -20,7 +20,9 @@ export enum FlowNodeInputTypeEnum {
aiSettings = 'aiSettings',
// ai model select
selectLLMModel = 'selectLLMModel',
selectChatModel = 'selectChatModel',
selectCQModel = 'selectCQModel',
selectExtractModel = 'selectExtractModel',
// dataset special input
selectDataset = 'selectDataset',
@@ -56,7 +58,7 @@ export enum FlowNodeTypeEnum {
pluginModule = 'pluginModule',
pluginInput = 'pluginInput',
pluginOutput = 'pluginOutput',
queryExtension = 'cfr'
cfr = 'cfr'
// abandon
}


@@ -31,7 +31,7 @@ export const AiChatModule: FlowModuleTemplateType = {
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
type: FlowNodeInputTypeEnum.selectChatModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,


@@ -24,7 +24,7 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
type: FlowNodeInputTypeEnum.selectCQModel,
valueType: ModuleIOValueTypeEnum.string,
label: 'core.module.input.label.Classify model',
required: true,


@@ -24,7 +24,7 @@ export const ContextExtractModule: FlowModuleTemplateType = {
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
type: FlowNodeInputTypeEnum.selectExtractModel,
valueType: ModuleIOValueTypeEnum.string,
label: 'core.module.input.label.LLM',
required: true,


@@ -3,7 +3,7 @@ import {
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type';
import { FlowModuleTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
@@ -17,19 +17,19 @@ import {
} from '../input';
import { Output_Template_UserChatInput } from '../output';
export const AiQueryExtension: FlowModuleTemplateType = {
export const AiCFR: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.chatNode,
templateType: ModuleTemplateTypeEnum.other,
flowType: FlowNodeTypeEnum.queryExtension,
flowType: FlowNodeTypeEnum.cfr,
avatar: '/imgs/module/cfr.svg',
name: 'core.module.template.Query extension',
intro: 'core.module.template.Query extension intro',
intro: '该模块已合并到知识库搜索参数中无需单独使用。模块将于2024/3/31弃用请尽快修改。',
showStatus: true,
inputs: [
Input_Template_Switch,
{
key: ModuleInputKeyEnum.aiModel,
type: FlowNodeInputTypeEnum.selectLLMModel,
type: FlowNodeInputTypeEnum.selectExtractModel,
label: 'core.module.input.label.aiModel',
required: true,
valueType: ModuleIOValueTypeEnum.string,
@@ -39,7 +39,7 @@ export const AiQueryExtension: FlowModuleTemplateType = {
{
key: ModuleInputKeyEnum.aiSystemPrompt,
type: FlowNodeInputTypeEnum.textarea,
label: 'core.app.edit.Query extension background prompt',
label: 'core.module.input.label.Background',
max: 300,
valueType: ModuleIOValueTypeEnum.string,
description: 'core.app.edit.Query extension background tip',
@@ -54,8 +54,7 @@ export const AiQueryExtension: FlowModuleTemplateType = {
Output_Template_UserChatInput,
{
key: ModuleOutputKeyEnum.text,
label: 'core.module.output.label.query extension result',
description: 'core.module.output.description.query extension result',
label: 'core.module.output.label.cfr result',
valueType: ModuleIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.source,
targets: []


@@ -1,14 +1,6 @@
import { FlowNodeTypeEnum } from './node/constant';
import {
ModuleIOValueTypeEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum,
VariableInputEnum
} from './constants';
import { ModuleIOValueTypeEnum, ModuleTemplateTypeEnum, VariableInputEnum } from './constants';
import { FlowNodeInputItemType, FlowNodeOutputItemType } from './node/type';
import { UserModelSchema } from 'support/user/type';
import { moduleDispatchResType } from '..//chat/type';
import { ChatModuleBillType } from '../../support/wallet/bill/type';
export type FlowModuleTemplateType = {
id: string; // module id, unique
@@ -113,7 +105,7 @@ export type ChatDispatchProps = {
mode: 'test' | 'chat';
teamId: string;
tmbId: string;
user: UserModelSchema;
user: UserType;
appId: string;
chatId?: string;
responseChatItemId?: string;
@@ -124,10 +116,7 @@ export type ChatDispatchProps = {
};
export type ModuleDispatchProps<T> = ChatDispatchProps & {
module: RunningModuleItemType;
outputs: RunningModuleItemType['outputs'];
inputs: RunningModuleItemType['inputs'];
params: T;
};
export type ModuleDispatchResponse<T> = T & {
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
[ModuleOutputKeyEnum.moduleDispatchBills]?: ChatModuleBillType[];
};
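Finally, a hedged sketch of how a module handler consumes these types after the revert: the props carry the restored user and outputs/inputs fields, and the handler returns its outputs keyed by ModuleOutputKeyEnum. The handler name, params shape, and import paths are assumptions for illustration, not part of this commit.

// Sketch only: minimal dispatch handler written against the reverted types.
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';

type ExampleParams = { userChatInput: string };

export async function dispatchExample(props: ModuleDispatchProps<ExampleParams>) {
  const { params } = props;
  return {
    [ModuleOutputKeyEnum.answerText]: `echo: ${params.userChatInput}`,
    [ModuleOutputKeyEnum.responseData]: {
      price: 0,            // restored billing field on moduleDispatchResType
      runningTime: 0.05,
      query: params.userChatInput
    }
  };
}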