Files
FastGPT/test/setupModels.ts
T
Ryo 289da0f7b0 chore: bump pro submodule for hydration stability (#6808)
* sandbox-sync-agent

* refactor: host pro as submodule

* chore: checkpoint host pro restructure

* refactor workspace test layout and startup init

* chore: update next turbopack setup

* chore: snapshot current work before actions fix

* chore: update pro submodule

* chore: point pro submodule url to upstream https

* fix: Dockerfile

* chore: update pro submodule

* ci: support private pro submodule token and skip fork jobs

* fix(ci): build sdk workspace deps before code-sandbox bundle

* fix(app): exclude vitest configs from production typecheck

* fix(app-image): build sdk packages before next build

* fix(ci): align dockerfiles with workspace sdk build flow

* chore(docker): upgrade node20 docker images to node24

* fix(ci): read admin coverage output path in pro test workflow

* fix(app-image): include next-i18next config and locale assets

* chore: update pro submodule

* chore: do not specify branch for submodule

* chore: remove most ts-nocheck sign

* chore: update pro submodule

* chore: remove sandbox-agent-sync package

* chore: do not modify "pushData" file logic

* fix: health check

* chore: restore dev axios proxy state

* fix: test-fastgpt report workflow

* fix: use valid vitest coverage action inputs
2026-04-27 17:44:12 +08:00

66 lines
1.7 KiB
TypeScript

import { ModelTypeEnum } from '@fastgpt/global/core/ai/constants';
/**
 * Seeds the global model registries used by the test environment.
 *
 * Registers one mock LLM ('gpt-5') and one mock embedding model
 * ('text-embedding-ada-002'), wires them into `global.systemDefaultModel`
 * and `global.systemModelList`, and initializes the per-type model maps,
 * so code under test can resolve models without a real config source.
 */
export default async function setupModels() {
  // Single source of truth for the mock LLM. Previously this literal was
  // duplicated verbatim in both the map entry and systemDefaultModel.llm,
  // letting the two copies drift apart silently.
  const mockLlm = {
    type: ModelTypeEnum.llm,
    model: 'gpt-5',
    name: 'gpt-5',
    avatar: 'gpt-5',
    isActive: true,
    isDefault: true,
    isCustom: false,
    requestUrl: undefined,
    requestAuth: undefined,
    defaultSystemChatPrompt: undefined,
    fieldMap: undefined,
    defaultConfig: undefined,
    provider: 'OpenAI',
    functionCall: false,
    toolChoice: false,
    maxContext: 4096,
    maxResponse: 4096,
    quoteMaxToken: 2048
  };

  // Mock embedding model used as the system default.
  const mockEmbedding = {
    type: ModelTypeEnum.embedding,
    model: 'text-embedding-ada-002',
    name: 'text-embedding-ada-002',
    avatar: 'text-embedding-ada-002',
    isActive: true,
    isDefault: true,
    isCustom: false,
    requestUrl: undefined,
    requestAuth: undefined,
    defaultConfig: undefined,
    defaultToken: 1,
    maxToken: 100,
    provider: 'OpenAI',
    weight: 1
  };

  global.llmModelMap = new Map<string, any>([[mockLlm.model, mockLlm]]);
  // NOTE(review): the embedding map is intentionally left empty here (the
  // original code never registered the default embedding model in it) —
  // confirm whether tests rely on that, or whether mockEmbedding should be
  // added as well.
  global.embeddingModelMap = new Map<string, any>();

  global.systemDefaultModel = {
    llm: mockLlm,
    embedding: mockEmbedding
  };

  // Both defaults are assigned just above, so no non-null assertions needed.
  global.systemModelList = [mockLlm, mockEmbedding];
}