File: FastGPT/packages/global/core/ai/model.ts
Last commit: 4.6.8-alpha (#804) by Archer (34602b25df)
* perf: redirect request and err log replace
  perf: dataset openapi
  feat: session
  fix: retry input error
  feat: 468 doc
  sub page
  feat: standard sub
  perf: rerank tip
  perf: rerank tip
  perf: api sdk
  perf: openapi
  sub plan
  perf: sub ui
  fix: ts
* perf: init log
* fix: variable select
* sub page
* icon
* perf: llm model config
* perf: menu ux
* perf: system store
* perf: publish app name
* fix: init data
* perf: flow edit ux
* fix: value type format and ux
* fix prompt editor default value (#13)
  * fix prompt editor default value
  * fix prompt editor update when not focused
  * add key with variable
  Co-authored-by: Archer <545436317@qq.com>
* fix: value type
* doc
* i18n
* import path
* home page
* perf: mongo session running
* fix: ts
* perf: use toast
* perf: flow edit
* perf: sse response
* slider ui
* fetch error
* fix prompt editor rerender when not focused (keyed by defaultValue) (#14)
* perf: prompt editor
* feat: dataset search concat
* perf: doc
* fix: ts
* perf: doc
* fix json editor onblur value (#15)
* faq
* vector model default config
* ipv6

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
2024-02-01 21:57:41 +08:00

import type { LLMModelItemType, VectorModelItemType } from './model.d';

export const defaultQAModels: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'gpt-3.5-turbo-16k',
    maxContext: 16000,
    maxResponse: 16000,
    quoteMaxToken: 13000,
    maxTemperature: 1.2,
    inputPrice: 0,
    outputPrice: 0,
    censor: false,
    vision: false,
    datasetProcess: true,
    toolChoice: true,
    functionCall: false,
    customCQPrompt: '',
    customExtractPrompt: '',
    defaultSystemChatPrompt: '',
    defaultConfig: {}
  }
];

export const defaultVectorModels: VectorModelItemType[] = [
  {
    model: 'text-embedding-ada-002',
    name: 'Embedding-2',
    inputPrice: 0,
    outputPrice: 0,
    defaultToken: 500,
    maxToken: 3000,
    weight: 100
  }
];
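
These exports act as fallback model configurations for deployments that do not supply a model list of their own. The sketch below shows an assumed usage pattern rather than code from the repository: pickQAModel and pickVectorModel are hypothetical helpers that resolve a requested model name against a configured list and fall back to the defaults above when nothing matches.

import type { LLMModelItemType, VectorModelItemType } from './model.d';
import { defaultQAModels, defaultVectorModels } from './model';

// Hypothetical helpers (not part of this file): look up a model by name in the
// configured list; if it is missing, use the first configured entry, and if the
// list is empty, fall back to the exported defaults.
export const pickQAModel = (
  configured: LLMModelItemType[],
  model?: string
): LLMModelItemType =>
  configured.find((item) => item.model === model) ?? configured[0] ?? defaultQAModels[0];

export const pickVectorModel = (
  configured: VectorModelItemType[],
  model?: string
): VectorModelItemType =>
  configured.find((item) => item.model === model) ?? configured[0] ?? defaultVectorModels[0];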