4.6.8-alpha (#804)

* perf: redirect request and err log replace

perf: dataset openapi

feat: session

fix: retry input error

feat: 468 doc

sub page

feat: standard sub

perf: rerank tip

perf: rerank tip

perf: api sdk

perf: openapi

sub plan

perf: sub ui

fix: ts

* perf: init log

* fix: variable select

* sub page

* icon

* perf: llm model config

* perf: menu ux

* perf: system store

* perf: publish app name

* fix: init data

* perf: flow edit ux

* fix: value type format and ux

* fix prompt editor default value (#13)

* fix prompt editor default value

* fix prompt editor update when not focused

* add key with variable

---------

Co-authored-by: Archer <545436317@qq.com>

* fix: value type

* doc

* i18n

* import path

* home page

* perf: mongo session running

* fix: ts

* perf: use toast

* perf: flow edit

* perf: sse response

* slider ui

* fetch error

* fix prompt editor rerender when not focused by defaultValue key (#14)

* perf: prompt editor

* feat: dataset search concat

* perf: doc

* fix: ts

* perf: doc

* fix json editor onblur value (#15)

* faq

* vector model default config

* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
Author: Archer
Date: 2024-02-01 21:57:41 +08:00 (committed by GitHub)
Commit: 34602b25df
Parent: fc19c4cf09
285 changed files with 10345 additions and 11223 deletions


```diff
@@ -3,20 +3,24 @@ export type LLMModelItemType = {
   name: string;
   maxContext: number;
   maxResponse: number;
-  inputPrice: number;
-  outputPrice: number;
-};
-
-export type ChatModelItemType = LLMModelItemType & {
   quoteMaxToken: number;
   maxTemperature: number;
+  inputPrice: number;
+  outputPrice: number;
+
   censor?: boolean;
   vision?: boolean;
-  defaultSystemChatPrompt?: string;
-};
+  datasetProcess?: boolean;
 
-export type FunctionModelItemType = LLMModelItemType & {
   functionCall: boolean;
   toolChoice: boolean;
-  functionPrompt: string;
+
+  customCQPrompt: string;
+  customExtractPrompt: string;
+
+  defaultSystemChatPrompt?: string;
+  defaultConfig?: Record<string, any>;
 };
+
 export type VectorModelItemType = {
@@ -27,6 +31,8 @@ export type VectorModelItemType = {
   outputPrice: number;
   maxToken: number;
   weight: number;
+  hidden?: boolean;
+  defaultConfig?: Record<string, any>;
 };
 
 export type ReRankModelItemType = {
```
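
After this change the capability flags (censor, vision, datasetProcess, toolChoice, functionCall) and the custom prompt fields appear directly on LLMModelItemType, as the expanded defaultQAModels entry below also shows, so a single entry describes a model completely. Below is a minimal sketch of selecting a model by flag; the type mirrors the fields in the diff above, while pickModel and the sample list are illustrative only, not part of this commit.

```ts
// Field names mirror the LLMModelItemType diff above; pickModel and the
// sample entry are hypothetical helpers for illustration only.
type LLMModelItemType = {
  model: string;
  name: string;
  maxContext: number;
  maxResponse: number;
  quoteMaxToken: number;
  maxTemperature: number;
  inputPrice: number;
  outputPrice: number;
  censor?: boolean;
  vision?: boolean;
  datasetProcess?: boolean;
  functionCall: boolean;
  toolChoice: boolean;
  customCQPrompt: string;
  customExtractPrompt: string;
  defaultSystemChatPrompt?: string;
  defaultConfig?: Record<string, any>;
};

// Return the first model that supports the requested capability,
// falling back to the first entry in the list.
function pickModel(
  models: LLMModelItemType[],
  flag: 'datasetProcess' | 'toolChoice' | 'functionCall' | 'vision'
): LLMModelItemType | undefined {
  return models.find((m) => m[flag]) ?? models[0];
}

const models: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'gpt-3.5-turbo-16k',
    maxContext: 16000,
    maxResponse: 16000,
    quoteMaxToken: 13000,
    maxTemperature: 1.2,
    inputPrice: 0,
    outputPrice: 0,
    censor: false,
    vision: false,
    datasetProcess: true,
    toolChoice: true,
    functionCall: false,
    customCQPrompt: '',
    customExtractPrompt: '',
    defaultSystemChatPrompt: '',
    defaultConfig: {}
  }
];

console.log(pickModel(models, 'datasetProcess')?.name); // gpt-3.5-turbo-16k
```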


```diff
@@ -3,11 +3,22 @@ import type { LLMModelItemType, VectorModelItemType } from './model.d';
 export const defaultQAModels: LLMModelItemType[] = [
   {
     model: 'gpt-3.5-turbo-16k',
-    name: 'GPT35-16k',
+    name: 'gpt-3.5-turbo-16k',
     maxContext: 16000,
     maxResponse: 16000,
+    quoteMaxToken: 13000,
+    maxTemperature: 1.2,
     inputPrice: 0,
-    outputPrice: 0
+    outputPrice: 0,
+    censor: false,
+    vision: false,
+    datasetProcess: true,
+    toolChoice: true,
+    functionCall: false,
+    customCQPrompt: '',
+    customExtractPrompt: '',
+    defaultSystemChatPrompt: '',
+    defaultConfig: {}
   }
 ];
```
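
Since the default QA entry now spells out every field, a config entry written against the old six-field shape would leave the new flags undefined. One way such an entry could be backfilled from the default is sketched below; mergeWithDefault and legacyEntry are hypothetical and not part of this commit.

```ts
// The default QA model entry from the diff above (type left to inference).
const defaultQAModel = {
  model: 'gpt-3.5-turbo-16k',
  name: 'gpt-3.5-turbo-16k',
  maxContext: 16000,
  maxResponse: 16000,
  quoteMaxToken: 13000,
  maxTemperature: 1.2,
  inputPrice: 0,
  outputPrice: 0,
  censor: false,
  vision: false,
  datasetProcess: true,
  toolChoice: true,
  functionCall: false,
  customCQPrompt: '',
  customExtractPrompt: '',
  defaultSystemChatPrompt: '',
  defaultConfig: {} as Record<string, any>
};

// Hypothetical helper: fill fields missing from a partial entry using a
// fully specified default of the same shape.
function mergeWithDefault<T extends object>(defaults: T, partial: Partial<T>): T {
  return { ...defaults, ...partial };
}

// An entry in the old six-field style inherits the new flags from the default.
const legacyEntry = { name: 'GPT35-16k', inputPrice: 0, outputPrice: 0 };
const merged = mergeWithDefault(defaultQAModel, legacyEntry);
console.log(merged.name, merged.toolChoice); // GPT35-16k true
```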