4.6.8-alpha (#804)

* perf: redirect request and err log replace

perf: dataset openapi

feat: session

fix: retry input error

feat: 468 doc

sub page

feat: standard sub

perf: rerank tip

perf: rerank tip

perf: api sdk

perf: openapi

sub plan

perf: sub ui

fix: ts

* perf: init log

* fix: variable select

* sub page

* icon

* perf: llm model config

* perf: menu ux

* perf: system store

* perf: publish app name

* fix: init data

* perf: flow edit ux

* fix: value type format and ux

* fix prompt editor default value (#13)

* fix prompt editor default value

* fix prompt editor update when not focused

* add key with variable

---------

Co-authored-by: Archer <545436317@qq.com>

* fix: value type

* doc

* i18n

* import path

* home page

* perf: mongo session running

* fix: ts

* perf: use toast

* perf: flow edit

* perf: sse response

* slider ui

* fetch error

* fix prompt editor rerender when not focused by keying on defaultValue (#14)

* perf: prompt editor

* feat: dataset search concat

* perf: doc

* fix: ts

* perf: doc

* fix json editor onblur value (#15)

* faq

* vector model default config

* ipv6

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
Authored by Archer on 2024-02-01 21:57:41 +08:00, committed by GitHub
parent fc19c4cf09
commit 34602b25df
285 changed files with 10345 additions and 11223 deletions
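The diff below is from the classify-question dispatch module. Two changes repeat throughout: the CQ-specific model helpers (FunctionModelItemType, getCQModel) are folded into the generic LLM ones (LLMModelItemType, getLLMModel), and getAIApi now takes a single options object instead of positional arguments. The following is a minimal TypeScript sketch of such an options-object wrapper; only the option names userKey and timeout come from the diff, while the OpenAI client construction and defaults are illustrative assumptions, not the project's actual implementation.

// Sketch of the getAIApi signature change visible in the diff below:
// a positional (userKey, timeout) pair becomes a single options object.
// Everything beyond the userKey/timeout option names is an assumption.
import OpenAI from 'openai';

type OpenaiAccount = { key: string; baseUrl?: string };

type GetAIApiProps = {
  userKey?: OpenaiAccount; // was the first positional argument
  timeout?: number; // was the second positional argument
};

export function getAIApi({ userKey, timeout = 480000 }: GetAIApiProps = {}) {
  return new OpenAI({
    apiKey: userKey?.key || process.env.OPENAI_API_KEY,
    baseURL: userKey?.baseUrl || process.env.OPENAI_BASE_URL,
    timeout
  });
}

// Call sites migrate from getAIApi(user.openaiAccount, 480000)
// to getAIApi({ userKey: user.openaiAccount, timeout: 480000 }).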


@@ -8,8 +8,8 @@ import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/mo
 import type { ModuleDispatchProps } from '@fastgpt/global/core/module/type.d';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
 import { Prompt_CQJson } from '@/global/core/prompt/agent';
-import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
-import { ModelTypeEnum, getCQModel } from '@/service/core/ai/model';
+import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
+import { ModelTypeEnum, getLLMModel } from '@/service/core/ai/model';
 import { getHistories } from '../utils';
 import { formatModelPrice2Store } from '@/service/support/wallet/bill/utils';
@@ -32,14 +32,14 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
   const {
     user,
     histories,
-    inputs: { model, history = 6, agents, userChatInput }
+    params: { model, history = 6, agents, userChatInput }
   } = props as Props;
   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }
-  const cqModel = getCQModel(model);
+  const cqModel = getLLMModel(model);
   const chatHistories = getHistories(history, histories);
@@ -64,7 +64,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
     model: cqModel.model,
     inputLen: inputTokens,
     outputLen: outputTokens,
-    type: ModelTypeEnum.cq
+    type: ModelTypeEnum.llm
   });
   return {
@@ -86,8 +86,8 @@ async function toolChoice({
   user,
   cqModel,
   histories,
-  inputs: { agents, systemPrompt, userChatInput }
-}: Props & { cqModel: FunctionModelItemType }) {
+  params: { agents, systemPrompt, userChatInput }
+}: Props & { cqModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     ...histories,
     {
@@ -112,7 +112,7 @@ ${systemPrompt}
   // function body
   const agentFunction = {
     name: agentFunName,
-    description: '根据对话记录及补充的背景知识,对问题进行分类,并返回对应的类型字段',
+    description: '根据对话记录及背景知识,对问题进行分类,并返回对应的类型字段',
     parameters: {
       type: 'object',
       properties: {
@@ -127,7 +127,10 @@ ${systemPrompt}
       required: ['type']
     }
   };
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
   const response = await ai.chat.completions.create({
     model: cqModel.model,
@@ -170,12 +173,12 @@ async function completions({
   cqModel,
   user,
   histories,
-  inputs: { agents, systemPrompt = '', userChatInput }
-}: Props & { cqModel: FunctionModelItemType }) {
+  params: { agents, systemPrompt = '', userChatInput }
+}: Props & { cqModel: LLMModelItemType }) {
   const messages: ChatItemType[] = [
     {
       obj: ChatRoleEnum.Human,
-      value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
+      value: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
         systemPrompt: systemPrompt || 'null',
         typeList: agents
           .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
@@ -186,7 +189,10 @@ async function completions({
     }
   ];
-  const ai = getAIApi(user.openaiAccount, 480000);
+  const ai = getAIApi({
+    userKey: user.openaiAccount,
+    timeout: 480000
+  });
   const data = await ai.chat.completions.create({
     model: cqModel.model,
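
In the completions() fallback above, the per-model functionPrompt is replaced by a generic customCQPrompt, with Prompt_CQJson as the default template. Below is a self-contained sketch of that substitution; replaceVariable and the template text are simplified stand-ins for the helpers imported from @fastgpt/global, and only the fallback and the typeList construction mirror the diff, while the question field is an assumption not shown in the hunk.

// Simplified sketch of the classify-question prompt templating used above.
// Prompt_CQJson and replaceVariable are stand-ins, not the real exports.
type CQAgent = { key: string; value: string };

const Prompt_CQJson = `{{systemPrompt}}
Classify the question into one of the listed types and answer with its typeId:
{{typeList}}
Question: "{{question}}"`;

function replaceVariable(text: string, obj: Record<string, string>) {
  // Replace every {{key}} placeholder with its value.
  return Object.entries(obj).reduce(
    (acc, [key, val]) => acc.split(`{{${key}}}`).join(val),
    text
  );
}

function buildCQPrompt(opts: {
  customCQPrompt?: string; // cqModel.customCQPrompt in the diff
  systemPrompt?: string;
  agents: CQAgent[];
  userChatInput: string;
}) {
  return replaceVariable(opts.customCQPrompt || Prompt_CQJson, {
    systemPrompt: opts.systemPrompt || 'null',
    typeList: opts.agents
      .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
      .join('\n'),
    question: opts.userChatInput // assumed field; not shown in the diff hunk
  });
}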