4.7-production (#1053)

* 4.7-alpha3 (#62)

* doc

* Optimize possible null pointers and parts of UX

* fix: multi-index training error

* feat: doc and rename question guide

* fix iOS speech input (#59)

* fix: prompt editor variables nowrap (#61)

* change openapi import in http module with curl import (#60)

* chore(ui): dataset import modal ui (#58)

* chore(ui): dataset import modal ui

* use component

* fix height

* 4.7 (#63)

* fix: claude3 image type verification failed (#1038) (#1040)

* perf: curl import modal

* doc img

* perf: adapt cohere rerank

* perf: code

* perf: input style

* doc

---------

Co-authored-by: xiaotian <dimsky@163.com>

* fix: ts

* docker deploy

* perf: prompt call

* doc

* ts

* finish ui

* perf: outlink detail ux

* perf: user schema

* fix: plugin update

* feat: get current time plugin

* fix: ts

* perf: fetch animation

* perf: mark ux

* doc

* perf: select app ux

* fix: split text custom string conflict

* perf: inform read status

* doc

* memo flow component

* perf: version

* faq

* feat: flow max runtimes

* feat: similarity tip

* feat: auto detect file encoding

* Supports asymmetric vector model

* fix: ts

* perf: max w

* move code

* perf: hide whisper

* fix: ts

* feat: system msg modal

* perf: catch error

* perf: inform tip

* fix: inform

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
Co-authored-by: xiaotian <dimsky@163.com>
This commit is contained in:
Archer
2024-03-26 12:09:31 +08:00
committed by GitHub
parent ef15ca894e
commit 911512b36d
180 changed files with 2179 additions and 1361 deletions

View File

@@ -1,14 +1,16 @@
import { VectorModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getAIApi } from '../config';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { EmbeddingTypeEnm } from '@fastgpt/global/core/ai/constants';
type GetVectorProps = {
model: VectorModelItemType;
input: string;
type?: `${EmbeddingTypeEnm}`;
};
// text to vector
export async function getVectorsByText({ model, input }: GetVectorProps) {
export async function getVectorsByText({ model, input, type }: GetVectorProps) {
if (!input) {
return Promise.reject({
code: 500,
@@ -23,6 +25,8 @@ export async function getVectorsByText({ model, input }: GetVectorProps) {
const result = await ai.embeddings
.create({
...model.defaultConfig,
...(type === EmbeddingTypeEnm.db && model.dbConfig),
...(type === EmbeddingTypeEnm.query && model.queryConfig),
model: model.model,
input: [input]
})

View File

@@ -160,7 +160,7 @@ A: ${chatBg}
return {
rawQuery: query,
extensionQueries: queries,
extensionQueries: Array.isArray(queries) ? queries : [],
model,
tokens: countGptMessagesTokens(messages)
};

View File

@@ -1,7 +1,21 @@
import { PostReRankProps, PostReRankResponse } from '@fastgpt/global/core/ai/api.d';
import { POST } from '../../../common/api/serverRequest';
export function reRankRecall({ query, inputs }: PostReRankProps) {
type PostReRankResponse = {
id: string;
results: {
index: number;
relevance_score: number;
}[];
};
type ReRankCallResult = { id: string; score?: number }[];
export function reRankRecall({
query,
documents
}: {
query: string;
documents: { id: string; text: string }[];
}): Promise<ReRankCallResult> {
const model = global.reRankModels[0];
if (!model || !model?.requestUrl) {
@@ -12,19 +26,24 @@ export function reRankRecall({ query, inputs }: PostReRankProps) {
return POST<PostReRankResponse>(
model.requestUrl,
{
model: model.model,
query,
inputs
documents: documents.map((doc) => doc.text)
},
{
headers: {
Authorization: `Bearer ${model.requestAuth}`
},
timeout: 120000
timeout: 30000
}
)
.then((data) => {
console.log('rerank time:', Date.now() - start);
return data;
return data?.results?.map((item) => ({
id: documents[item.index].id,
score: item.relevance_score
}));
})
.catch((err) => {
console.log('rerank error:', err);

View File

@@ -77,7 +77,8 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
const embeddingRecall = async ({ query, limit }: { query: string; limit: number }) => {
const { vectors, tokens } = await getVectorsByText({
model: getVectorModel(model),
input: query
input: query,
type: 'query'
});
const { results } = await recallFromVectorStore({
@@ -225,7 +226,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
try {
const results = await reRankRecall({
query,
inputs: data.map((item) => ({
documents: data.map((item) => ({
id: item.id,
text: `${item.q}\n${item.a}`
}))

View File

@@ -1,12 +1,9 @@
export const Prompt_Tool_Call = `<Instruction>
你是一个智能机器人,除了可以回答用户问题外,你还掌握工具的使用能力。有时候,你可以依赖工具的运行结果,来更准确的回答用户。
下面是你可以使用的工具,使用 JSON Schema 的格式声明,其中 toolId 是工具的 description 是工具的描述parameters 是工具的参数包括参数的类型和描述required 是必填参数的列表。
"""
{{toolsPrompt}}
"""
工具使用了 JSON Schema 的格式声明,其中 toolId 是工具的 description 是工具的描述parameters 是工具的参数包括参数的类型和描述required 是必填参数的列表。
接下来,请你根据工具描述决定回答问题或是使用工具。在完成任务过程中USER代表用户的输入TOOL_RESPONSE代表工具运行结果。ASSISTANT 代表你的输出。
请你根据工具描述决定回答问题或是使用工具。在完成任务过程中USER代表用户的输入TOOL_RESPONSE代表工具运行结果。ASSISTANT 代表你的输出。
你的每次输出都必须以0,1开头代表是否需要调用工具
0: 不使用工具,直接回答内容。
1: 使用工具,返回工具调用的参数。
@@ -29,7 +26,13 @@ TOOL_RESPONSE: """
ANSWER: 0: 今天杭州是晴天,适合去西湖、灵隐寺、千岛湖等地玩。
</Instruction>
现在,我们开始吧!
现在,我们开始吧!下面是你本次可以使用的工具:
"""
{{toolsPrompt}}
"""
下面是正式的对话内容:
USER: {{question}}
ANSWER:

View File

@@ -125,6 +125,7 @@ export async function dispatchWorkFlow({
}
if (nodeDispatchUsages) {
chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
props.maxRunTimes -= nodeDispatchUsages.length;
}
if (toolResponses !== undefined) {
if (Array.isArray(toolResponses) && toolResponses.length === 0) return;
@@ -217,7 +218,7 @@ export async function dispatchWorkFlow({
);
}
async function moduleRun(module: RunningModuleItemType): Promise<any> {
if (res.closed) return Promise.resolve();
if (res.closed || props.maxRunTimes <= 0) return Promise.resolve();
if (stream && detail && module.showStatus) {
responseStatus({

View File

@@ -19,7 +19,6 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
mode,
teamId,
tmbId,
module,
params: { pluginId, ...data }
} = props;

View File

@@ -157,7 +157,7 @@ async function fetchData({
body: Record<string, any>;
params: Record<string, any>;
}): Promise<Record<string, any>> {
const { data: response } = await axios<Record<string, any>>({
const { data: response } = await axios({
method,
baseURL: `http://${SERVICE_LOCAL_HOST}`,
url,
@@ -241,7 +241,8 @@ async function fetchData({
};
return {
formatResponse: parseJson(response),
formatResponse:
typeof response === 'object' && !Array.isArray(response) ? parseJson(response) : {},
rawResponse: response
};
}