Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 21:13:50 +00:00)
4.6.8-production (#822)
* Json completion (#16)
* json-completion
* fix duplicate
* fix
* fix: config json
* feat: query extension
* perf: i18n
* 468 doc
* json editor
* perf: doc
* perf: default extension model
* docker file
* doc
* perf: token count
* perf: search extension
* format
* perf: some constants data

---------

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
@@ -34,11 +34,6 @@ export function countPromptTokens(
const enc = getTikTokenEnc();
const text = `${role}\n${prompt}`;

// too large a text will block the thread
if (text.length > 15000) {
return text.length * 1.7;
}

try {
const encodeText = enc.encode(text);
return encodeText.length + role.length; // add a rough estimate for the role tokens
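The length check in this hunk trades accuracy for responsiveness: running a tiktoken encoder synchronously over a very large string can block the Node.js thread, so oversized texts are estimated from their character count instead. A standalone sketch of the idea, assuming a tiktoken-style encoder is passed in (the helper name is illustrative; the 15000-character threshold and 1.7 ratio mirror the hunk):

// Sketch: cheap token estimation for oversized inputs, exact encoding otherwise.
// `encode` stands in for a tiktoken encoder such as the one returned by getTikTokenEnc().
const LENGTH_THRESHOLD = 15000; // beyond this, skip the tokenizer to avoid blocking the thread
const CHARS_TO_TOKENS = 1.7; // rough tokens-per-character ratio used by the hunk above

export function estimateTokens(text: string, encode: (input: string) => number[]): number {
  if (text.length > LENGTH_THRESHOLD) {
    return Math.ceil(text.length * CHARS_TO_TOKENS);
  }
  try {
    return encode(text).length;
  } catch {
    // Fall back to the character count if the encoder throws on unusual input.
    return text.length;
  }
}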
packages/global/core/app/type.d.ts (vendored; 19 changed lines)
@@ -50,7 +50,7 @@ export type AppDetailType = AppSchema & {
// };
// Since useform cannot infer enumeration types, all enumeration keys can only be undone manually
export type AppSimpleEditFormType = {
templateId: string;
// templateId: string;
aiSettings: {
model: string;
systemPrompt?: string | undefined;
@@ -62,14 +62,14 @@ export type AppSimpleEditFormType = {
};
dataset: {
datasets: SelectedDatasetType;
similarity: number;
limit: number;
searchMode: `${DatasetSearchModeEnum}`;
usingReRank: boolean;
searchEmptyText: string;
};
cfr: {
background: string;
similarity?: number;
limit?: number;
usingReRank?: boolean;
searchEmptyText?: string;
datasetSearchUsingExtensionQuery?: boolean;
datasetSearchExtensionModel?: string;
datasetSearchExtensionBg?: string;
};
userGuide: {
welcomeText: string;
@@ -116,9 +116,6 @@ export type AppSimpleEditConfigTemplateType = {
usingReRank: boolean;
searchEmptyText?: boolean;
};
cfr?: {
background?: boolean;
};
userGuide?: {
welcomeText?: boolean;
variables?: boolean;
@@ -6,9 +6,8 @@ import { getGuideModule, splitGuideModule } from '../module/utils';
import { ModuleItemType } from '../module/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';

export const getDefaultAppForm = (templateId = 'fastgpt-universal'): AppSimpleEditFormType => {
export const getDefaultAppForm = (): AppSimpleEditFormType => {
return {
templateId,
aiSettings: {
model: 'gpt-3.5-turbo',
systemPrompt: '',
@@ -18,16 +17,15 @@ export const getDefaultAppForm = (templateId = 'fastgpt-universal'): AppSimpleEd
quoteTemplate: '',
maxToken: 4000
},
cfr: {
background: ''
},
dataset: {
datasets: [],
similarity: 0.4,
limit: 1500,
searchEmptyText: '',
searchMode: DatasetSearchModeEnum.embedding,
usingReRank: false
usingReRank: false,
datasetSearchUsingExtensionQuery: true,
datasetSearchExtensionBg: ''
},
userGuide: {
welcomeText: '',
@@ -41,14 +39,8 @@ export const getDefaultAppForm = (templateId = 'fastgpt-universal'): AppSimpleEd
};

/* format app modules to edit form */
export const appModules2Form = ({
templateId,
modules
}: {
modules: ModuleItemType[];
templateId: string;
}) => {
const defaultAppForm = getDefaultAppForm(templateId);
export const appModules2Form = ({ modules }: { modules: ModuleItemType[] }) => {
const defaultAppForm = getDefaultAppForm();

const findInputValueByKey = (inputs: FlowNodeInputItemType[], key: string) => {
return inputs.find((item) => item.key === key)?.value;
@@ -100,6 +92,18 @@ export const appModules2Form = ({
module.inputs,
ModuleInputKeyEnum.datasetSearchUsingReRank
);
defaultAppForm.dataset.datasetSearchUsingExtensionQuery = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.datasetSearchUsingExtensionQuery
);
defaultAppForm.dataset.datasetSearchExtensionModel = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.datasetSearchExtensionModel
);
defaultAppForm.dataset.datasetSearchExtensionBg = findInputValueByKey(
module.inputs,
ModuleInputKeyEnum.datasetSearchExtensionBg
);

// empty text
const emptyOutputs =
@@ -121,11 +125,6 @@ export const appModules2Form = ({
questionGuide: questionGuide,
tts: ttsConfig
};
} else if (module.flowType === FlowNodeTypeEnum.cfr) {
const value = module.inputs.find((item) => item.key === ModuleInputKeyEnum.aiSystemPrompt);
if (value) {
defaultAppForm.cfr.background = value.value;
}
}
});
packages/global/core/chat/type.d.ts (vendored; 2 changed lines)
@@ -109,6 +109,8 @@ export type moduleDispatchResType = {
limit?: number;
searchMode?: `${DatasetSearchModeEnum}`;
searchUsingReRank?: boolean;
extensionModel?: string;
extensionResult?: string;

// cq
cqList?: ClassifyQuestionAgentItemType[];
@@ -64,7 +64,9 @@ export enum ModuleInputKeyEnum {
datasetMaxTokens = 'limit',
datasetSearchMode = 'searchMode',
datasetSearchUsingReRank = 'usingReRank',
datasetParamsModal = 'datasetParamsModal',
datasetSearchUsingExtensionQuery = 'datasetSearchUsingExtensionQuery',
datasetSearchExtensionModel = 'datasetSearchExtensionModel',
datasetSearchExtensionBg = 'datasetSearchExtensionBg',

// context extract
contextExtractInput = 'content',
@@ -19,11 +19,11 @@ import { Output_Template_UserChatInput } from '../output';

export const AiCFR: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.chatNode,
templateType: ModuleTemplateTypeEnum.tools,
templateType: ModuleTemplateTypeEnum.other,
flowType: FlowNodeTypeEnum.cfr,
avatar: '/imgs/module/cfr.svg',
name: 'core.module.template.cfr',
intro: 'core.module.template.cfr intro',
name: 'core.module.template.Query extension',
intro: '该模块已合并到知识库搜索参数中,无需单独使用。',
showStatus: true,
inputs: [
Input_Template_Switch,
@@ -39,11 +39,11 @@ export const AiCFR: FlowModuleTemplateType = {
{
key: ModuleInputKeyEnum.aiSystemPrompt,
type: FlowNodeInputTypeEnum.textarea,
label: 'core.module.input.label.cfr background',
label: 'core.module.input.label.Background',
max: 300,
valueType: ModuleIOValueTypeEnum.string,
description: 'core.app.edit.cfr background tip',
placeholder: 'core.module.input.placeholder.cfr background',
description: 'core.app.edit.Query extension background tip',
placeholder: 'core.module.QueryExtension.placeholder',
showTargetInApp: true,
showTargetInPlugin: true
},
@@ -37,17 +37,10 @@ export const DatasetSearchModule: FlowModuleTemplateType = {
},
{
key: ModuleInputKeyEnum.datasetSimilarity,
type: FlowNodeInputTypeEnum.hidden,
type: FlowNodeInputTypeEnum.selectDatasetParamsModal,
label: '',
value: 0.4,
valueType: ModuleIOValueTypeEnum.number,
min: 0,
max: 1,
step: 0.01,
markList: [
{ label: '0', value: 0 },
{ label: '1', value: 1 }
],
showTargetInApp: false,
showTargetInPlugin: false
},
@@ -79,13 +72,31 @@ export const DatasetSearchModule: FlowModuleTemplateType = {
value: false
},
{
key: ModuleInputKeyEnum.datasetParamsModal,
type: FlowNodeInputTypeEnum.selectDatasetParamsModal,
key: ModuleInputKeyEnum.datasetSearchUsingExtensionQuery,
type: FlowNodeInputTypeEnum.hidden,
label: '',
valueType: ModuleIOValueTypeEnum.any,
valueType: ModuleIOValueTypeEnum.boolean,
showTargetInApp: false,
showTargetInPlugin: false,
value: true
},
{
key: ModuleInputKeyEnum.datasetSearchExtensionModel,
type: FlowNodeInputTypeEnum.hidden,
label: '',
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false
},
{
key: ModuleInputKeyEnum.datasetSearchExtensionBg,
type: FlowNodeInputTypeEnum.hidden,
label: '',
valueType: ModuleIOValueTypeEnum.string,
showTargetInApp: false,
showTargetInPlugin: false,
value: ''
},
Input_Template_UserChatInput
],
outputs: [
packages/service/core/ai/functions/cfr.ts (new file; 159 lines)
@@ -0,0 +1,159 @@
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getAIApi } from '../config';
import { ChatItemType } from '@fastgpt/global/core/chat/type';

/*
  cfr: coreference resolution.
  Rewrites the current question from the conversation context so that its referents are explicit, which helps retrieval.
*/

const defaultPrompt = `请不要回答任何问题。
你的任务是结合历史记录,为当前问题,实现代词替换,确保问题描述的对象清晰明确。例如:
历史记录:
"""
Q: 对话背景。
A: 关于 FatGPT 的介绍和使用等问题。
"""
当前问题: 怎么下载
输出: FastGPT 怎么下载?
----------------
历史记录:
"""
Q: 报错 "no connection"
A: FastGPT 报错"no connection"可能是因为……
"""
当前问题: 怎么解决
输出: FastGPT 报错"no connection"如何解决?
----------------
历史记录:
"""
Q: 作者是谁?
A: FastGPT 的作者是 labring。
"""
当前问题: 介绍下他
输出: 介绍下 FastGPT 的作者 labring。
----------------
历史记录:
"""
Q: 作者是谁?
A: FastGPT 的作者是 labring。
"""
当前问题: 我想购买商业版。
输出: FastGPT 商业版如何购买?
----------------
历史记录:
"""
Q: 对话背景。
A: 关于 FatGPT 的介绍和使用等问题。
"""
当前问题: nh
输出: nh
----------------
历史记录:
"""
Q: FastGPT 如何收费?
A: FastGPT 收费可以参考……
"""
当前问题: 你知道 laf 么?
输出: 你知道 laf 么?
----------------
历史记录:
"""
Q: FastGPT 的优势
A: 1. 开源
2. 简便
3. 扩展性强
"""
当前问题: 介绍下第2点。
输出: 介绍下 FastGPT 简便的优势。
----------------
历史记录:
"""
Q: 什么是 FastGPT?
A: FastGPT 是一个 RAG 平台。
Q: 什么是 Sealos?
A: Sealos 是一个云操作系统。
"""
当前问题: 它们有什么关系?
输出: FastGPT 和 Sealos 有什么关系?
----------------
历史记录:
"""
{{histories}}
"""
当前问题: {{query}}
输出: `;

export const queryCfr = async ({
  chatBg,
  query,
  histories = [],
  model
}: {
  chatBg?: string;
  query: string;
  histories: ChatItemType[];
  model: string;
}) => {
  if (histories.length === 0 && !chatBg) {
    return {
      rawQuery: query,
      cfrQuery: query,
      model,
      inputTokens: 0,
      outputTokens: 0
    };
  }

  const systemFewShot = chatBg
    ? `Q: 对话背景。
A: ${chatBg}
`
    : '';
  const historyFewShot = histories
    .map((item) => {
      const role = item.obj === 'Human' ? 'Q' : 'A';
      return `${role}: ${item.value}`;
    })
    .join('\n');
  const concatFewShot = `${systemFewShot}${historyFewShot}`.trim();

  const ai = getAIApi({
    timeout: 480000
  });

  const result = await ai.chat.completions.create({
    model: model,
    temperature: 0.01,
    max_tokens: 150,
    messages: [
      {
        role: 'user',
        content: replaceVariable(defaultPrompt, {
          query: `${query}`,
          histories: concatFewShot
        })
      }
    ],
    stream: false
  });

  const answer = result.choices?.[0]?.message?.content || '';
  if (!answer) {
    return {
      rawQuery: query,
      cfrQuery: query,
      model,
      inputTokens: 0,
      outputTokens: 0
    };
  }

  return {
    rawQuery: query,
    cfrQuery: answer,
    model,
    inputTokens: result.usage?.prompt_tokens || 0,
    outputTokens: result.usage?.completion_tokens || 0
  };
};
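queryCfr builds its prompt by substituting {{query}} and {{histories}} into defaultPrompt via replaceVariable from @fastgpt/global/common/string/tools. That helper's implementation is not part of this diff; the sketch below shows one plausible way such a substitution could work and is an assumption, not FastGPT's actual code:

// Hypothetical re-implementation of a {{variable}} substitution helper, for illustration only.
export function replaceVariableSketch(text: string, variables: Record<string, string>): string {
  // Replace every occurrence of {{key}} with its value; unknown placeholders are left untouched.
  return Object.entries(variables).reduce(
    (result, [key, value]) => result.replaceAll(`{{${key}}}`, value),
    text
  );
}

// Usage shaped like the call in queryCfr above:
// replaceVariableSketch(defaultPrompt, { query, histories: concatFewShot });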
@@ -1,61 +1,176 @@
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getAIApi } from '../config';
import { ChatItemType } from '@fastgpt/global/core/chat/type';

const prompt = `
您的任务是生成根据用户问题,从不同角度,生成两个不同版本的问题,以便可以从矢量数据库检索相关文档。例如:
问题: FastGPT如何使用?
OUTPUT: ["FastGPT使用教程。","怎么使用FastGPT?"]
-------------------
问题: FastGPT如何收费?
OUTPUT: ["FastGPT收费标准。","FastGPT是如何计费的?"]
-------------------
问题: 怎么FastGPT部署?
OUTPUT: ["FastGPT的部署方式。","如何部署FastGPT?"]
-------------------
问题 question: {{q}}
OUTPUT:
`;
/*
  query extension.
  Resolves referential questions and expands the query based on the conversation context, which helps retrieval.
*/

const defaultPrompt = `作为一个向量检索助手,你的任务是结合历史记录,从不同角度,为“原问题”生成个不同版本的“检索词”,从而提高向量检索的语义丰富度,提高向量检索的精度。生成的问题要求指向对象清晰明确。例如:
历史记录:
"""
"""
原问题: 介绍下剧情。
检索词: ["发生了什么故事?","故事梗概是什么?","讲述了什么故事?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
"""
原问题: 怎么下载
检索词: ["FastGPT 怎么下载?","下载 FastGPT 需要什么条件?","有哪些渠道可以下载 FastGPT?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
Q: 报错 "no connection"
A: 报错"no connection"可能是因为……
"""
原问题: 怎么解决
检索词: ["FastGPT 报错"no connection"如何解决?", "报错 'no connection' 是什么原因?", "FastGPT提示'no connection',要怎么办?"]
----------------
历史记录:
"""
Q: 作者是谁?
A: FastGPT 的作者是 labring。
"""
原问题: 介绍下他
检索词: ["介绍下 FastGPT 的作者 labring。","作者 labring 的背景信息。","labring 为什么要做 FastGPT?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 当前对话是关于 FatGPT 的介绍和使用等。
"""
原问题: 高级编排怎么用
检索词: ["FastGPT的高级编排是什么?","FastGPT高级编排的使用教程。","FastGPT高级编排有什么用?"]
----------------
历史记录:
"""
Q: 对话背景。
A: 关于 FatGPT 的介绍和使用等问题。
"""
原问题: 你好。
检索词: ["你好"]
----------------
历史记录:
"""
Q: FastGPT 如何收费?
A: FastGPT 收费可以参考……
"""
原问题: 你知道 laf 么?
检索词: ["laf是什么?","如何使用laf?","laf的介绍。"]
----------------
历史记录:
"""
Q: FastGPT 的优势
A: 1. 开源
2. 简便
3. 扩展性强
"""
原问题: 介绍下第2点。
检索词: ["介绍下 FastGPT 简便的优势", "FastGPT 为什么使用起来简便?","FastGPT的有哪些简便的功能?"]。
----------------
历史记录:
"""
Q: 什么是 FastGPT?
A: FastGPT 是一个 RAG 平台。
Q: 什么是 Laf?
A: Laf 是一个云函数开发平台。
"""
原问题: 它们有什么关系?
检索词: ["FastGPT和Laf有什么关系?","FastGPT的RAG是用Laf实现的么?"]
----------------
历史记录:
"""
{{histories}}
"""
原问题: {{query}}
检索词: `;

export const queryExtension = async ({
chatBg,
query,
histories = [],
model
}: {
chatBg?: string;
query: string;
histories: ChatItemType[];
model: string;
}): Promise<{
rawQuery: string;
extensionQueries: string[];
model: string;
inputTokens: number;
outputTokens: number;
}> => {
const systemFewShot = chatBg
? `Q: 对话背景。
A: ${chatBg}
`
: '';
const historyFewShot = histories
.map((item) => {
const role = item.obj === 'Human' ? 'Q' : 'A';
return `${role}: ${item.value}`;
})
.join('\n');
const concatFewShot = `${systemFewShot}${historyFewShot}`.trim();

export const searchQueryExtension = async ({ query, model }: { query: string; model: string }) => {
const ai = getAIApi({
timeout: 480000
});

const result = await ai.chat.completions.create({
model,
temperature: 0,
model: model,
temperature: 0.01,
messages: [
{
role: 'user',
content: replaceVariable(prompt, { q: query })
content: replaceVariable(defaultPrompt, {
query: `${query}`,
histories: concatFewShot
})
}
],
stream: false
});

const answer = result.choices?.[0]?.message?.content || '';
let answer = result.choices?.[0]?.message?.content || '';
if (!answer) {
return {
queries: [query],
rawQuery: query,
extensionQueries: [],
model,
inputTokens: 0,
responseTokens: 0
outputTokens: 0
};
}

answer = answer.replace(/\\"/g, '"');

try {
const queries = JSON.parse(answer) as string[];

return {
queries: JSON.parse(answer) as string[],
rawQuery: query,
extensionQueries: queries,
model,
inputTokens: result.usage?.prompt_tokens || 0,
responseTokens: result.usage?.completion_tokens || 0
outputTokens: result.usage?.completion_tokens || 0
};
} catch (error) {
console.log(error);
return {
queries: [query],
rawQuery: query,
extensionQueries: [],
model,
inputTokens: 0,
responseTokens: 0
outputTokens: 0
};
}
};
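For reference, a hedged sketch of how the rewritten queryExtension might be consumed by search code. The function signature and return shape come from the diff above; the wrapper, the relative import path (which assumes a caller under packages/service/core/dataset, like search/utils.ts later in this commit), and the sample model name are assumptions:

// Sketch: calling queryExtension and falling back to the raw query when expansion yields nothing.
import { queryExtension } from '../../ai/functions/queryExtension';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';

export async function expandForSearch(query: string, histories: ChatItemType[]) {
  const { rawQuery, extensionQueries, inputTokens, outputTokens } = await queryExtension({
    chatBg: '', // optional standing background for the conversation
    query,
    histories,
    model: 'gpt-3.5-turbo' // whichever LLM is configured for query extension
  });

  // An empty extensionQueries array means the model response could not be parsed as JSON,
  // so the caller searches with the raw query alone.
  return {
    searchQueries: [rawQuery, ...extensionQueries],
    tokens: inputTokens + outputTokens
  };
}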
@@ -46,7 +46,9 @@ export function ChatContextFilter({

/* total tokens exceed the limit; the system prompt must be kept */
if (maxTokens <= 0) {
chats.shift();
if (chats.length > 1) {
chats.shift();
}
break;
}
}
packages/service/core/dataset/search/utils.ts (new file; 62 lines)
@@ -0,0 +1,62 @@
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { queryExtension } from '../../ai/functions/queryExtension';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
import { hashStr } from '@fastgpt/global/common/string/tools';

export const datasetSearchQueryExtension = async ({
  query,
  extensionModel,
  extensionBg = '',
  histories = []
}: {
  query: string;
  extensionModel?: LLMModelItemType;
  extensionBg?: string;
  histories?: ChatItemType[];
}) => {
  // concat query
  let queries = [query];
  let rewriteQuery =
    histories.length > 0
      ? `${histories
          .map((item) => {
            return `${item.obj}: ${item.value}`;
          })
          .join('\n')}
Human: ${query}
`
      : query;

  // ai extension
  const aiExtensionResult = await (async () => {
    if (!extensionModel) return;
    const result = await queryExtension({
      chatBg: extensionBg,
      query,
      histories,
      model: extensionModel.model
    });
    if (result.extensionQueries?.length === 0) return;
    return result;
  })();

  if (aiExtensionResult) {
    queries = queries.concat(aiExtensionResult.extensionQueries);
    rewriteQuery = queries.join('\n');
  }

  const set = new Set<string>();
  const filterSameQueries = queries.filter((item) => {
    // strip punctuation and whitespace so only the text itself is compared
    const str = hashStr(item.replace(/[^\p{L}\p{N}]/gu, ''));
    if (set.has(str)) return false;
    set.add(str);
    return true;
  });

  return {
    concatQueries: filterSameQueries,
    rewriteQuery,
    aiExtensionResult
  };
};
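A hedged sketch of wiring this helper in front of a dataset search. Only datasetSearchQueryExtension, its parameters, and its concatQueries/rewriteQuery/aiExtensionResult return fields come from the file above; the wrapper function, the relative import path, and the commented-out search call are assumptions:

// Sketch: run query extension before a dataset search.
import { datasetSearchQueryExtension } from './utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';

export async function searchWithExtension(params: {
  query: string;
  extensionModel?: LLMModelItemType; // undefined skips the AI extension step
  extensionBg?: string;
}) {
  const { concatQueries, rewriteQuery, aiExtensionResult } = await datasetSearchQueryExtension({
    ...params,
    histories: []
  });

  // concatQueries is already de-duplicated (punctuation and whitespace are ignored when
  // comparing), so each variant is embedded and searched once; rewriteQuery concatenates the
  // variants for full-text style retrieval, and aiExtensionResult carries token usage.
  // const results = await searchDatasetData({ queries: concatQueries /* ... */ }); // placeholder
  return { concatQueries, rewriteQuery, usage: aiExtensionResult };
}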
@@ -131,6 +131,7 @@ export const iconPaths = {
'modal/edit': () => import('./icons/modal/edit.svg'),
'modal/manualDataset': () => import('./icons/modal/manualDataset.svg'),
'modal/selectSource': () => import('./icons/modal/selectSource.svg'),
'modal/setting': () => import('./icons/modal/setting.svg'),
more: () => import('./icons/more.svg'),
out: () => import('./icons/out.svg'),
'phoneTabbar/me': () => import('./icons/phoneTabbar/me.svg'),
@@ -0,0 +1 @@
(New 128×128 SVG icon file, 3.6 KiB; the single line of path data is omitted here.)
@@ -1,11 +1,14 @@
import React from 'react';
import Editor, { loader } from '@monaco-editor/react';
import React, { useEffect } from 'react';
import Editor, { loader, useMonaco } from '@monaco-editor/react';
import { useCallback, useRef, useState } from 'react';
import { Box, BoxProps, useToast } from '@chakra-ui/react';
import { Box, BoxProps } from '@chakra-ui/react';
import MyIcon from '../../Icon';
import { EditorVariablePickerType } from '../PromptEditor/type';
import { useToast } from '../../../../hooks/useToast';
import { useTranslation } from 'next-i18next';

loader.config({
paths: { vs: '/js/monaco-editor.0.43.0' }
paths: { vs: 'https://cdn.staticfile.net/monaco-editor/0.43.0/min/vs' }
});

type Props = Omit<BoxProps, 'onChange' | 'resize' | 'height'> & {
@@ -14,6 +17,7 @@ type Props = Omit<BoxProps, 'onChange' | 'resize' | 'height'> & {
defaultValue?: string;
value?: string;
onChange?: (e: string) => void;
variables?: EditorVariablePickerType[];
};

const options = {
@@ -38,10 +42,43 @@ const options = {
tabSize: 2
};

const JSONEditor = ({ defaultValue, value, onChange, resize, ...props }: Props) => {
const toast = useToast();
const JSONEditor = ({ defaultValue, value, onChange, resize, variables, ...props }: Props) => {
const { toast } = useToast();
const { t } = useTranslation();
const [height, setHeight] = useState(props.height || 100);
const initialY = useRef(0);
const completionRegisterRef = useRef<any>();
const monaco = useMonaco();

useEffect(() => {
completionRegisterRef.current = monaco?.languages.registerCompletionItemProvider('json', {
triggerCharacters: ['"'],
provideCompletionItems: function (model, position) {
var word = model.getWordUntilPosition(position);
var range = {
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: word.startColumn,
endColumn: word.endColumn
};
return {
suggestions:
variables?.map((item) => ({
label: `${item.label}`,
kind: monaco.languages.CompletionItemKind.Function,
documentation: item.label,
insertText: `{{${item.label}}}`,
range: range
})) || [],
dispose: () => {}
};
}
});

return () => {
completionRegisterRef.current?.dispose();
};
}, [monaco, completionRegisterRef.current]);

const handleMouseDown = useCallback((e: React.MouseEvent) => {
initialY.current = e.clientY;
@@ -111,15 +148,14 @@ const JSONEditor = ({ defaultValue, value, onChange, resize, ...props }: Props)
onChange={(e) => onChange?.(e || '')}
wrapperProps={{
onBlur: () => {
if (!value) return;
try {
JSON.parse(value as string);
} catch (error: any) {
toast({
title: 'Invalid JSON',
title: t('common.Invalid Json'),
description: error.message,
position: 'top',
status: 'error',
duration: 3000,
status: 'warning',
isClosable: true
});
}
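Finally, a sketch of how the updated JSONEditor might be used with its new variables prop so that typing a double quote offers {{variable}} completions. The component props follow the diff; the import path, the variable items, and any EditorVariablePickerType fields beyond label are assumptions:

// Sketch only: the import path is illustrative and EditorVariablePickerType is assumed
// to carry at least a key and a label (the completion provider reads item.label).
import React, { useState } from 'react';
import JSONEditor from './JsonEditor';

const HttpBodyEditor = () => {
  const [body, setBody] = useState('{\n  "appId": ""\n}');

  return (
    <JSONEditor
      value={body}
      onChange={setBody}
      // Typing '"' inside the editor now suggests appId / chatId and inserts {{appId}} / {{chatId}}.
      variables={[
        { key: 'appId', label: 'appId' },
        { key: 'chatId', label: 'chatId' }
      ]}
    />
  );
};

export default HttpBodyEditor;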