mirror of https://github.com/labring/FastGPT.git
feat: new app page

@@ -4,7 +4,7 @@ import { authChat } from '@/service/utils/auth';
 import { modelServiceToolMap } from '@/service/utils/chat';
 import { ChatItemType } from '@/types/chat';
 import { jsonRes } from '@/service/response';
-import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
+import { ChatModelMap } from '@/constants/model';
 import { pushChatBill } from '@/service/events/pushBill';
 import { resStreamResponse } from '@/service/utils/chat';
 import { appKbSearch } from '../openapi/kb/appKbSearch';
@@ -48,36 +48,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)

   const modelConstantsData = ChatModelMap[model.chat.chatModel];

-  // read the conversation content
-  const prompts = [...content, prompt[0]];
-
   const {
-    code = 200,
-    systemPrompts = [],
-    quote = [],
-    guidePrompt = ''
+    rawSearch = [],
+    userSystemPrompt = [],
+    quotePrompt = []
   } = await (async () => {
     // knowledge base search is enabled
     if (model.chat.relatedKbs?.length > 0) {
-      const { code, searchPrompts, rawSearch, guidePrompt } = await appKbSearch({
+      const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
        model,
        userId,
        fixedQuote: content[content.length - 1]?.quote || [],
        prompt: prompt[0],
-        similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
+        similarity: model.chat.searchSimilarity,
+        limit: model.chat.searchLimit
      });

      return {
-        code,
-        quote: rawSearch,
-        systemPrompts: searchPrompts,
-        guidePrompt
+        rawSearch: rawSearch,
+        userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
+        quotePrompt: [quotePrompt]
      };
    }
    if (model.chat.systemPrompt) {
      return {
-        guidePrompt: model.chat.systemPrompt,
-        systemPrompts: [
+        userSystemPrompt: [
          {
            obj: ChatRoleEnum.System,
            value: model.chat.systemPrompt
@@ -92,13 +87,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const conversationId = chatId || String(new Types.ObjectId());
   !chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);
   if (showModelDetail) {
-    guidePrompt && res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(guidePrompt));
-    res.setHeader(QUOTE_LEN_HEADER, quote.length);
+    userSystemPrompt[0] &&
+      res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(userSystemPrompt[0].value));
+    res.setHeader(QUOTE_LEN_HEADER, rawSearch.length);
   }

   // search result is empty
-  if (code === 201) {
-    const response = systemPrompts[0]?.value;
+  if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
+    const response = model.chat.searchEmptyText;
     await saveChat({
       chatId,
       newChatId: conversationId,
@@ -116,11 +112,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     return res.end(response);
   }

-  prompts.unshift(...systemPrompts);
+  // read the conversation content
+  const prompts = [...quotePrompt, ...content, ...userSystemPrompt, prompt[0]];

   // content check
   await sensitiveCheck({
-    input: [...systemPrompts, prompt[0]].map((item) => item.value).join('')
+    input: [...quotePrompt, ...userSystemPrompt, prompt[0]].map((item) => item.value).join('')
   });

   // calculate the temperature
@@ -162,8 +159,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       {
         ...prompt[1],
         value: responseContent,
-        quote: showModelDetail ? quote : [],
-        systemPrompt: showModelDetail ? guidePrompt : ''
+        quote: showModelDetail ? rawSearch : [],
+        systemPrompt: showModelDetail ? userSystemPrompt[0]?.value : ''
       }
     ],
     userId
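
The hunks above replace the old `systemPrompts`/`guidePrompt` plumbing with three values returned by `appKbSearch`: `quotePrompt` (the knowledge-base excerpt), `userSystemPrompt` (the user-configured prompt) and `rawSearch` (the raw quote records), and the message list is now assembled as quote first, then history, then the user prompt, then the latest question. A minimal sketch of that ordering; the `ChatMessage` type and `buildPrompts` helper are illustrative stand-ins, not code from the repository:

```ts
// Simplified stand-ins for the repo's ChatItemType / ChatRoleEnum.
type Role = 'System' | 'Human' | 'AI';
interface ChatMessage {
  obj: Role;
  value: string;
}

// Mirrors `[...quotePrompt, ...content, ...userSystemPrompt, prompt[0]]` from
// the diff: quote block first, then history, then the user's system prompt,
// and the newest question last.
function buildPrompts(
  quotePrompt: ChatMessage[],
  history: ChatMessage[],
  userSystemPrompt: ChatMessage[],
  question: ChatMessage
): ChatMessage[] {
  return [...quotePrompt, ...history, ...userSystemPrompt, question];
}

// Example usage
const messages = buildPrompts(
  [{ obj: 'System', value: '知识库:\n1: [Q\nA]' }],
  [
    { obj: 'Human', value: 'earlier question' },
    { obj: 'AI', value: 'earlier answer' }
  ],
  [{ obj: 'Human', value: 'Answer from the knowledge base.' }],
  { obj: 'Human', value: 'latest question' }
);
console.log(messages.map((m) => m.obj).join(' -> ')); // System -> Human -> AI -> Human -> Human
```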

@@ -6,7 +6,6 @@ import { authUser } from '@/service/utils/auth';
 import { ChatItemType } from '@/types/chat';
 import { authModel } from '@/service/utils/auth';
 import mongoose from 'mongoose';
-import { ModelStatusEnum } from '@/constants/model';
 import type { ModelSchema } from '@/types/mongoSchema';

 /* initialize my chat box, authentication required */
@@ -29,8 +28,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   if (!myModel) {
     const { _id } = await Model.create({
       name: '应用1',
-      userId,
-      status: ModelStatusEnum.running
+      userId
     });
     model = (await Model.findById(_id)) as ModelSchema;
   } else {
@@ -95,7 +93,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     model: {
       name: model.name,
       avatar: model.avatar,
-      intro: model.share.intro,
+      intro: model.intro,
       canUse: model.share.isShare || String(model.userId) === userId
     },
     chatModel: model.chat.chatModel,

@@ -4,7 +4,7 @@ import { authShareChat } from '@/service/utils/auth';
 import { modelServiceToolMap } from '@/service/utils/chat';
 import { ChatItemSimpleType } from '@/types/chat';
 import { jsonRes } from '@/service/response';
-import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
+import { ChatModelMap } from '@/constants/model';
 import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
 import { resStreamResponse } from '@/service/utils/chat';
 import { ChatRoleEnum } from '@/constants/chat';
@@ -40,26 +40,33 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   });

   const modelConstantsData = ChatModelMap[model.chat.chatModel];
+  const prompt = prompts[prompts.length - 1];

-  const { code = 200, systemPrompts = [] } = await (async () => {
+  const {
+    rawSearch = [],
+    userSystemPrompt = [],
+    quotePrompt = []
+  } = await (async () => {
     // knowledge base search is enabled
     if (model.chat.relatedKbs?.length > 0) {
-      const { code, searchPrompts } = await appKbSearch({
+      const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
        model,
        userId,
        fixedQuote: [],
-        prompt: prompts[prompts.length - 1],
-        similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
+        prompt: prompt,
+        similarity: model.chat.searchSimilarity,
+        limit: model.chat.searchLimit
      });

      return {
-        code,
-        systemPrompts: searchPrompts
+        rawSearch: rawSearch,
+        userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
+        quotePrompt: [quotePrompt]
      };
    }
    if (model.chat.systemPrompt) {
      return {
-        systemPrompts: [
+        userSystemPrompt: [
          {
            obj: ChatRoleEnum.System,
            value: model.chat.systemPrompt
@@ -71,15 +78,17 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   })();

   // search result is empty
-  if (code === 201) {
-    return res.send(systemPrompts[0]?.value);
+  if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
+    const response = model.chat.searchEmptyText;
+    return res.end(response);
   }

-  prompts.unshift(...systemPrompts);
+  // read the conversation content
+  const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];

   // content check
   await sensitiveCheck({
-    input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
+    input: [...quotePrompt, ...userSystemPrompt, prompt].map((item) => item.value).join('')
   });

   // calculate the temperature
@@ -93,7 +102,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   ].chatCompletion({
     apiKey: userOpenAiKey || systemAuthKey,
     temperature: +temperature,
-    messages: prompts,
+    messages: completePrompts,
     stream: true,
     res,
     chatId: historyId
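
Both chat handlers drop the old `code === 201` signal from `appKbSearch`; the caller now inspects the returned `quotePrompt` itself and, when a knowledge-base-backed app found nothing, answers with the configurable `searchEmptyText`. A small sketch of that guard, with the handler context stripped away and the config type simplified:

```ts
interface ChatConfig {
  relatedKbs?: string[];
  searchEmptyText?: string;
}

// Returns the canned "empty search" reply when a KB-backed app produced no
// quote, mirroring the new condition in the diff; otherwise returns null and
// the caller continues with the normal completion flow.
function emptySearchReply(chat: ChatConfig, quoteValue: string | undefined): string | null {
  if ((chat.relatedKbs?.length ?? 0) > 0 && !quoteValue && chat.searchEmptyText) {
    return chat.searchEmptyText;
  }
  return null;
}

console.log(emptySearchReply({ relatedKbs: ['kb1'], searchEmptyText: '我不知道' }, undefined)); // '我不知道'
console.log(emptySearchReply({ relatedKbs: ['kb1'], searchEmptyText: '我不知道' }, '知识库: ...')); // null
```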

@@ -50,7 +50,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     model: {
       name: model.name,
       avatar: model.avatar,
-      intro: model.share.intro
+      intro: model.intro
     },
     chatModel: model.chat.chatModel
   }

@@ -3,7 +3,6 @@ import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@/service/response';
 import { connectToDatabase } from '@/service/mongo';
 import { authUser } from '@/service/utils/auth';
-import { ModelStatusEnum } from '@/constants/model';
 import { Model } from '@/service/models/model';

 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -32,8 +31,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
   // create the model
   const response = await Model.create({
     name,
-    userId,
-    status: ModelStatusEnum.running
+    userId
   });

   jsonRes(res, {

@@ -31,7 +31,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
     $and: [
       { 'share.isShare': true },
       {
-        $or: [{ name: { $regex: regex } }, { 'share.intro': { $regex: regex } }]
+        $or: [{ name: { $regex: regex } }, { intro: { $regex: regex } }]
       }
     ]
   };
@@ -66,6 +66,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
         avatar: { $ifNull: ['$avatar', '/icon/logo.png'] },
         name: 1,
         userId: 1,
+        intro: 1,
         share: 1,
         isCollection: {
           $cond: {
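
The share-list endpoint now searches the top-level `intro` field rather than `share.intro`, and projects `intro` in the aggregation. A rough sketch of how that keyword filter could be assembled with Mongoose; escaping, pagination and the rest of the pipeline are intentionally omitted:

```ts
import type { FilterQuery } from 'mongoose';

interface ModelDoc {
  name: string;
  intro: string;
  share: { isShare: boolean };
}

// Builds the $and/$or filter shown in the diff: only shared models whose
// name or intro matches the keyword.
function buildShareListFilter(keyword: string): FilterQuery<ModelDoc> {
  const regex = new RegExp(keyword, 'i');
  return {
    $and: [
      { 'share.isShare': true },
      { $or: [{ name: { $regex: regex } }, { intro: { $regex: regex } }] }
    ]
  };
}
```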

@@ -9,10 +9,10 @@ import { authModel } from '@/service/utils/auth';
 /* get my model */
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
-    const { name, avatar, chat, share } = req.body as ModelUpdateParams;
+    const { name, avatar, chat, share, intro } = req.body as ModelUpdateParams;
     const { modelId } = req.query as { modelId: string };

-    if (!name || !chat || !modelId) {
+    if (!modelId) {
       throw new Error('参数错误');
     }

@@ -35,10 +35,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
       {
         name,
         avatar,
+        intro,
         chat,
-        'share.isShare': share.isShare,
-        'share.isShareDetail': share.isShareDetail,
-        'share.intro': share.intro
+        ...(share && {
+          'share.isShare': share.isShare,
+          'share.isShareDetail': share.isShareDetail
+        })
       }
     );

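
The update endpoint now accepts a top-level `intro`, only requires `modelId`, and touches the `share.*` flags only when a `share` object was actually sent, via a guarded object spread. A minimal illustration of that pattern with a simplified payload type (the real handler also passes `chat` and writes the result through the model update call):

```ts
interface UpdateBody {
  name?: string;
  avatar?: string;
  intro?: string;
  share?: { isShare: boolean; isShareDetail: boolean };
}

// Builds the update payload the way the diff does: plain fields always,
// share flags only when `share` was provided.
function buildUpdatePayload(body: UpdateBody) {
  const { name, avatar, intro, share } = body;
  return {
    name,
    avatar,
    intro,
    ...(share && {
      'share.isShare': share.isShare,
      'share.isShareDetail': share.isShareDetail
    })
  };
}

console.log(buildUpdatePayload({ name: 'demo' })); // no 'share.*' keys in the output
```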

@@ -4,12 +4,11 @@ import { authUser, authModel, getApiKey } from '@/service/utils/auth';
 import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat';
 import { ChatItemSimpleType } from '@/types/chat';
 import { jsonRes } from '@/service/response';
-import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
+import { ChatModelMap } from '@/constants/model';
 import { pushChatBill } from '@/service/events/pushBill';
 import { ChatRoleEnum } from '@/constants/chat';
 import { withNextCors } from '@/service/utils/tools';
 import { BillTypeEnum } from '@/constants/user';
 import { sensitiveCheck } from '../../openapi/text/sensitiveCheck';
 import { NEW_CHATID_HEADER } from '@/constants/chat';
 import { Types } from 'mongoose';
 import { appKbSearch } from '../kb/appKbSearch';
@@ -66,48 +65,46 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   });

   const modelConstantsData = ChatModelMap[model.chat.chatModel];
+  const prompt = prompts[prompts.length - 1];

-  let systemPrompts: {
-    obj: ChatRoleEnum;
-    value: string;
-  }[] = [];
+  const { userSystemPrompt = [], quotePrompt = [] } = await (async () => {
+    // knowledge base search is enabled
+    if (model.chat.relatedKbs?.length > 0) {
+      const { userSystemPrompt, quotePrompt } = await appKbSearch({
+        model,
+        userId,
+        fixedQuote: [],
+        prompt: prompt,
+        similarity: model.chat.searchSimilarity,
+        limit: model.chat.searchLimit
+      });

-  // knowledge base search is enabled
-  if (model.chat.relatedKbs?.length > 0) {
-    const { code, searchPrompts } = await appKbSearch({
-      model,
-      userId,
-      fixedQuote: [],
-      prompt: prompts[prompts.length - 1],
-      similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
-    });
-
-    // search result is empty
-    if (code === 201) {
-      return isStream
-        ? res.send(searchPrompts[0]?.value)
-        : jsonRes(res, {
-            data: searchPrompts[0]?.value,
-            message: searchPrompts[0]?.value
-          });
+      return {
+        userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
+        quotePrompt: [quotePrompt]
+      };
     }
+    if (model.chat.systemPrompt) {
+      return {
+        userSystemPrompt: [
+          {
+            obj: ChatRoleEnum.System,
+            value: model.chat.systemPrompt
+          }
+        ]
+      };
+    }
+    return {};
+  })();

-    systemPrompts = searchPrompts;
-  } else if (model.chat.systemPrompt) {
-    systemPrompts = [
-      {
-        obj: ChatRoleEnum.System,
-        value: model.chat.systemPrompt
-      }
-    ];
+  // search result is empty
+  if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
+    const response = model.chat.searchEmptyText;
+    return res.end(response);
   }

-  prompts.unshift(...systemPrompts);
-
-  // content check
-  await sensitiveCheck({
-    input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
-  });
+  // read the conversation content
+  const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];

   // calculate the temperature
   const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
@@ -123,7 +120,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   await modelServiceToolMap[model.chat.chatModel].chatCompletion({
     apiKey,
     temperature: +temperature,
-    messages: prompts,
+    messages: completePrompts,
     stream: isStream,
     res,
     chatId: conversationId
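
The temperature handling itself is unchanged: the app stores a 0–10 value and scales it onto the model's `maxTemperature` from `ChatModelMap`. A worked sketch of that mapping; the rounding precision and the sample numbers are assumptions, since the diff truncates the `.toFixed(` call:

```ts
// model.chat.temperature is a 0-10 slider value; maxTemperature comes from
// the ChatModelMap entry for the selected chat model.
function toApiTemperature(sliderValue: number, maxTemperature: number): number {
  return +(maxTemperature * (sliderValue / 10)).toFixed(2);
}

console.log(toApiTemperature(7, 1.2)); // 0.84
```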

@@ -18,7 +18,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     ({ _id, apiKey, createTime, lastUsedTime }) => {
       return {
         id: _id,
-        apiKey: `${apiKey.substring(0, 2)}******${apiKey.substring(apiKey.length - 2)}`,
+        apiKey: `******${apiKey.substring(apiKey.length - 4)}`,
         createTime,
         lastUsedTime
       };
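
The key list now hides everything except the last four characters instead of exposing both a prefix and a suffix. A one-line helper equivalent to the new template string (the function name is illustrative):

```ts
// `******${apiKey.substring(apiKey.length - 4)}` from the diff.
const maskApiKey = (apiKey: string): string => `******${apiKey.substring(apiKey.length - 4)}`;

console.log(maskApiKey('fastgpt-abcdefghijklmnopqrstuvwx')); // "******uvwx"
```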

@@ -5,7 +5,6 @@ import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
 import type { ChatItemSimpleType } from '@/types/chat';
 import type { ModelSchema } from '@/types/mongoSchema';
-import { appVectorSearchModeEnum } from '@/constants/model';
 import { authModel } from '@/service/utils/auth';
 import { ChatModelMap } from '@/constants/model';
 import { ChatRoleEnum } from '@/constants/chat';
@@ -21,16 +20,19 @@ export type QuoteItemType = {
 type Props = {
   prompts: ChatItemSimpleType[];
   similarity: number;
+  limit: number;
   appId: string;
 };
 type Response = {
-  code: 200 | 201;
   rawSearch: QuoteItemType[];
-  guidePrompt: string;
-  searchPrompts: {
+  userSystemPrompt: {
     obj: ChatRoleEnum;
     value: string;
-  }[];
+  };
+  quotePrompt: {
+    obj: ChatRoleEnum;
+    value: string;
+  };
 };

 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -41,7 +43,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     throw new Error('userId is empty');
   }

-  const { prompts, similarity, appId } = req.body as Props;
+  const { prompts, similarity, limit, appId } = req.body as Props;

   if (!similarity || !Array.isArray(prompts) || !appId) {
     throw new Error('params is error');
@@ -58,7 +60,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     userId,
     fixedQuote: [],
     prompt: prompts[prompts.length - 1],
-    similarity
+    similarity,
+    limit
   });

   jsonRes<Response>(res, {
@@ -78,13 +81,15 @@ export async function appKbSearch({
   userId,
   fixedQuote,
   prompt,
-  similarity
+  similarity = 0.8,
+  limit = 5
 }: {
   model: ModelSchema;
   userId: string;
   fixedQuote: QuoteItemType[];
   prompt: ChatItemSimpleType;
   similarity: number;
+  limit: number;
 }): Promise<Response> {
   const modelConstantsData = ChatModelMap[model.chat.chatModel];

@@ -103,7 +108,7 @@ export async function appKbSearch({
       .map((item) => `'${item}'`)
       .join(',')}) AND vector <#> '[${promptVector[0]}]' < -${similarity} order by vector <#> '[${
       promptVector[0]
-    }]' limit 10;
+    }]' limit ${limit};
     COMMIT;`
   );

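
The hunk above threads the new `limit` argument into the pgvector query in place of the hard-coded `limit 10`, while `similarity` keeps bounding the `<#>` (negative inner product) distance. A hedged sketch of an equivalent parameterized query using the `pg` driver directly; the repository itself builds the SQL string inline through its `PgClient`, and the table and column names used here ("kb_data", "kb_id", "q", "a") are assumptions for illustration only:

```ts
import { Pool } from 'pg';

const pool = new Pool();

// Rows whose embedding is close enough to the prompt vector
// (vector <#> query < -similarity), ordered by that distance and capped at `limit`.
async function vectorSearch(
  promptVector: number[],
  kbIds: string[],
  similarity: number,
  limit: number
) {
  const vectorLiteral = `[${promptVector.join(',')}]`;
  const { rows } = await pool.query(
    `SELECT id, q, a
       FROM kb_data
      WHERE kb_id = ANY($1)
        AND (vector <#> $2::vector) < -$3
      ORDER BY vector <#> $2::vector
      LIMIT $4;`,
    [kbIds, vectorLiteral, similarity, limit]
  );
  return rows;
}
```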

@@ -115,7 +120,7 @@ export async function appKbSearch({
     ...searchRes.slice(0, 3),
     ...fixedQuote.slice(0, 2),
     ...searchRes.slice(3),
-    ...fixedQuote.slice(2, 5)
+    ...fixedQuote.slice(2, 4)
   ].filter((item) => {
     if (idSet.has(item.id)) {
       return false;
@@ -125,86 +130,44 @@ export async function appKbSearch({
   });

   // count the tokens of the fixed prompt
-  const guidePrompt = model.chat.systemPrompt // user system prompt
+  const userSystemPrompt = model.chat.systemPrompt // user system prompt
     ? {
-        obj: ChatRoleEnum.System,
+        obj: ChatRoleEnum.Human,
        value: model.chat.systemPrompt
      }
-    : model.chat.searchMode === appVectorSearchModeEnum.noContext
-    ? {
-        obj: ChatRoleEnum.System,
-        value: `知识库是关于"${model.name}"的内容,根据知识库内容回答问题.`
-      }
     : {
-        obj: ChatRoleEnum.System,
-        value: `玩一个问答游戏,规则为:
-1.你完全忘记你已有的知识
-2.你只回答关于"${model.name}"的问题
-3.你只从知识库中选择内容进行回答
-4.如果问题不在知识库中,你会回答:"我不知道。"
-请务必遵守规则`
+        obj: ChatRoleEnum.Human,
+        value: `知识库是关于 ${model.name} 的内容,参考知识库回答问题。与 "${model.name}" 无关内容,直接回复: "我不知道"。`
      };

   const fixedSystemTokens = modelToolMap[model.chat.chatModel].countTokens({
-    messages: [guidePrompt]
+    messages: [userSystemPrompt]
   });

   // filter part quote by maxToken
   const sliceResult = modelToolMap[model.chat.chatModel]
     .tokenSlice({
       maxToken: modelConstantsData.systemMaxToken - fixedSystemTokens,
-      messages: filterSearch.map((item) => ({
+      messages: filterSearch.map((item, i) => ({
         obj: ChatRoleEnum.System,
-        value: `${item.q}\n${item.a}`
+        value: `${i + 1}: [${item.q}\n${item.a}]`
       }))
     })
-    .map((item) => item.value);
+    .map((item) => item.value)
+    .join('\n')
+    .trim();

   // slice filterSearch
   const rawSearch = filterSearch.slice(0, sliceResult.length);

-  // system prompt
-  const systemPrompt = sliceResult.join('\n').trim();
-
-  /* high similarity + no reply */
-  if (!systemPrompt && model.chat.searchMode === appVectorSearchModeEnum.hightSimilarity) {
-    return {
-      code: 201,
-      rawSearch: [],
-      guidePrompt: '',
-      searchPrompts: [
-        {
-          obj: ChatRoleEnum.System,
-          value: '对不起,你的问题不在知识库中。'
-        }
-      ]
-    };
-  }
-  /* high similarity + no context: add no extra knowledge, use only the system prompt */
-  if (!systemPrompt && model.chat.searchMode === appVectorSearchModeEnum.noContext) {
-    return {
-      code: 200,
-      rawSearch: [],
-      guidePrompt: model.chat.systemPrompt || '',
-      searchPrompts: model.chat.systemPrompt
-        ? [
-            {
-              obj: ChatRoleEnum.System,
-              value: model.chat.systemPrompt
-            }
-          ]
-        : []
-    };
-  }
+  const quoteText = sliceResult ? `知识库:\n${sliceResult}` : '';

   return {
-    code: 200,
     rawSearch,
-    guidePrompt: guidePrompt.value || '',
-    searchPrompts: [
-      {
-        obj: ChatRoleEnum.System,
-        value: `知识库:<${systemPrompt}>`
-      },
-      guidePrompt
-    ]
+    userSystemPrompt,
+    quotePrompt: {
+      obj: ChatRoleEnum.System,
+      value: quoteText
+    }
   };
 }
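
The rewritten tail of `appKbSearch` numbers each quote (`${i + 1}: [q\na]`), trims the list to the model's system token budget with `tokenSlice`, joins it into a single `知识库:` block, and returns that block as `quotePrompt` next to the untouched `userSystemPrompt`. A simplified sketch of that assembly; the naive `countTokens` below is only a stand-in for the repository's `modelToolMap` tokenizer, so the numbers are illustrative:

```ts
interface QuoteItem {
  id: string;
  q: string;
  a: string;
}

// Rough stand-in for the repo's token counting (the real code uses the chat
// model's tokenizer via modelToolMap).
const countTokens = (text: string): number => Math.ceil(text.length / 4);

// Numbers each quote, keeps as many as fit into maxToken, and builds the
// final "知识库:" block used as quotePrompt.value in the diff.
function buildQuotePrompt(quotes: QuoteItem[], maxToken: number) {
  const kept: QuoteItem[] = [];
  const lines: string[] = [];
  let used = 0;

  for (const [i, item] of quotes.entries()) {
    const line = `${i + 1}: [${item.q}\n${item.a}]`;
    const cost = countTokens(line);
    if (used + cost > maxToken) break;
    used += cost;
    kept.push(item);
    lines.push(line);
  }

  const quoteText = lines.length > 0 ? `知识库:\n${lines.join('\n')}` : '';
  return { rawSearch: kept, quoteText };
}

console.log(buildQuotePrompt([{ id: '1', q: 'What is FastGPT?', a: 'A KB-based QA app.' }], 200));
```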

@@ -4,7 +4,7 @@ import { jsonRes } from '@/service/response';
 import { connectToDatabase, OpenApi } from '@/service/mongo';
 import { authUser } from '@/service/utils/auth';
 import { customAlphabet } from 'nanoid';
-const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890');
+const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
@@ -14,11 +14,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)

     const count = await OpenApi.find({ userId }).countDocuments();

-    if (count >= 5) {
-      throw new Error('最多 5 组API Key');
+    if (count >= 10) {
+      throw new Error('最多 10 组 API 秘钥');
     }

-    const apiKey = `${userId}-${nanoid()}`;
+    const apiKey = `fastgpt-${nanoid()}`;

     await OpenApi.create({
       userId,
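
New API keys drop the embedded `userId` and switch to a `fastgpt-` prefix over a 24-character nanoid. A minimal reproduction of the generator, using the same alphabet and length as the diff:

```ts
import { customAlphabet } from 'nanoid';

// 24 characters drawn from lowercase letters and digits, as in the diff.
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);

export const createApiKey = (): string => `fastgpt-${nanoid()}`;

console.log(createApiKey()); // e.g. "fastgpt-8f3k..." (random each time)
```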