feat: new app page

archer · 2023-06-17 17:04:47 +08:00
parent df2fda6176 · commit 61447c60ac
45 changed files with 1652 additions and 1338 deletions


@@ -4,7 +4,7 @@ import { authChat } from '@/service/utils/auth';
 import { modelServiceToolMap } from '@/service/utils/chat';
 import { ChatItemType } from '@/types/chat';
 import { jsonRes } from '@/service/response';
-import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
+import { ChatModelMap } from '@/constants/model';
 import { pushChatBill } from '@/service/events/pushBill';
 import { resStreamResponse } from '@/service/utils/chat';
 import { appKbSearch } from '../openapi/kb/appKbSearch';
@@ -48,36 +48,31 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const modelConstantsData = ChatModelMap[model.chat.chatModel];
-  // read the conversation content
-  const prompts = [...content, prompt[0]];
   const {
-    code = 200,
-    systemPrompts = [],
-    quote = [],
-    guidePrompt = ''
+    rawSearch = [],
+    userSystemPrompt = [],
+    quotePrompt = []
   } = await (async () => {
     // knowledge base search is used
     if (model.chat.relatedKbs?.length > 0) {
-      const { code, searchPrompts, rawSearch, guidePrompt } = await appKbSearch({
+      const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
         model,
         userId,
         fixedQuote: content[content.length - 1]?.quote || [],
         prompt: prompt[0],
-        similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
+        similarity: model.chat.searchSimilarity,
+        limit: model.chat.searchLimit
       });
       return {
-        code,
-        quote: rawSearch,
-        systemPrompts: searchPrompts,
-        guidePrompt
+        rawSearch: rawSearch,
+        userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
+        quotePrompt: [quotePrompt]
       };
     }
     if (model.chat.systemPrompt) {
       return {
-        guidePrompt: model.chat.systemPrompt,
-        systemPrompts: [
+        userSystemPrompt: [
           {
             obj: ChatRoleEnum.System,
             value: model.chat.systemPrompt
@@ -92,13 +87,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const conversationId = chatId || String(new Types.ObjectId());
   !chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);
   if (showModelDetail) {
-    guidePrompt && res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(guidePrompt));
-    res.setHeader(QUOTE_LEN_HEADER, quote.length);
+    userSystemPrompt[0] &&
+      res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(userSystemPrompt[0].value));
+    res.setHeader(QUOTE_LEN_HEADER, rawSearch.length);
   }
   // search result is empty
-  if (code === 201) {
-    const response = systemPrompts[0]?.value;
+  if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
+    const response = model.chat.searchEmptyText;
     await saveChat({
       chatId,
       newChatId: conversationId,
@@ -116,11 +112,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     return res.end(response);
   }
-  prompts.unshift(...systemPrompts);
+  // read the conversation content
+  const prompts = [...quotePrompt, ...content, ...userSystemPrompt, prompt[0]];
   // content check
   await sensitiveCheck({
-    input: [...systemPrompts, prompt[0]].map((item) => item.value).join('')
+    input: [...quotePrompt, ...userSystemPrompt, prompt[0]].map((item) => item.value).join('')
   });
   // calculate temperature
@@ -162,8 +159,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       {
         ...prompt[1],
         value: responseContent,
-        quote: showModelDetail ? quote : [],
-        systemPrompt: showModelDetail ? guidePrompt : ''
+        quote: showModelDetail ? rawSearch : [],
+        systemPrompt: showModelDetail ? userSystemPrompt[0]?.value : ''
       }
     ],
     userId
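
Taken together, the hunks above move the handler from the old systemPrompts/guidePrompt/quote result to the new appKbSearch shape and rebuild the message list from it. A minimal TypeScript sketch of that flow follows; the type definitions and the helper name buildPrompts are illustrative assumptions, while the field names and the array ordering come from the diff.

// Sketch only: the types and helper name are assumptions; field names and
// ordering are taken from the hunks above.
type ChatItemSimpleType = { obj: string; value: string }; // obj holds a ChatRoleEnum value

// Result shape of appKbSearch as destructured in this commit.
type AppKbSearchResult = {
  rawSearch: unknown[];                  // raw vector-search hits, length reported via QUOTE_LEN_HEADER
  userSystemPrompt?: ChatItemSimpleType; // the app owner's system prompt, if any
  quotePrompt?: ChatItemSimpleType;      // system prompt carrying the quoted search results
};

// How the handler now assembles the messages sent to the model:
// quote first, then the stored history, then the app's system prompt, then the new question.
function buildPrompts(
  content: ChatItemSimpleType[], // stored conversation history
  prompt: ChatItemSimpleType[],  // [latest user message, placeholder for the AI reply]
  kb: Partial<AppKbSearchResult>
): ChatItemSimpleType[] {
  const quotePrompt = kb.quotePrompt ? [kb.quotePrompt] : [];
  const userSystemPrompt = kb.userSystemPrompt ? [kb.userSystemPrompt] : [];
  return [...quotePrompt, ...content, ...userSystemPrompt, prompt[0]];
}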


@@ -6,7 +6,6 @@ import { authUser } from '@/service/utils/auth';
 import { ChatItemType } from '@/types/chat';
 import { authModel } from '@/service/utils/auth';
 import mongoose from 'mongoose';
-import { ModelStatusEnum } from '@/constants/model';
 import type { ModelSchema } from '@/types/mongoSchema';
 /* initialize my chat box, authentication required */
@@ -29,8 +28,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   if (!myModel) {
     const { _id } = await Model.create({
       name: '应用1',
-      userId,
-      status: ModelStatusEnum.running
+      userId
     });
     model = (await Model.findById(_id)) as ModelSchema;
   } else {
@@ -95,7 +93,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       model: {
         name: model.name,
         avatar: model.avatar,
-        intro: model.share.intro,
+        intro: model.intro,
         canUse: model.share.isShare || String(model.userId) === userId
       },
       chatModel: model.chat.chatModel,
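
For reference, a sketch of the model block the init endpoint returns after this change; the type name is an assumption, the fields and their sources come from the hunk above.

// Sketch: the type name is an assumption; field sources follow the hunk above.
type InitChatModelInfo = {
  name: string;
  avatar: string;
  intro: string;   // now read from model.intro instead of model.share.intro
  canUse: boolean; // model.share.isShare || String(model.userId) === userId
};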


@@ -4,7 +4,7 @@ import { authShareChat } from '@/service/utils/auth';
 import { modelServiceToolMap } from '@/service/utils/chat';
 import { ChatItemSimpleType } from '@/types/chat';
 import { jsonRes } from '@/service/response';
-import { ChatModelMap, ModelVectorSearchModeMap } from '@/constants/model';
+import { ChatModelMap } from '@/constants/model';
 import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
 import { resStreamResponse } from '@/service/utils/chat';
 import { ChatRoleEnum } from '@/constants/chat';
@@ -40,26 +40,33 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   });
   const modelConstantsData = ChatModelMap[model.chat.chatModel];
   const prompt = prompts[prompts.length - 1];
-  const { code = 200, systemPrompts = [] } = await (async () => {
+  const {
+    rawSearch = [],
+    userSystemPrompt = [],
+    quotePrompt = []
+  } = await (async () => {
     // knowledge base search is used
     if (model.chat.relatedKbs?.length > 0) {
-      const { code, searchPrompts } = await appKbSearch({
+      const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
         model,
         userId,
         fixedQuote: [],
-        prompt: prompts[prompts.length - 1],
-        similarity: ModelVectorSearchModeMap[model.chat.searchMode]?.similarity
+        prompt: prompt,
+        similarity: model.chat.searchSimilarity,
+        limit: model.chat.searchLimit
       });
       return {
-        code,
-        systemPrompts: searchPrompts
+        rawSearch: rawSearch,
+        userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
+        quotePrompt: [quotePrompt]
       };
     }
     if (model.chat.systemPrompt) {
       return {
-        systemPrompts: [
+        userSystemPrompt: [
           {
             obj: ChatRoleEnum.System,
             value: model.chat.systemPrompt
@@ -71,15 +78,17 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   })();
   // search result is empty
-  if (code === 201) {
-    return res.send(systemPrompts[0]?.value);
+  if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
+    const response = model.chat.searchEmptyText;
+    return res.end(response);
   }
-  prompts.unshift(...systemPrompts);
+  // read the conversation content
+  const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];
   // content check
   await sensitiveCheck({
-    input: [...systemPrompts, prompts[prompts.length - 1]].map((item) => item.value).join('')
+    input: [...quotePrompt, ...userSystemPrompt, prompt].map((item) => item.value).join('')
   });
   // calculate temperature
@@ -93,7 +102,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   ].chatCompletion({
     apiKey: userOpenAiKey || systemAuthKey,
     temperature: +temperature,
-    messages: prompts,
+    messages: completePrompts,
     stream: true,
     res,
     chatId: historyId
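
Both chat handlers now short-circuit on an empty knowledge-base search by checking the app's own settings instead of the old code === 201 sentinel. A small sketch of that check; the helper name emptySearchReply is an assumption, while the condition itself comes from the hunks above.

// Sketch: returns the configured fallback text when a knowledge base is attached,
// the search produced no quote prompt, and searchEmptyText is set; otherwise undefined,
// and the handler continues with the normal chatCompletion call.
function emptySearchReply(
  model: { chat: { relatedKbs?: string[]; searchEmptyText?: string } },
  quotePrompt: { value?: string }[]
): string | undefined {
  const usedKb = (model.chat.relatedKbs?.length ?? 0) > 0;
  if (usedKb && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
    return model.chat.searchEmptyText; // sent straight back with res.end(...)
  }
  return undefined;
}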


@@ -50,7 +50,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       model: {
         name: model.name,
         avatar: model.avatar,
-        intro: model.share.intro
+        intro: model.intro
       },
       chatModel: model.chat.chatModel
     }