V4.7-alpha (#985)

Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
Archer (committed by GitHub)
2024-03-13 10:50:02 +08:00
parent 5bca15f12f
commit 9501c3f3a1
170 changed files with 5786 additions and 2342 deletions

View File

@@ -1,7 +0,0 @@
-export enum sseResponseEventEnum {
-  error = 'error',
-  answer = 'answer', // animation stream
-  response = 'response', // direct response, not animation
-  moduleStatus = 'moduleStatus',
-  appStreamResponse = 'appStreamResponse' // sse response request
-}
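This per-package enum is removed in favor of a shared one: the next file imports `SseResponseEventEnum` from `@fastgpt/global/core/module/runtime/constants`. A minimal sketch of that relocated enum, reconstructed only from the members this commit actually references (`error` here, `flowNodeStatus` below); the real definition presumably carries more members:

// Sketch only: members beyond these two are not shown in this commit.
export enum SseResponseEventEnum {
  error = 'error',
  flowNodeStatus = 'flowNodeStatus' // successor to the old moduleStatus event
}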

View File

@@ -1,5 +1,5 @@
 import type { NextApiResponse } from 'next';
-import { sseResponseEventEnum } from './constant';
+import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants';
 import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
 import { addLog } from '../system/log';
 import { clearCookie } from '../../support/permission/controller';
@@ -70,7 +70,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
     return responseWrite({
       res,
-      event: sseResponseEventEnum.error,
+      event: SseResponseEventEnum.error,
       data: JSON.stringify(ERROR_RESPONSE[errResponseKey])
     });
   }
@@ -90,7 +90,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
   responseWrite({
     res,
-    event: sseResponseEventEnum.error,
+    event: SseResponseEventEnum.error,
     data: JSON.stringify({ message: replaceSensitiveText(msg) })
   });
 };
@@ -132,3 +132,22 @@ export function responseWrite({
   event && Write(`event: ${event}\n`);
   Write(`data: ${data}\n\n`);
 }
+
+export const responseWriteNodeStatus = ({
+  res,
+  status = 'running',
+  name
+}: {
+  res?: NextApiResponse;
+  status?: 'running';
+  name: string;
+}) => {
+  responseWrite({
+    res,
+    event: SseResponseEventEnum.flowNodeStatus,
+    data: JSON.stringify({
+      status,
+      name
+    })
+  });
+};
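A usage sketch for the new helper, assuming a Next.js API route that streams workflow progress; the handler and node name are illustrative, and the import path is an assumption:

// Illustrative: push a node-status SSE event to the client before running a node.
import type { NextApiResponse } from 'next';
import { responseWriteNodeStatus } from '@fastgpt/service/common/response';

function onNodeStart(res: NextApiResponse, nodeName: string) {
  responseWriteNodeStatus({
    res,
    name: nodeName // e.g. 'AI Chat'; status defaults to 'running'
  });
}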

View File

@@ -1,4 +1,4 @@
-import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d';
+import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
 import { getAIApi } from '../config';
 import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
@@ -8,10 +8,10 @@ export async function createQuestionGuide({
   messages,
   model
 }: {
-  messages: ChatMessageItemType[];
+  messages: ChatCompletionMessageParam[];
   model: string;
 }) {
-  const concatMessages: ChatMessageItemType[] = [
+  const concatMessages: ChatCompletionMessageParam[] = [
     ...messages,
     {
       role: 'user',
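After this change callers pass OpenAI-shaped `ChatCompletionMessageParam` objects directly instead of FastGPT's internal chat items. An illustrative call; the model name is a placeholder and the return shape is not shown in this hunk:

// Illustrative: messages now use the OpenAI chat-completions shape.
const guide = await createQuestionGuide({
  model: 'gpt-3.5-turbo', // placeholder model name
  messages: [
    { role: 'user', content: 'How do I deploy FastGPT?' },
    { role: 'assistant', content: 'You can use the docker-compose file...' }
  ]
});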

View File

@@ -2,6 +2,7 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools';
 import { getAIApi } from '../config';
 import { ChatItemType } from '@fastgpt/global/core/chat/type';
 import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
+import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
 
 /*
 query extension - question expansion
@@ -133,7 +134,7 @@ A: ${chatBg}
         histories: concatFewShot
       })
     }
-  ];
+  ] as ChatCompletionMessageParam[];
 
   const result = await ai.chat.completions.create({
     model: model,
     temperature: 0.01,

View File

@@ -10,6 +10,7 @@ import {
 import { appCollectionName } from '../app/schema';
 import { userCollectionName } from '../../support/user/schema';
 import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
+import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants';
 
 export const ChatItemCollectionName = 'chatitems';
@@ -54,8 +55,8 @@ const ChatItemSchema = new Schema({
   },
   value: {
     // chat content
-    type: String,
-    default: ''
+    type: Array,
+    default: []
   },
   userGoodFeedback: {
     type: String
@@ -75,7 +76,7 @@ const ChatItemSchema = new Schema({
       a: String
     }
   },
-  [ModuleOutputKeyEnum.responseData]: {
+  [DispatchNodeResponseKeyEnum.nodeResponse]: {
     type: Array,
     default: []
   }
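With `value` widened from a string to an array, a stored chat item now holds typed content parts. A sketch of what a migrated document might look like, matching the `adaptStringValue` helper introduced in the next file; part types other than text are plausible but not shown in this diff:

// Illustrative document shape after the schema change.
const chatItem = {
  obj: 'AI',
  value: [
    {
      type: 'text',
      text: { content: 'Hello! How can I help you?' }
    }
  ]
};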

View File

@@ -1,6 +1,7 @@
-import type { ChatItemType } from '@fastgpt/global/core/chat/type';
+import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
 import { MongoChatItem } from './chatItemSchema';
 import { addLog } from '../../common/system/log';
+import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
 
 export async function getChatItems({
   appId,
@@ -24,8 +25,27 @@ export async function getChatItems({
   history.reverse();
+
+  history.forEach((item) => {
+    // @ts-ignore
+    item.value = adaptStringValue(item.value);
+  });
 
   return { history };
 }
+/* Temporary adaptation for legacy chat records; can be removed once the data has been cleaned (remove in 4.30) */
+export const adaptStringValue = (value: any): ChatItemValueItemType[] => {
+  if (typeof value === 'string') {
+    return [
+      {
+        type: ChatItemValueTypeEnum.text,
+        text: {
+          content: value
+        }
+      }
+    ];
+  }
+  return value;
+};
 
 export const addCustomFeedbacks = async ({
   appId,
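The cleanup is idempotent: legacy string values are wrapped into the new typed array, while already-migrated arrays pass through untouched. For example, assuming `ChatItemValueTypeEnum.text` serializes to 'text':

adaptStringValue('Hello');
// => [{ type: 'text', text: { content: 'Hello' } }]

adaptStringValue([{ type: 'text', text: { content: 'Hi' } }]);
// => returned unchanged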

View File

@@ -1,21 +1,40 @@
 import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
 import { ChatRoleEnum, IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants';
-import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
-import type { ChatCompletionContentPart } from '@fastgpt/global/core/ai/type.d';
+import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken';
+import type {
+  ChatCompletionContentPart,
+  ChatCompletionMessageParam
+} from '@fastgpt/global/core/ai/type.d';
 import axios from 'axios';
+import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
 /* slice chat context by tokens */
-export function ChatContextFilter({
+export function filterGPTMessageByMaxTokens({
   messages = [],
   maxTokens
 }: {
-  messages: ChatItemType[];
+  messages: ChatCompletionMessageParam[];
   maxTokens: number;
 }) {
   if (!Array.isArray(messages)) {
     return [];
   }
-  const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0);
+  const rawTextLen = messages.reduce((sum, item) => {
+    if (typeof item.content === 'string') {
+      return sum + item.content.length;
+    }
+    if (Array.isArray(item.content)) {
+      return (
+        sum +
+        item.content.reduce((sum, item) => {
+          if (item.type === 'text') {
+            return sum + item.text.length;
+          }
+          return sum;
+        }, 0)
+      );
+    }
+    return sum;
+  }, 0);
 
   // If the text length is less than half of the maximum token, no calculation is required
   if (rawTextLen < maxTokens * 0.5) {
@@ -23,19 +42,21 @@ export function ChatContextFilter({
   }
 
   // filter startWith system prompt
-  const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System);
-  const systemPrompts: ChatItemType[] = messages.slice(0, chatStartIndex);
-  const chatPrompts: ChatItemType[] = messages.slice(chatStartIndex);
+  const chatStartIndex = messages.findIndex(
+    (item) => item.role !== ChatCompletionRequestMessageRoleEnum.System
+  );
+  const systemPrompts: ChatCompletionMessageParam[] = messages.slice(0, chatStartIndex);
+  const chatPrompts: ChatCompletionMessageParam[] = messages.slice(chatStartIndex);
 
   // reduce token of systemPrompt
-  maxTokens -= countMessagesTokens(systemPrompts);
+  maxTokens -= countGptMessagesTokens(systemPrompts);
 
   // Save the last chat prompt (question)
   const question = chatPrompts.pop();
   if (!question) {
     return systemPrompts;
   }
-  const chats: ChatItemType[] = [question];
+  const chats: ChatCompletionMessageParam[] = [question];
 
   // Slice the conversation from back to front, two messages (an assistant/user pair) at a time
   while (1) {
@@ -45,7 +66,7 @@ export function ChatContextFilter({
       break;
     }
 
-    const tokens = countMessagesTokens([assistant, user]);
+    const tokens = countGptMessagesTokens([assistant, user]);
     maxTokens -= tokens;
 
     /* Total tokens exceed the limit; truncate */
     if (maxTokens < 0) {
@@ -62,6 +83,30 @@ export function ChatContextFilter({
   return [...systemPrompts, ...chats];
 }
+
+export const formatGPTMessagesInRequestBefore = (messages: ChatCompletionMessageParam[]) => {
+  return messages
+    .map((item) => {
+      if (!item.content) return;
+      if (typeof item.content === 'string') {
+        return {
+          ...item,
+          content: item.content.trim()
+        };
+      }
+
+      // array
+      if (item.content.length === 0) return;
+      if (item.content.length === 1 && item.content[0].type === 'text') {
+        return {
+          ...item,
+          content: item.content[0].text
+        };
+      }
+
+      return item;
+    })
+    .filter(Boolean) as ChatCompletionMessageParam[];
+};
 
 /**
   string to vision model. Follow the markdown code block rule for interception:
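The two helpers above are naturally chained when building a completion request: first trim the history to the token budget, then normalize the surviving messages. A sketch of that pipeline; the messages and the budget are illustrative, and only the signatures come from this diff:

// Illustrative pipeline: trim history, then normalize before the API call.
const filtered = filterGPTMessageByMaxTokens({
  messages: [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Hi' },
    { role: 'assistant', content: 'Hello!' },
    { role: 'user', content: 'Summarize our chat.' }
  ],
  maxTokens: 2000 // assumed budget; leading system prompts are always kept
});
const body = formatGPTMessagesInRequestBefore(filtered);
// empty contents are dropped; single-text arrays collapse to plain strings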
@@ -175,3 +220,21 @@ export async function formatStr2ChatContent(str: string) {
   return content ? content : null;
 }
+
+export const loadChatImgToBase64 = async (content: string | ChatCompletionContentPart[]) => {
+  if (typeof content === 'string') {
+    return content;
+  }
+
+  return Promise.all(
+    content.map(async (item) => {
+      if (item.type === 'text') return item;
+
+      // fetch the image and inline it as a base64 data URL
+      const response = await axios.get(item.image_url.url, {
+        responseType: 'arraybuffer'
+      });
+      const base64 = Buffer.from(response.data).toString('base64');
+      item.image_url.url = `data:${response.headers['content-type']};base64,${base64}`;
+      return item;
+    })
+  );
+};
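`loadChatImgToBase64` inlines remote images so the model provider does not have to fetch FastGPT-hosted URLs itself. An illustrative call on a vision-style content array; the URL is a placeholder:

// Illustrative: text parts pass through; image URLs are fetched and inlined.
const content = await loadChatImgToBase64([
  { type: 'text', text: 'What is in this picture?' },
  { type: 'image_url', image_url: { url: 'https://example.com/cat.png' } }
]);
// => image_url.url becomes 'data:image/png;base64,...'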

View File

@@ -25,12 +25,12 @@ export const pushResult2Remote = async ({
   outLinkUid,
   shareId,
   appName,
-  responseData
+  flowResponses
 }: {
   outLinkUid?: string; // raw id, not parsed
   shareId?: string;
   appName: string;
-  responseData?: ChatHistoryItemResType[];
+  flowResponses?: ChatHistoryItemResType[];
 }) => {
   if (!shareId || !outLinkUid || !FastGPTProUrl) return;
 
   try {
@@ -46,7 +46,7 @@ export const pushResult2Remote = async ({
       data: {
         token: outLinkUid,
         appName,
-        responseData
+        responseData: flowResponses
       }
     });
   } catch (error) {}
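Only the local parameter is renamed here: the remote Pro endpoint still expects a `responseData` key, so `flowResponses` is mapped back in the request body. An illustrative call; all values are placeholders:

// Illustrative only: ids and app name are placeholders.
await pushResult2Remote({
  shareId: 'abc123',
  outLinkUid: 'raw-uid-456',
  appName: 'My FastGPT App',
  flowResponses: [] // per-node responses collected during dispatch
});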