Mirror of https://github.com/labring/FastGPT.git (synced 2025-08-01 03:48:24 +00:00)
v4.4.6 (#377)
@@ -4,8 +4,8 @@ import { TrainingModeEnum } from '@/constants/plugin';
 import { ERROR_ENUM } from '../errorCode';
 import { sendInform } from '@/pages/api/user/inform/send';
 import { authBalanceByUid } from '../utils/auth';
-import { axiosConfig, getAIChatApi } from '@fastgpt/core/aiApi/config';
-import type { ChatCompletionRequestMessage } from '@fastgpt/core/aiApi/type';
+import { axiosConfig, getAIChatApi } from '@fastgpt/core/ai/config';
+import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';
 import { addLog } from '../utils/tools';
 import { splitText2Chunks } from '@/utils/file';
 import { replaceVariable } from '@/utils/common/tools/text';
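Note: throughout this commit the core AI client moves from `@fastgpt/core/aiApi/*` to `@fastgpt/core/ai/*`; only the import path changes, not the exports. A minimal sketch of consuming the renamed module, modeled on the call sites in this diff (the message content, model name and account object are placeholders, not part of the commit):

import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import type { ChatCompletionRequestMessage } from '@fastgpt/core/ai/type';

// openaiAccount may carry a user-supplied key/baseUrl; undefined falls back to the platform key.
async function ask(openaiAccount?: { key?: string; baseUrl?: string }) {
  const chatAPI = getAIChatApi(openaiAccount);
  const messages: ChatCompletionRequestMessage[] = [{ role: 'user', content: 'Hello' }];

  const { data } = await chatAPI.createChatCompletion(
    { model: 'gpt-3.5-turbo', messages },
    { timeout: 480000, ...axiosConfig(openaiAccount) }
  );
  return data.choices?.[0]?.message?.content || '';
}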
@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
 import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
-import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
 import type { ClassifyQuestionAgentItemType } from '@/types/app';
 import { SystemInputEnum } from '@/constants/app';
 import { SpecialInputKeyEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_user_question';
 export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
   const {
     moduleName,
-    userOpenaiAccount,
+    user,
     inputs: { agents, userChatInput }
   } = props as Props;

@@ -53,7 +53,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
     [TaskResponseKeyEnum.responseData]: {
       moduleType: FlowModuleTypeEnum.classifyQuestion,
       moduleName,
-      price: userOpenaiAccount?.key ? 0 : cqModel.price * tokens,
+      price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
       model: cqModel.name || '',
       tokens,
       cqList: agents,
@@ -63,7 +63,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
 };

 async function functionCall({
-  userOpenaiAccount,
+  user,
   inputs: { agents, systemPrompt, history = [], userChatInput }
 }: Props) {
   const cqModel = global.cqModel;
@@ -105,7 +105,7 @@ async function functionCall({
       required: ['type']
     }
   };
-  const chatAPI = getAIChatApi(userOpenaiAccount);
+  const chatAPI = getAIChatApi(user.openaiAccount);

   const response = await chatAPI.createChatCompletion(
     {
@@ -116,7 +116,7 @@ async function functionCall({
       functions: [agentFunction]
     },
     {
-      ...axiosConfig(userOpenaiAccount)
+      ...axiosConfig(user.openaiAccount)
     }
   );

@@ -138,7 +138,7 @@ async function functionCall({
 }

 async function completions({
-  userOpenaiAccount,
+  user,
   inputs: { agents, systemPrompt = '', history = [], userChatInput }
 }: Props) {
   const extractModel = global.extractModel;
@@ -155,7 +155,7 @@ Human:${userChatInput}`
     }
   ];

-  const chatAPI = getAIChatApi(userOpenaiAccount);
+  const chatAPI = getAIChatApi(user.openaiAccount);

   const { data } = await chatAPI.createChatCompletion(
     {
@@ -166,7 +166,7 @@ Human:${userChatInput}`
     },
     {
       timeout: 480000,
-      ...axiosConfig(userOpenaiAccount)
+      ...axiosConfig(user.openaiAccount)
     }
   );
   const answer = data.choices?.[0].message?.content || '';
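Note: the module props change from a flat `userOpenaiAccount` field to the full `user` object, and billing now checks `user.openaiAccount?.key`, charging nothing when the user brings their own OpenAI key. A minimal sketch of that pricing rule (the `UserLike` shape and helper name are illustrative, not from this commit):

// Charge the model's per-token price only when the platform key is used;
// a user-supplied OpenAI key means the request is billed to the user directly by OpenAI.
type UserLike = { openaiAccount?: { key?: string; baseUrl?: string } };

function modulePrice(user: UserLike, pricePerToken: number, tokens: number) {
  return user.openaiAccount?.key ? 0 : pricePerToken * tokens;
}

// e.g. modulePrice({ openaiAccount: { key: 'sk-...' } }, 0.002, 1500) === 0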
@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
 import { ChatContextFilter } from '@/service/common/tiktoken';
 import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
 import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
-import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
 import type { ContextExtractAgentItemType } from '@/types/app';
 import { ContextExtractEnum } from '@/constants/flow/flowField';
 import { FlowModuleTypeEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_extract_data';
 export async function dispatchContentExtract(props: Props): Promise<Response> {
   const {
     moduleName,
-    userOpenaiAccount,
+    user,
     inputs: { content, description, extractKeys }
   } = props;

@@ -73,7 +73,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
     [TaskResponseKeyEnum.responseData]: {
       moduleType: FlowModuleTypeEnum.contentExtract,
       moduleName,
-      price: userOpenaiAccount?.key ? 0 : extractModel.price * tokens,
+      price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
       model: extractModel.name || '',
       tokens,
       extractDescription: description,
@@ -83,7 +83,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
 }

 async function functionCall({
-  userOpenaiAccount,
+  user,
   inputs: { history = [], content, extractKeys, description }
 }: Props) {
   const extractModel = global.extractModel;
@@ -126,7 +126,7 @@ async function functionCall({
     }
   };

-  const chatAPI = getAIChatApi(userOpenaiAccount);
+  const chatAPI = getAIChatApi(user.openaiAccount);

   const response = await chatAPI.createChatCompletion(
     {
@@ -137,7 +137,7 @@ async function functionCall({
       functions: [agentFunction]
     },
     {
-      ...axiosConfig(userOpenaiAccount)
+      ...axiosConfig(user.openaiAccount)
     }
   );

@@ -157,7 +157,7 @@ async function functionCall({
 }

 async function completions({
-  userOpenaiAccount,
+  user,
   inputs: { history = [], content, extractKeys, description }
 }: Props) {
   const extractModel = global.extractModel;
@@ -181,7 +181,7 @@ Human: ${content}`
     }
   ];

-  const chatAPI = getAIChatApi(userOpenaiAccount);
+  const chatAPI = getAIChatApi(user.openaiAccount);

   const { data } = await chatAPI.createChatCompletion(
     {
@@ -192,7 +192,7 @@ Human: ${content}`
     },
     {
       timeout: 480000,
-      ...axiosConfig(userOpenaiAccount)
+      ...axiosConfig(user.openaiAccount)
     }
   );
   const answer = data.choices?.[0].message?.content || '';
@@ -5,13 +5,13 @@ import type { ChatHistoryItemResType } from '@/types/chat';
 import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
 import { SSEParseData, parseStreamChunk } from '@/utils/sse';
 import { textAdaptGptResponse } from '@/utils/adapt';
-import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
 import { TaskResponseKeyEnum } from '@/constants/chat';
 import { getChatModel } from '@/service/utils/data';
 import { countModelPrice } from '@/service/common/bill/push';
 import { ChatModelItemType } from '@/types/model';
 import { textCensor } from '@/api/service/plugins';
-import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
+import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
 import { AppModuleItemType } from '@/types/app';
 import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
 import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
@@ -35,6 +35,7 @@ export type ChatProps = ModuleDispatchProps<
 export type ChatResponse = {
   [TaskResponseKeyEnum.answerText]: string;
   [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+  [TaskResponseKeyEnum.history]: ChatItemType[];
   finish: boolean;
 };

@@ -45,7 +46,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     moduleName,
     stream = false,
     detail = false,
-    userOpenaiAccount,
+    user,
     outputs,
     inputs: {
       model = global.chatModels[0]?.model,
@@ -105,7 +106,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   // FastGPT temperature range: 1~10
   temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
   temperature = Math.max(temperature, 0.01);
-  const chatAPI = getAIChatApi(userOpenaiAccount);
+  const chatAPI = getAIChatApi(user.openaiAccount);

   const response = await chatAPI.createChatCompletion(
     {
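Note: the temperature handling above maps FastGPT's 1~10 UI range onto the model's own 0~maxTemperature range and clamps the result away from zero. A standalone sketch of the same arithmetic (the default maxTemperature of 2 is an assumption for illustration):

// FastGPT exposes temperature as 1~10; the model expects 0~maxTemperature.
function toModelTemperature(uiTemperature: number, maxTemperature = 2) {
  const t = +(maxTemperature * (uiTemperature / 10)).toFixed(2);
  return Math.max(t, 0.01); // never send exactly 0
}

// toModelTemperature(7) -> 1.4
// toModelTemperature(0) -> 0.01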
@@ -128,7 +129,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     {
       timeout: 480000,
       responseType: stream ? 'stream' : 'json',
-      ...axiosConfig(userOpenaiAccount)
+      ...axiosConfig(user.openaiAccount)
     }
   );

@@ -179,7 +180,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     [TaskResponseKeyEnum.responseData]: {
       moduleType: FlowModuleTypeEnum.chatNode,
       moduleName,
-      price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
+      price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
       model: modelConstantsData.name,
       tokens: totalTokens,
       question: userChatInput,
@@ -187,6 +188,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       quoteList: filterQuoteQA,
       historyPreview: getHistoryPreview(completeMessages)
     },
+    [TaskResponseKeyEnum.history]: completeMessages,
     finish: true
   };
 };
@@ -4,5 +4,6 @@ export * from './chat/oneapi';
 export * from './kb/search';
 export * from './tools/answer';
 export * from './tools/http';
+export * from './tools/runApp';
 export * from './agent/classifyQuestion';
 export * from './agent/extract';
@@ -6,7 +6,6 @@ export type AnswerProps = ModuleDispatchProps<{
   text: string;
 }>;
 export type AnswerResponse = {
   [TaskResponseKeyEnum.answerText]: string;
-  finish: boolean;
 };

@@ -29,7 +28,6 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
   }

   return {
     [TaskResponseKeyEnum.answerText]: text,
-    finish: true
   };
 };
projects/app/src/service/moduleDispatch/tools/runApp.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
+import { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
+import type { ModuleDispatchProps } from '@/types/core/chat/type';
+import { SelectAppItemType } from '@/types/core/app/flow';
+import { dispatchModules } from '@/pages/api/v1/chat/completions';
+import { App } from '@/service/mongo';
+import { responseWrite } from '@/service/common/stream';
+import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
+import { textAdaptGptResponse } from '@/utils/adapt';
+
+type Props = ModuleDispatchProps<{
+  userChatInput: string;
+  history?: ChatItemType[];
+  app: SelectAppItemType;
+}>;
+type Response = {
+  finish: boolean;
+  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType[];
+  [TaskResponseKeyEnum.answerText]: string;
+  [TaskResponseKeyEnum.history]: ChatItemType[];
+};
+
+export const dispatchAppRequest = async (props: Record<string, any>): Promise<Response> => {
+  const {
+    res,
+    variables,
+    user,
+    stream,
+    detail,
+    inputs: { userChatInput, history = [], app }
+  } = props as Props;
+
+  if (!userChatInput) {
+    return Promise.reject('Input is empty');
+  }
+
+  const appData = await App.findById(app.id);
+
+  if (!appData) {
+    return Promise.reject('App not found');
+  }
+
+  responseWrite({
+    res,
+    event: detail ? sseResponseEventEnum.answer : undefined,
+    data: textAdaptGptResponse({
+      text: '\n'
+    })
+  });
+
+  const { responseData, answerText } = await dispatchModules({
+    res,
+    modules: appData.modules,
+    user,
+    variables,
+    params: {
+      history,
+      userChatInput
+    },
+    stream,
+    detail
+  });
+
+  const completeMessages = history.concat([
+    {
+      obj: ChatRoleEnum.Human,
+      value: userChatInput
+    },
+    {
+      obj: ChatRoleEnum.AI,
+      value: answerText
+    }
+  ]);
+
+  return {
+    finish: true,
+    responseData,
+    [TaskResponseKeyEnum.answerText]: answerText,
+    [TaskResponseKeyEnum.history]: completeMessages
+  };
+};
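Note: the new `dispatchAppRequest` module lets one app invoke another as a flow node: it loads the target app by `app.id`, re-enters `dispatchModules` with the caller's `user`, `variables` and stream settings, and returns the merged chat history so the parent flow can keep chaining. A hedged sketch of invoking it from surrounding dispatcher code (the context object, app id and helper name are placeholders; only `dispatchAppRequest` and its props come from this commit):

import { TaskResponseKeyEnum } from '@/constants/chat';
import { dispatchAppRequest } from '@/service/moduleDispatch/tools/runApp';

// ctx is assumed to already carry res / user / variables / stream / detail
// from the parent chat completion request.
async function runSubApp(ctx: Record<string, any>) {
  const result = await dispatchAppRequest({
    ...ctx,
    inputs: {
      userChatInput: 'Summarize the previous answer',
      history: [],
      app: { id: '64f000000000000000000000' } // only `id` is read above; full SelectAppItemType not shown in this diff
    }
  });

  // Keys mirror the Response type declared in runApp.ts.
  return {
    answer: result[TaskResponseKeyEnum.answerText],
    history: result[TaskResponseKeyEnum.history]
  };
}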
@@ -102,8 +102,23 @@ export async function authOutLinkLimit({
   await authShareStart({ authToken, tokenUrl: outLink.limit.hookUrl, question });
 }

+export async function authOutLinkId({ id }: { id: string }) {
+  const outLink = await OutLink.findOne({
+    shareId: id
+  });
+
+  if (!outLink) {
+    return Promise.reject('分享链接无效');
+  }
+
+  return {
+    userId: String(outLink.userId)
+  };
+}
+
 type TokenAuthResponseType = {
   success: boolean;
+  msg?: string;
   message?: string;
 };

@@ -119,7 +134,7 @@ export const authShareChatInit = async (authToken?: string, tokenUrl?: string) =
       }
     });
     if (data?.success !== true) {
-      return Promise.reject(data?.message || '身份校验失败');
+      return Promise.reject(data?.message || data?.msg || '身份校验失败');
     }
   } catch (error) {
     return Promise.reject('身份校验失败');
@@ -148,7 +163,7 @@ export const authShareStart = async ({
     });

     if (data?.success !== true) {
-      return Promise.reject(data?.message || '身份校验失败');
+      return Promise.reject(data?.message || data?.msg || '身份校验失败');
     }
   } catch (error) {
     return Promise.reject('身份校验失败');
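Note: both share-chat hooks (`authShareChatInit` and `authShareStart`) now accept either `message` or `msg` in the hook's JSON reply, matching the widened `TokenAuthResponseType`; whichever field is present is surfaced as the rejection reason in place of the default '身份校验失败' ("identity verification failed"). A sketch of responses a hook endpoint might return and how the check above treats them (the endpoint itself is external and illustrative):

// Shape the hookUrl endpoint is expected to return, per TokenAuthResponseType above.
type TokenAuthResponse = { success: boolean; msg?: string; message?: string };

// Accepted: the chat request proceeds.
const ok: TokenAuthResponse = { success: true };

// Rejected: either field now becomes the error shown to the caller.
const denied: TokenAuthResponse = { success: false, msg: 'quota exceeded' };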
@@ -1,15 +1,17 @@
 import type { NextApiRequest } from 'next';
 import Cookie from 'cookie';
-import { App, OpenApi, User, KB } from '../mongo';
+import { App, User, KB } from '../mongo';
 import type { AppSchema, UserModelSchema } from '@/types/mongoSchema';
 import { ERROR_ENUM } from '../errorCode';
 import { authJWT } from './tools';
 import { authOpenApiKey } from '../support/openapi/auth';
+import { authOutLinkId } from '../support/outLink/auth';

 export enum AuthUserTypeEnum {
   token = 'token',
   root = 'root',
-  apikey = 'apikey'
+  apikey = 'apikey',
+  outLink = 'outLink'
 }

 /* auth balance */
@@ -34,13 +36,15 @@ export const authUser = async ({
   authToken = false,
   authRoot = false,
   authApiKey = false,
-  authBalance = false
+  authBalance = false,
+  authOutLink
 }: {
   req: NextApiRequest;
   authToken?: boolean;
   authRoot?: boolean;
   authApiKey?: boolean;
   authBalance?: boolean;
+  authOutLink?: boolean;
 }) => {
   const authCookieToken = async (cookie?: string, token?: string): Promise<string> => {
     // get cookie
@@ -107,13 +111,18 @@ export const authUser = async ({
     userid?: string;
     authorization?: string;
   };
+  const { shareId } = (req?.body || {}) as { shareId?: string };

   let uid = '';
   let appId = '';
   let openApiKey = apikey;
   let authType: `${AuthUserTypeEnum}` = AuthUserTypeEnum.token;

-  if (authToken && (cookie || token)) {
+  if (authOutLink && shareId) {
+    const res = await authOutLinkId({ id: shareId });
+    uid = res.userId;
+    authType = AuthUserTypeEnum.outLink;
+  } else if (authToken && (cookie || token)) {
     // user token(from fastgpt web)
     uid = await authCookieToken(cookie, token);
     authType = AuthUserTypeEnum.token;
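Note: with `authOutLink` enabled, a request whose body carries a `shareId` is authenticated through the share link's owner (via `authOutLinkId`, which rejects with '分享链接无效', "invalid share link", for an unknown id) before falling back to cookie/token auth, and is tagged with the new `outLink` auth type. A hedged sketch of how an API route might opt in (route shape, import path and return value are assumptions; only the `authOutLink` option and `AuthUserTypeEnum.outLink` come from this commit):

import type { NextApiRequest, NextApiResponse } from 'next';
import { authUser, AuthUserTypeEnum } from '@/service/utils/auth';

// Hypothetical share-chat route: the client POSTs a body containing { shareId, ... }.
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  const auth = await authUser({ req, authToken: true, authOutLink: true });
  // When req.body.shareId resolves, uid comes from the share link owner and the
  // request is tagged AuthUserTypeEnum.outLink instead of a cookie/token uid.
  res.json(auth);
}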