Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 20:37:48 +00:00)
V4.8.17 feature (#3485)
* feat: add third party account config (#3443)
* temp
* editor workflow variable style
* add team to dispatch
* i18n
* delete console
* change openai account position
* fix
* fix
* fix
* fix
* fix
* 4.8.17 test (#3461)
* perf: external provider config
* perf: ui
* feat: add template config (#3434)
* change template position
* template config
* delete console
* delete
* fix
* fix
* perf: Mongo virtual field (#3464)
* remove invalid code
* perf: team member virtual code
* perf: virtual search; perf: search test data
* fix: ts
* fix: image response headers
* perf: template code
* perf: auth layout; perf: auto save (#3472)
* perf: auth layout
* perf: auto save
* perf: auto save
* fix: template guide display & http input support external variables (#3475)
* fix: template guide display
* http editor support external workflow variables
* perf: auto save; fix: ifelse checker line break (#3478)
* perf: auto save
* perf: auto save
* fix: ifelse checker line break
* perf: doc
* perf: doc
* fix: update var type error
* 4.8.17 test (#3479)
* perf: auto save
* perf: auto save
* perf: template code
* 4.8.17 test (#3480)
* perf: auto save
* perf: auto save
* perf: model price model
* feat: add react memo
* perf: model provider filter
* fix: ts (#3481)
* perf: auto save
* perf: auto save
* fix: ts
* simple app tool select (#3473)
* workflow plugin userguide & simple tool ui
* simple tool filter
* reuse component
* change component to hook
* fix
* perf: tool selector modal (#3484)
* perf: auto save
* perf: auto save
* perf: markdown render
* perf: tool selector
* fix: app version require tmbId
* perf: templates refresh
* perf: templates refresh
* hide auto save error tip
* perf: toolkit guide

---------

Co-authored-by: heheer <heheer@sealos.io>
@@ -1,4 +1,3 @@
import type { UserModelSchema } from '@fastgpt/global/support/user/type';
import OpenAI from '@fastgpt/global/core/ai';
import {
ChatCompletionCreateParamsNonStreaming,
@@ -7,13 +6,11 @@ import {
import { getErrText } from '@fastgpt/global/common/error/utils';
import { addLog } from '../../common/system/log';
import { i18nT } from '../../../web/i18n/utils';
import { OpenaiAccountType } from '@fastgpt/global/support/user/team/type';

export const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';

export const getAIApi = (props?: {
userKey?: UserModelSchema['openaiAccount'];
timeout?: number;
}) => {
export const getAIApi = (props?: { userKey?: OpenaiAccountType; timeout?: number }) => {
const { userKey, timeout } = props || {};

const baseUrl =
@@ -29,7 +26,7 @@ export const getAIApi = (props?: {
});
};

export const getAxiosConfig = (props?: { userKey?: UserModelSchema['openaiAccount'] }) => {
export const getAxiosConfig = (props?: { userKey?: OpenaiAccountType }) => {
const { userKey } = props || {};

const baseUrl =
@@ -57,7 +54,7 @@ export const createChatCompletion = async <T extends CompletionsBodyType>({
options
}: {
body: T;
userKey?: UserModelSchema['openaiAccount'];
userKey?: OpenaiAccountType;
timeout?: number;
options?: OpenAI.RequestOptions;
}): Promise<{

@@ -131,6 +131,7 @@ export async function getChildAppPreviewNode({
name: app.name,
intro: app.intro,
courseUrl: app.courseUrl,
userGuide: app.userGuide,
showStatus: app.showStatus,
isTool: true,
version: app.version,
packages/service/core/app/templates/templateSchema.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
import { AppTemplateSchemaType } from '@fastgpt/global/core/app/type';
import { connectionMongo, getMongoModel } from '../../../common/mongo/index';
const { Schema } = connectionMongo;

export const collectionName = 'app_templates';

const AppTemplateSchema = new Schema({
templateId: {
type: String,
required: true
},
name: {
type: String
},
intro: {
type: String
},
avatar: {
type: String
},
tags: {
type: [String],
default: undefined
},
type: {
type: String
},
isActive: {
type: Boolean
},
userGuide: {
type: Object
},
isQuickTemplate: {
type: Boolean
},
order: {
type: Number,
default: -1
},
workflow: {
type: Object
}
});

AppTemplateSchema.index({ templateId: 1 });

export const MongoAppTemplate = getMongoModel<AppTemplateSchemaType>(
collectionName,
AppTemplateSchema
);
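As a usage sketch (not part of this commit), a caller might list the active quick-start templates like this, assuming MongoAppTemplate behaves as a standard Mongoose model returned by getMongoModel:

import { MongoAppTemplate } from './templateSchema';

// Hypothetical helper: fetch active quick templates, lowest `order` first.
export async function listQuickTemplates() {
  return MongoAppTemplate.find({ isActive: true, isQuickTemplate: true })
    .sort({ order: 1 })
    .lean();
}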
packages/service/core/app/templates/templateTypeSchema.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
import { TemplateTypeSchemaType } from '@fastgpt/global/core/app/type';
import { connectionMongo, getMongoModel } from '../../../common/mongo/index';
const { Schema } = connectionMongo;

export const collectionName = 'app_template_types';

const TemplateTypeSchema = new Schema({
typeName: {
type: String,
required: true
},
typeId: {
type: String,
required: true
},
typeOrder: {
type: Number,
default: 0
}
});

export const MongoTemplateTypes = getMongoModel<TemplateTypeSchemaType>(
collectionName,
TemplateTypeSchema
);

@@ -2,10 +2,16 @@ import { connectionMongo, getMongoModel, type Model } from '../../../common/mong
const { Schema, model, models } = connectionMongo;
import { AppVersionSchemaType } from '@fastgpt/global/core/app/version';
import { chatConfigType } from '../schema';
import { TeamMemberCollectionName } from '@fastgpt/global/support/user/team/constant';

export const AppVersionCollectionName = 'app_versions';

const AppVersionSchema = new Schema({
tmbId: {
type: String,
ref: TeamMemberCollectionName,
required: true
},
appId: {
type: Schema.Types.ObjectId,
ref: AppVersionCollectionName,
@@ -26,16 +32,8 @@ const AppVersionSchema = new Schema({
chatConfig: {
type: chatConfigType
},
isPublish: {
type: Boolean
},
versionName: {
type: String,
default: ''
},
tmbId: {
type: String
}
isPublish: Boolean,
versionName: String
});

try {

@@ -104,9 +104,6 @@ export const loadRequestMessages = async ({
}) => {
// Load image to base64
const loadImageToBase64 = async (messages: ChatCompletionContentPart[]) => {
if (process.env.MULTIPLE_DATA_TO_BASE64 === 'false') {
return messages;
}
return Promise.all(
messages.map(async (item) => {
if (item.type === 'image_url') {
@@ -125,7 +122,7 @@ export const loadRequestMessages = async ({

try {
// If imgUrl is a local path, load image from local, and set url to base64
if (imgUrl.startsWith('/')) {
if (imgUrl.startsWith('/') || process.env.MULTIPLE_DATA_TO_BASE64 === 'true') {
addLog.debug('Load image from local server', {
baseUrl: serverRequestBaseUrl,
requestUrl: imgUrl

@@ -4,11 +4,7 @@ import {
} from '@fastgpt/global/core/dataset/constants';
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { MongoDatasetCollection } from './schema';
import {
CollectionWithDatasetType,
DatasetCollectionSchemaType,
DatasetSchemaType
} from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSchemaType, DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetTraining } from '../training/schema';
import { MongoDatasetData } from '../data/schema';
import { delImgByRelatedId } from '../../../common/file/image/controller';
@@ -230,7 +226,7 @@ export const delCollectionRelatedSource = async ({
collections,
session
}: {
collections: (CollectionWithDatasetType | DatasetCollectionSchemaType)[];
collections: DatasetCollectionSchemaType[];
session: ClientSession;
}) => {
if (collections.length === 0) return;
@@ -264,7 +260,7 @@ export async function delCollection({
session,
delRelatedSource
}: {
collections: (CollectionWithDatasetType | DatasetCollectionSchemaType)[];
collections: DatasetCollectionSchemaType[];
session: ClientSession;
delRelatedSource: boolean;
}) {
@@ -274,16 +270,7 @@ export async function delCollection({

if (!teamId) return Promise.reject('teamId is not exist');

const datasetIds = Array.from(
new Set(
collections.map((item) => {
if (typeof item.datasetId === 'string') {
return String(item.datasetId);
}
return String(item.datasetId._id);
})
)
);
const datasetIds = Array.from(new Set(collections.map((item) => String(item.datasetId))));
const collectionIds = collections.map((item) => String(item._id));

// delete training data
@@ -324,7 +311,7 @@ export async function delOnlyCollection({
collections,
session
}: {
collections: (CollectionWithDatasetType | DatasetCollectionSchemaType)[];
collections: DatasetCollectionSchemaType[];
session: ClientSession;
}) {
if (collections.length === 0) return;
@@ -333,16 +320,7 @@ export async function delOnlyCollection({

if (!teamId) return Promise.reject('teamId is not exist');

const datasetIds = Array.from(
new Set(
collections.map((item) => {
if (typeof item.datasetId === 'string') {
return String(item.datasetId);
}
return String(item.datasetId._id);
})
)
);
const datasetIds = Array.from(new Set(collections.map((item) => String(item.datasetId))));
const collectionIds = collections.map((item) => String(item._id));

// delete training data

@@ -100,6 +100,13 @@ const DatasetCollectionSchema = new Schema({
}
});

DatasetCollectionSchema.virtual('dataset', {
ref: DatasetCollectionName,
localField: 'datasetId',
foreignField: '_id',
justOne: true
});

try {
// auth file
DatasetCollectionSchema.index({ teamId: 1, fileId: 1 });

@@ -130,7 +130,7 @@ export const collectionTagsToTagLabel = async ({
};

export const syncCollection = async (collection: CollectionWithDatasetType) => {
const dataset = collection.datasetId;
const dataset = collection.dataset;

if (
collection.type !== DatasetCollectionTypeEnum.link &&
@@ -183,7 +183,7 @@ export const syncCollection = async (collection: CollectionWithDatasetType) => {
teamId: collection.teamId,
tmbId: collection.tmbId,
name: collection.name,
datasetId: collection.datasetId._id,
datasetId: collection.datasetId,
parentId: collection.parentId,
type: collection.type,

@@ -1,4 +1,4 @@
import { CollectionWithDatasetType, DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from './collection/schema';
import { MongoDataset } from './schema';
import { delCollectionRelatedSource } from './collection/controller';
@@ -49,9 +49,9 @@ export async function findDatasetAndAllChildren({
}

export async function getCollectionWithDataset(collectionId: string) {
const data = (await MongoDatasetCollection.findById(collectionId)
.populate('datasetId')
.lean()) as CollectionWithDatasetType;
const data = await MongoDatasetCollection.findById(collectionId)
.populate<{ dataset: DatasetSchemaType }>('dataset')
.lean();
if (!data) {
return Promise.reject('Collection is not exist');
}
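For context, the move from populate('datasetId') to a named virtual follows the generic Mongoose virtual-populate pattern sketched below (illustrative names, not the project's code): datasetId stays a plain ObjectId, and the joined dataset document is exposed under a separate dataset key.

// Sketch of the pattern, assuming a schema with a datasetId field and a 'datasets' collection.
CollectionSchema.virtual('dataset', {
  ref: 'datasets',
  localField: 'datasetId',
  foreignField: '_id',
  justOne: true
});

// The joined document shows up as doc.dataset; doc.datasetId remains an id.
const doc = await CollectionModel.findById(collectionId)
  .populate<{ dataset: DatasetSchemaType }>('dataset')
  .lean();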
@@ -77,21 +77,32 @@ const DatasetDataSchema = new Schema({
rebuilding: Boolean
});

// list collection and count data; list data; delete collection(relate data)
DatasetDataSchema.index({
teamId: 1,
datasetId: 1,
collectionId: 1,
chunkIndex: 1,
updateTime: -1
DatasetDataSchema.virtual('collection', {
ref: DatasetColCollectionName,
localField: 'collectionId',
foreignField: '_id',
justOne: true
});
// full text index
DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
// Recall vectors after data matching
DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
DatasetDataSchema.index({ updateTime: 1 });
// rebuild data
DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });

try {
// list collection and count data; list data; delete collection(relate data)
DatasetDataSchema.index({
teamId: 1,
datasetId: 1,
collectionId: 1,
chunkIndex: 1,
updateTime: -1
});
// full text index
DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
// Recall vectors after data matching
DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
DatasetDataSchema.index({ updateTime: 1 });
// rebuild data
DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });
} catch (error) {
console.log(error);
}

export const MongoDatasetData = getMongoModel<DatasetDataSchemaType>(
DatasetDataCollectionName,

@@ -8,8 +8,8 @@ import { getVectorsByText } from '../../ai/embedding';
import { getVectorModel } from '../../ai/model';
import { MongoDatasetData } from '../data/schema';
import {
DatasetCollectionSchemaType,
DatasetDataSchemaType,
DatasetDataWithCollectionType,
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from '../collection/schema';
@@ -267,7 +267,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
});

// get q and a
const dataList = (await MongoDatasetData.find(
const dataList = await MongoDatasetData.find(
{
teamId,
datasetId: { $in: datasetIds },
@@ -276,8 +276,11 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
},
'datasetId collectionId updateTime q a chunkIndex indexes'
)
.populate('collectionId', 'name fileId rawLink externalFileId externalFileUrl')
.lean()) as DatasetDataWithCollectionType[];
.populate<{ collection: DatasetCollectionSchemaType }>(
'collection',
'name fileId rawLink externalFileId externalFileUrl'
)
.lean();

// add score to data(It's already sorted. The first one is the one with the most points)
const concatResults = dataList.map((data) => {
@@ -307,8 +310,8 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
a: data.a,
chunkIndex: data.chunkIndex,
datasetId: String(data.datasetId),
collectionId: String(data.collectionId?._id),
...getCollectionSourceData(data.collectionId),
collectionId: String(data.collectionId),
...getCollectionSourceData(data.collection),
score: [{ type: SearchScoreTypeEnum.embedding, value: data.score, index }]
};

@@ -34,7 +34,7 @@ export const pushDataListToTrainingQueueByCollectionId = async ({
session?: ClientSession;
} & PushDatasetDataProps) => {
const {
datasetId: { _id: datasetId, agentModel, vectorModel }
dataset: { _id: datasetId, agentModel, vectorModel }
} = await getCollectionWithDataset(collectionId);
return pushDataListToTrainingQueue({
...props,

@@ -35,7 +35,7 @@ type ActionProps = Props & { cqModel: LLMModelItemType };
/* request openai chat */
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
const {
user,
externalProvider,
node: { nodeId, name },
histories,
params: { model, history = 6, agents, userChatInput }
@@ -69,7 +69,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
.filter((item) => item.key !== result.key)
.map((item) => getHandleId(nodeId, 'source', item.key)),
[DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: userChatInput,
tokens,
@@ -80,7 +80,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
tokens
}
@@ -90,7 +90,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse

const completions = async ({
cqModel,
user,
externalProvider,
histories,
params: { agents, systemPrompt = '', userChatInput }
}: ActionProps) => {
@@ -131,7 +131,7 @@ const completions = async ({
},
cqModel
),
userKey: user.openaiAccount
userKey: externalProvider.openaiAccount
});
const answer = data.choices?.[0].message?.content || '';

@@ -46,7 +46,7 @@ const agentFunName = 'request_function';

export async function dispatchContentExtract(props: Props): Promise<Response> {
const {
user,
externalProvider,
node: { name },
histories,
params: { content, history = 6, model, description, extractKeys }
@@ -123,7 +123,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
[NodeOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
...arg,
[DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
query: content,
tokens,
@@ -134,7 +134,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
tokens
}
@@ -211,7 +211,7 @@ ${description ? `- ${description}` : ''}
};

const toolChoice = async (props: ActionProps) => {
const { user, extractModel } = props;
const { externalProvider, extractModel } = props;

const { filterMessages, agentFunction } = await getFunctionCallSchema(props);

@@ -233,7 +233,7 @@ const toolChoice = async (props: ActionProps) => {
},
extractModel
),
userKey: user.openaiAccount
userKey: externalProvider.openaiAccount
});

const arg: Record<string, any> = (() => {
@@ -263,7 +263,7 @@ const toolChoice = async (props: ActionProps) => {
};

const functionCall = async (props: ActionProps) => {
const { user, extractModel } = props;
const { externalProvider, extractModel } = props;

const { agentFunction, filterMessages } = await getFunctionCallSchema(props);
const functions: ChatCompletionCreateParams.Function[] = [agentFunction];
@@ -281,7 +281,7 @@ const functionCall = async (props: ActionProps) => {
},
extractModel
),
userKey: user.openaiAccount
userKey: externalProvider.openaiAccount
});

try {
@@ -312,7 +312,7 @@ const functionCall = async (props: ActionProps) => {

const completions = async ({
extractModel,
user,
externalProvider,
histories,
params: { content, extractKeys, description = 'No special requirements' }
}: ActionProps) => {
@@ -360,7 +360,7 @@ Human: ${content}`
},
extractModel
),
userKey: user.openaiAccount
userKey: externalProvider.openaiAccount
});
const answer = data.choices?.[0].message?.content || '';

@@ -43,7 +43,7 @@ export const runToolWithFunctionCall = async (
requestOrigin,
runtimeNodes,
runtimeEdges,
user,
externalProvider,
stream,
workflowStreamResponse,
params: { temperature, maxToken, aiChatVision }
@@ -221,7 +221,7 @@ export const runToolWithFunctionCall = async (
getEmptyResponseTip
} = await createChatCompletion({
body: requestBody,
userKey: user.openaiAccount,
userKey: externalProvider.openaiAccount,
options: {
headers: {
Accept: 'application/json, text/plain, */*'

@@ -46,7 +46,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
requestOrigin,
chatConfig,
runningAppInfo: { teamId },
user,
externalProvider,
params: {
model,
systemPrompt,
@@ -153,7 +153,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
})();

// censor model and system key
if (toolModel.censor && !user.openaiAccount?.key) {
if (toolModel.censor && !externalProvider.openaiAccount?.key) {
await postTextCensor({
text: `${systemPrompt}
${userChatInput}
@@ -228,7 +228,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
tokens: toolNodeTokens,
modelType: ModelTypeEnum.llm
});
const toolAIUsage = user.openaiAccount?.key ? 0 : totalPoints;
const toolAIUsage = externalProvider.openaiAccount?.key ? 0 : totalPoints;

// flat child tool response
const childToolResponse = dispatchFlowResponse.map((item) => item.flowResponses).flat();

@@ -51,7 +51,7 @@ export const runToolWithPromptCall = async (
requestOrigin,
runtimeNodes,
runtimeEdges,
user,
externalProvider,
stream,
workflowStreamResponse,
params: { temperature, maxToken, aiChatVision }
@@ -230,7 +230,7 @@ export const runToolWithPromptCall = async (
getEmptyResponseTip
} = await createChatCompletion({
body: requestBody,
userKey: user.openaiAccount,
userKey: externalProvider.openaiAccount,
options: {
headers: {
Accept: 'application/json, text/plain, */*'
@@ -24,11 +24,9 @@ import { AIChatItemType } from '@fastgpt/global/core/chat/type';
import { formatToolResponse, initToolCallEdges, initToolNodes } from './utils';
import { computedMaxToken, llmCompletionsBodyFormat } from '../../../../ai/utils';
import { getNanoid, sliceStrStartEnd } from '@fastgpt/global/common/string/tools';
import { addLog } from '../../../../../common/system/log';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { i18nT } from '../../../../../../web/i18n/utils';

type ToolRunResponseType = {
toolRunResponse: DispatchFlowResponse;
@@ -92,7 +90,7 @@ export const runToolWithToolChoice = async (
runtimeNodes,
runtimeEdges,
stream,
user,
externalProvider,
workflowStreamResponse,
params: { temperature, maxToken, aiChatVision }
} = workflowProps;
@@ -278,7 +276,7 @@ export const runToolWithToolChoice = async (
getEmptyResponseTip
} = await createChatCompletion({
body: requestBody,
userKey: user.openaiAccount,
userKey: externalProvider.openaiAccount,
options: {
headers: {
Accept: 'application/json, text/plain, */*'

@@ -62,7 +62,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
res,
requestOrigin,
stream = false,
user,
externalProvider,
histories,
node: { name },
query,
@@ -134,7 +134,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}),
(() => {
// censor model and system key
if (modelConstantsData.censor && !user.openaiAccount?.key) {
if (modelConstantsData.censor && !externalProvider.openaiAccount?.key) {
return postTextCensor({
text: `${systemPrompt}
${userChatInput}
@@ -170,7 +170,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
// console.log(JSON.stringify(requestBody, null, 2), '===');
const { response, isStreamResponse, getEmptyResponseTip } = await createChatCompletion({
body: requestBody,
userKey: user.openaiAccount,
userKey: externalProvider.openaiAccount,
options: {
headers: {
Accept: 'application/json, text/plain, */*'
@@ -230,7 +230,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText,
[DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
tokens,
query: `${userChatInput}`,
@@ -245,7 +245,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
moduleName: name,
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName,
tokens
}
@@ -126,7 +126,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
runtimeEdges = [],
histories = [],
variables = {},
user,
timezone,
externalProvider,
stream = false,
...props
} = data;
@@ -150,7 +151,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
[DispatchNodeResponseKeyEnum.runTimes]: 1,
[DispatchNodeResponseKeyEnum.assistantResponses]: [],
[DispatchNodeResponseKeyEnum.toolResponses]: null,
newVariables: removeSystemVariable(variables)
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables)
};
}

@@ -180,6 +181,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons

variables = {
...getSystemVariable(data),
...externalProvider.externalWorkflowVariables,
...variables
};

@@ -493,11 +495,11 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
}

// replace {{xx}} variables
let value = replaceVariable(input.value, variables);
// let value = replaceVariable(input.value, variables);

// replace {{$xx.xx$}} variables
value = replaceEditorVariable({
text: value,
let value = replaceEditorVariable({
text: input.value,
nodes: runtimeNodes,
variables
});
@@ -543,7 +545,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
res,
variables,
histories,
user,
timezone,
externalProvider,
stream,
node,
runtimeNodes,
@@ -677,7 +680,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
[DispatchNodeResponseKeyEnum.assistantResponses]:
mergeAssistantResponseAnswerText(chatAssistantResponse),
[DispatchNodeResponseKeyEnum.toolResponses]: toolRunResponse,
newVariables: removeSystemVariable(variables)
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables)
};
} catch (error) {
return Promise.reject(error);
@@ -686,7 +689,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons

/* get system variable */
const getSystemVariable = ({
user,
timezone,
runningAppInfo,
chatId,
responseChatItemId,
@@ -707,7 +710,7 @@ const getSystemVariable = ({
chatId,
responseChatItemId,
histories,
cTime: getSystemTime(user.timezone)
cTime: getSystemTime(timezone)
};
};

@@ -22,7 +22,6 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
params,
runtimeEdges,
runtimeNodes,
user,
node: { name }
} = props;
const { loopInputArray = [], childrenNodeIdList = [] } = params;
@@ -86,14 +85,14 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
return {
[DispatchNodeResponseKeyEnum.assistantResponses]: assistantResponses,
[DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: totalPoints,
totalPoints,
loopInput: loopInputArray,
loopResult: outputValueArr,
loopDetail: loopDetail
},
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
totalPoints: user.openaiAccount?.key ? 0 : totalPoints,
totalPoints,
moduleName: name
}
],
@@ -53,8 +53,8 @@ function checkCondition(condition: VariableConditionEnum, inputValue: any, value
[VariableConditionEnum.isEmpty]: () => isEmpty(inputValue),
[VariableConditionEnum.isNotEmpty]: () => !isEmpty(inputValue),

[VariableConditionEnum.equalTo]: () => String(inputValue) === value,
[VariableConditionEnum.notEqual]: () => String(inputValue) !== value,
[VariableConditionEnum.equalTo]: () => String(inputValue).trim() === value.trim(),
[VariableConditionEnum.notEqual]: () => String(inputValue).trim() !== value.trim(),

// number
[VariableConditionEnum.greaterThan]: () => Number(inputValue) > Number(value),
@@ -67,8 +67,8 @@ function checkCondition(condition: VariableConditionEnum, inputValue: any, value
[VariableConditionEnum.notInclude]: () => !isInclude(inputValue, value),

// string
[VariableConditionEnum.startWith]: () => inputValue?.startsWith(value),
[VariableConditionEnum.endWith]: () => inputValue?.endsWith(value),
[VariableConditionEnum.startWith]: () => inputValue?.trim()?.startsWith(value),
[VariableConditionEnum.endWith]: () => inputValue?.trim()?.endsWith(value),
[VariableConditionEnum.reg]: () => {
if (typeof inputValue !== 'string' || !value) return false;
if (value.startsWith('/')) {
@@ -79,7 +79,7 @@ function checkCondition(condition: VariableConditionEnum, inputValue: any, value
}

const reg = new RegExp(value, 'g');
const result = reg.test(inputValue);
const result = reg.test(inputValue.trim());

return result;
},

@@ -19,7 +19,7 @@ type Props = ModuleDispatchProps<{
type Response = DispatchNodeResultType<{}>;

export const dispatchUpdateVariable = async (props: Props): Promise<Response> => {
const { params, variables, runtimeNodes, workflowStreamResponse, node } = props;
const { params, variables, runtimeNodes, workflowStreamResponse, externalProvider } = props;

const { updateList } = params;
const nodeIds = runtimeNodes.map((node) => node.nodeId);
@@ -41,15 +41,15 @@ export const dispatchUpdateVariable = async (props: Props): Promise<Response> =>
const value = (() => {
// If first item is empty, it means it is a input value
if (!item.value?.[0]) {
const formatValue = valueTypeFormat(item.value?.[1], item.valueType);

return typeof formatValue === 'string'
? replaceEditorVariable({
text: formatValue,
nodes: runtimeNodes,
variables
})
: formatValue;
const val =
typeof item.value?.[1] === 'string'
? replaceEditorVariable({
text: item.value?.[1],
nodes: runtimeNodes,
variables
})
: item.value?.[1];
return valueTypeFormat(val, item.valueType);
} else {
return getReferenceVariableValue({
value: item.value,
@@ -80,7 +80,7 @@ export const dispatchUpdateVariable = async (props: Props): Promise<Response> =>

workflowStreamResponse?.({
event: SseResponseEventEnum.updateVariables,
data: removeSystemVariable(variables)
data: removeSystemVariable(variables, externalProvider.externalWorkflowVariables)
});

return {
@@ -14,6 +14,7 @@ import { NextApiResponse } from 'next';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import json5 from 'json5';

export const getWorkflowResponseWrite = ({
res,
@@ -116,11 +117,18 @@ export const valueTypeFormat = (value: any, type?: WorkflowIOValueTypeEnum) => {
return Boolean(value);
}
try {
if (type === WorkflowIOValueTypeEnum.datasetQuote && !Array.isArray(value)) {
return JSON.parse(value);
}
if (type === WorkflowIOValueTypeEnum.selectDataset && !Array.isArray(value)) {
return JSON.parse(value);
if (
type &&
[
WorkflowIOValueTypeEnum.object,
WorkflowIOValueTypeEnum.chatHistory,
WorkflowIOValueTypeEnum.datasetQuote,
WorkflowIOValueTypeEnum.selectApp,
WorkflowIOValueTypeEnum.selectDataset
].includes(type) &&
typeof value !== 'object'
) {
return json5.parse(value);
}
} catch (error) {
return value;
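For reference, json5 is used above because it accepts the looser object literals that JSON.parse rejects (single quotes, unquoted keys, trailing commas). A small illustration, not part of the diff:

import json5 from 'json5';

// JSON.parse would throw on this input; json5 parses it.
const parsed = json5.parse("{ sourceName: 'doc.md', }");
console.log(parsed.sourceName); // doc.md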
@@ -141,14 +149,23 @@ export const checkQuoteQAValue = (quoteQA?: SearchDataResponseItemType[]) => {
};

/* remove system variable */
export const removeSystemVariable = (variables: Record<string, any>) => {
export const removeSystemVariable = (
variables: Record<string, any>,
removeObj: Record<string, string> = {}
) => {
const copyVariables = { ...variables };
delete copyVariables.userId;
delete copyVariables.appId;
delete copyVariables.chatId;
delete copyVariables.responseChatItemId;
delete copyVariables.histories;
delete copyVariables.cTime;

// delete external provider workflow variables
Object.keys(removeObj).forEach((key) => {
delete copyVariables[key];
});

return copyVariables;
};
export const filterSystemVariables = (variables: Record<string, any>): SystemVariablesType => {
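A hypothetical trace of the extended signature (illustrative values only): the built-in system keys are always stripped, and any keys listed in the second argument, such as team-level external workflow variables, are stripped as well.

// userId and chatId are system keys; EXT_TOKEN is removed via the second argument.
const cleaned = removeSystemVariable(
  { userId: 'u1', chatId: 'c1', customVar: 'keep me', EXT_TOKEN: 'secret' },
  { EXT_TOKEN: 'injected by team config' }
);
// cleaned -> { customVar: 'keep me' }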