Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
V4.9.1 feature (#4206)
* fix: remove DefaultTeam (#4037)
* fix: rewrite the logic for fetching the knowledge bases bound to an application (#4057)
* update package
* fix: import dataset step error; perf: ai proxy avatar (#4074) - perf: pg config params; feat: data input ux; perf: app dataset rewrite
* fix: text extraction does not support arrayString, arrayNumber, and other jsonSchema types (#4079)
* update doc; perf: model test (#4098) - perf: extract array
* perf: think tag parse (#4102)
* chat quote reader (#3912) - full-text chat quote reader with linked dataset data, initial scroll, and ts/build fixes
* perf: chunk read (#4109)
* feat: api dataset support pdf parse; fix: chunk reader auth (#4117)
* feat: invitation link (#3979) - invitation link schema and APIs; member status: active, leave, forbidden; expires shows hours and minutes; invalid invitation link hint; TTL index for 30-day clean-up; typo and i18n fixes
* perf: invite member code (#4118)
* fix: model test channel id; fix: quote reader (#4123)
* fix chat quote reader (#4125)
* perf: model test; perf: sidebar trigger (#4127) - re-applies the #4074 changes, plus: lock; update nanoid version; fix: select component ux; fix: ts; fix: vitest; remove test
* fix: prompt toolcall ui (#4139) - load log error adapt; perf: commercial function tip; update package
* perf: copy link (#4147)
* fix(i18n): namespace (#4143)
* hidden dataset source (#4152) - perf: reader
* chore: move all tests into a single folder (#4160)
* fix modal close scroll (#4162) - update refresh
* feat: rerank model select and weight (#4164)
* fix loadInitData refresh (#4169) - form input number default; api dataset max token
* feat: mix search weight (#4170) - feat: svg render
* fix: avatar error remove (#4173) - fix: index; fix: guide; fix: auth
* update package; fix: input data model ui (#4181) - fix: ts; update config; update jieba package; add type sign
* fix: page title refresh (#4186) - fix: ts; update jieba package
* fix: remove member length check when opening invite create modal (#4193)
* add env to check internal ip (#4187) - fix: ts; update jieba package; fix: jieba; reset package; update config; fix: jieba package; init shell; init version; change team reload
* update jieba package (#4200) - package; update package; remove invalid code; action
* package (#4201) - update package; remove invalid code
* remove i18n tip (#4202)
* doc (#4205)
* fix: i18n (#4208)
* fix: next config (#4207) - reset package; i18n; update config; remove log

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
Co-authored-by: shilin <39396378+shilin66@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
@@ -76,7 +76,7 @@ export const refreshSourceAvatar = async (
   const newId = getIdFromPath(path);
   const oldId = getIdFromPath(oldPath);

-  if (!newId) return;
+  if (!newId || newId === oldId) return;

   await MongoImage.updateOne({ _id: newId }, { $unset: { expiredTime: 1 } }, { session });
@@ -300,6 +300,9 @@ export const readRawContentByFileBuffer = async ({
     return systemParse();
   };

+  const start = Date.now();
+  addLog.debug(`Start parse file`, { extension });
+
   let { rawText, formatText, imageList } = await (async () => {
     if (extension === 'pdf') {
       return await pdfParseFn();
@@ -307,6 +310,8 @@ export const readRawContentByFileBuffer = async ({
     return await systemParse();
   })();

+  addLog.debug(`Parse file success, time: ${Date.now() - start}ms. Uploading file image.`);
+
   // markdown data format
   if (imageList) {
     await batchRun(imageList, async (item) => {
@@ -341,5 +346,7 @@ export const readRawContentByFileBuffer = async ({
     }
   }

+  addLog.debug(`Upload file image success, time: ${Date.now() - start}ms`);
+
   return { rawText, formatText, imageList };
 };
packages/service/common/string/jieba/dict.json (new file, 3 lines)
File diff suppressed because one or more lines are too long
@@ -1,4 +1,13 @@
-import { cut } from '@node-rs/jieba';
+import { Jieba } from '@node-rs/jieba';
+
+let jieba: Jieba | undefined;
+
+(async () => {
+  const dictData = await import('./dict.json');
+  // @ts-ignore
+  const dictBuffer = Buffer.from(dictData.dict?.replace(/\\n/g, '\n'), 'utf-8');
+  jieba = Jieba.withDict(dictBuffer);
+})();

 const stopWords = new Set([
   '--',
@@ -1509,8 +1518,10 @@ const stopWords = new Set([
   ]
 ]);

-export function jiebaSplit({ text }: { text: string }) {
-  const tokens = cut(text, true);
+export async function jiebaSplit({ text }: { text: string }) {
+  text = text.replace(/[#*`_~>[\](){}|]/g, '').replace(/\S*https?\S*/gi, '');
+
+  const tokens = (await jieba!.cutAsync(text, true)) as string[];

   return (
     tokens
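Because the dictionary is now loaded asynchronously through Jieba.withDict, jiebaSplit itself becomes async and every call site has to await it (the full-text recall query further down does exactly that). A minimal sketch of the new call shape; the sample text is illustrative:

  // Strips markdown punctuation and URLs, then tokenizes with jieba's async
  // cutter; the returned string feeds MongoDB's $text full-text search.
  const searchString = await jiebaSplit({ text: 'hybrid retrieval full text recall' });
  // e.g. used as: { $text: { $search: searchString } }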
@@ -30,6 +30,8 @@ export const isInternalAddress = (url: string): boolean => {
     return true;
   }

+  if (process.env.CHECK_INTERNAL_IP !== 'true') return false;
+
   // For IP addresses, check if they are internal
   const ipv4Pattern = /^(\d{1,3}\.){3}\d{1,3}$/;
   if (!ipv4Pattern.test(hostname)) {
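The new guard makes internal-IP filtering opt-in: unless the deployment sets CHECK_INTERNAL_IP=true, the IPv4 range checks are skipped and the address is treated as external. A hedged sketch; the return values assume conventional private-range checks in the elided code:

  process.env.CHECK_INTERNAL_IP = 'true'; // e.g. exported in docker-compose
  isInternalAddress('http://10.0.0.5/api'); // presumably true: 10.0.0.0/8 is a private range
  isInternalAddress('https://example.com'); // hostname is not an IPv4 literal, not flagged here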
@@ -38,6 +38,27 @@ export class PgVectorCtrl {
       await PgClient.query(
         `CREATE INDEX CONCURRENTLY IF NOT EXISTS create_time_index ON ${DatasetVectorTableName} USING btree(createtime);`
       );
+      // ~100k rows
+      // await PgClient.query(`
+      //   ALTER TABLE modeldata SET (
+      //     autovacuum_vacuum_scale_factor = 0.1,
+      //     autovacuum_analyze_scale_factor = 0.05,
+      //     autovacuum_vacuum_threshold = 50,
+      //     autovacuum_analyze_threshold = 50,
+      //     autovacuum_vacuum_cost_delay = 20,
+      //     autovacuum_vacuum_cost_limit = 200
+      //   );`);
+
+      // ~1M rows
+      // await PgClient.query(`
+      //   ALTER TABLE modeldata SET (
+      //     autovacuum_vacuum_scale_factor = 0.01,
+      //     autovacuum_analyze_scale_factor = 0.02,
+      //     autovacuum_vacuum_threshold = 1000,
+      //     autovacuum_analyze_threshold = 1000,
+      //     autovacuum_vacuum_cost_delay = 10,
+      //     autovacuum_vacuum_cost_limit = 2000
+      //   );`)
+
       addLog.info('init pg successful');
     } catch (error) {
@@ -6,10 +6,12 @@ import { getSTTModel } from '../model';

 export const aiTranscriptions = async ({
   model,
-  fileStream
+  fileStream,
+  headers
 }: {
   model: string;
   fileStream: fs.ReadStream;
+  headers?: Record<string, string>;
 }) => {
   const data = new FormData();
   data.append('model', model);
@@ -30,7 +32,8 @@ export const aiTranscriptions = async ({
       Authorization: modelData.requestAuth
         ? `Bearer ${modelData.requestAuth}`
         : aiAxiosConfig.authorization,
-      ...data.getHeaders()
+      ...data.getHeaders(),
+      ...headers
     },
     data: data
   });
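Transcriptions, embeddings, and rerank requests all gain the same optional headers pass-through in this commit, spread last so caller-supplied headers override the defaults. A hedged usage sketch; the model id, file path, and header name are illustrative:

  import fs from 'fs';

  await aiTranscriptions({
    model: 'whisper-1', // assumed STT model id
    fileStream: fs.createReadStream('/tmp/audio.mp3'),
    headers: { 'X-Session-Id': 'abc123' } // merged after data.getHeaders()
  });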
@@ -76,6 +76,10 @@ export const createChatCompletion = async ({
     timeout: formatTimeout
   });

+  addLog.debug(`Start create chat completion`, {
+    model: body.model
+  });
+
   const response = await ai.chat.completions.create(body, {
     ...options,
     ...(modelConstantsData.requestUrl ? { path: modelConstantsData.requestUrl } : {}),
@@ -76,7 +76,7 @@
     {
       "model": "qwen-max",
       "name": "Qwen-max",
-      "maxContext": 8000,
+      "maxContext": 32000,
       "maxResponse": 4000,
       "quoteMaxToken": 6000,
       "maxTemperature": 1,
@@ -8,7 +8,7 @@ import {
   EmbeddingModelItemType,
   TTSModelType,
   STTModelType,
-  ReRankModelItemType
+  RerankModelItemType
 } from '@fastgpt/global/core/ai/model.d';
 import { debounce } from 'lodash';
 import {
@@ -94,7 +94,7 @@ export const loadSystemModels = async (init = false) => {
   global.embeddingModelMap = new Map<string, EmbeddingModelItemType>();
   global.ttsModelMap = new Map<string, TTSModelType>();
   global.sttModelMap = new Map<string, STTModelType>();
-  global.reRankModelMap = new Map<string, ReRankModelItemType>();
+  global.reRankModelMap = new Map<string, RerankModelItemType>();
   // @ts-ignore
   global.systemDefaultModel = {};
@@ -8,10 +8,11 @@ type GetVectorProps = {
   model: EmbeddingModelItemType;
   input: string;
   type?: `${EmbeddingTypeEnm}`;
+  headers?: Record<string, string>;
 };

 // text to vector
-export async function getVectorsByText({ model, input, type }: GetVectorProps) {
+export async function getVectorsByText({ model, input, type, headers }: GetVectorProps) {
   if (!input) {
     return Promise.reject({
       code: 500,
@@ -35,13 +36,12 @@ export async function getVectorsByText({ model, input, type, headers }: GetVectorProps) {
         model.requestUrl
           ? {
               path: model.requestUrl,
-              headers: model.requestAuth
-                ? {
-                    Authorization: `Bearer ${model.requestAuth}`
-                  }
-                : undefined
+              headers: {
+                ...(model.requestAuth ? { Authorization: `Bearer ${model.requestAuth}` } : {}),
+                ...headers
+              }
             }
-          : {}
+          : { headers }
       )
       .then(async (res) => {
         if (!res.data) {
@@ -38,7 +38,7 @@ export function getSTTModel(model?: string) {
 }

 export const getDefaultRerankModel = () => global?.systemDefaultModel.rerank!;
-export function getReRankModel(model?: string) {
+export function getRerankModel(model?: string) {
   if (!model) return getDefaultRerankModel();
   return global.reRankModelMap.get(model) || getDefaultRerankModel();
 }
@@ -2,7 +2,7 @@ import { addLog } from '../../../common/system/log';
 import { POST } from '../../../common/api/serverRequest';
 import { getDefaultRerankModel } from '../model';
 import { getAxiosConfig } from '../config';
-import { ReRankModelItemType } from '@fastgpt/global/core/ai/model.d';
+import { RerankModelItemType } from '@fastgpt/global/core/ai/model.d';

 type PostReRankResponse = {
   id: string;
@@ -16,11 +16,13 @@ type ReRankCallResult = { id: string; score?: number }[];
 export function reRankRecall({
   model = getDefaultRerankModel(),
   query,
-  documents
+  documents,
+  headers
 }: {
-  model?: ReRankModelItemType;
+  model?: RerankModelItemType;
   query: string;
   documents: { id: string; text: string }[];
+  headers?: Record<string, string>;
 }): Promise<ReRankCallResult> {
   if (!model) {
     return Promise.reject('no rerank model');
@@ -41,7 +43,8 @@ export function reRankRecall({
     },
     {
       headers: {
-        Authorization: model.requestAuth ? `Bearer ${model.requestAuth}` : authorization
+        Authorization: model.requestAuth ? `Bearer ${model.requestAuth}` : authorization,
+        ...headers
       },
       timeout: 30000
     }
packages/service/core/ai/type.d.ts (vendored, 8 lines changed)
@@ -1,7 +1,7 @@
 import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
 import {
   STTModelType,
-  ReRankModelItemType,
+  RerankModelItemType,
   TTSModelType,
   EmbeddingModelItemType,
   LLMModelItemType
@@ -18,7 +18,7 @@ export type SystemModelItemType =
   | EmbeddingModelItemType
   | TTSModelType
   | STTModelType
-  | ReRankModelItemType;
+  | RerankModelItemType;

 export type SystemDefaultModelType = {
   [ModelTypeEnum.llm]?: LLMModelItemType;
@@ -28,7 +28,7 @@ export type SystemDefaultModelType = {
   [ModelTypeEnum.embedding]?: EmbeddingModelItemType;
   [ModelTypeEnum.tts]?: TTSModelType;
   [ModelTypeEnum.stt]?: STTModelType;
-  [ModelTypeEnum.rerank]?: ReRankModelItemType;
+  [ModelTypeEnum.rerank]?: RerankModelItemType;
 };

 declare global {
@@ -38,7 +38,7 @@ declare global {
   var embeddingModelMap: Map<string, EmbeddingModelItemType>;
   var ttsModelMap: Map<string, TTSModelType>;
   var sttModelMap: Map<string, STTModelType>;
-  var reRankModelMap: Map<string, ReRankModelItemType>;
+  var reRankModelMap: Map<string, RerankModelItemType>;

   var systemActiveModelList: SystemModelItemType[];
   var systemDefaultModel: SystemDefaultModelType;
@@ -132,7 +132,7 @@ export const parseReasoningStreamContent = () => {
   let endTagBuffer = '';

   /*
-    parseReasoning - only controls whether to actively parse <think></think>; if the API has already parsed it, the think content is still returned.
+    parseThinkTag - only controls whether to actively parse <think></think>; if the API has already parsed it, it is not parsed again.
   */
   const parsePart = (
     part: {
@@ -143,13 +143,13 @@ export const parseReasoningStreamContent = () => {
       };
     }[];
   },
-  parseReasoning = false
+  parseThinkTag = false
   ): [string, string] => {
     const content = part.choices?.[0]?.delta?.content || '';

     // @ts-ignore
     const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
-    if (reasoningContent || !parseReasoning) {
+    if (reasoningContent || !parseThinkTag) {
       isInThinkTag = false;
       return [reasoningContent, content];
     }
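The rename separates two concerns: aiChatReasoning now only controls whether reasoning is streamed back to the client, while parseThinkTag controls whether <think> tags are actively parsed (driven by modelConstantsData.reasoning in the chat-node diff below). A sketch of the call contract:

  // parsePart returns a [reasoning, answer] pair for each stream chunk.
  // If the provider already emits delta.reasoning_content, that is passed
  // through unchanged regardless of parseThinkTag.
  const [reasoningContent, content] = parsePart(part, /* parseThinkTag */ true);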
packages/service/core/app/utils.ts (new file, 149 lines)
@@ -0,0 +1,149 @@
import { MongoDataset } from '../dataset/schema';
import { getEmbeddingModel } from '../ai/model';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';

export async function listAppDatasetDataByTeamIdAndDatasetIds({
  teamId,
  datasetIdList
}: {
  teamId?: string;
  datasetIdList: string[];
}) {
  const myDatasets = await MongoDataset.find({
    _id: { $in: datasetIdList },
    ...(teamId && { teamId })
  }).lean();

  return myDatasets.map((item) => ({
    datasetId: String(item._id),
    avatar: item.avatar,
    name: item.name,
    vectorModel: getEmbeddingModel(item.vectorModel)
  }));
}

export async function rewriteAppWorkflowToDetail({
  nodes,
  teamId,
  isRoot
}: {
  nodes: StoreNodeItemType[];
  teamId: string;
  isRoot: boolean;
}) {
  const datasetIdSet = new Set<string>();

  // Get all dataset ids from nodes
  nodes.forEach((node) => {
    if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;

    const input = node.inputs.find((item) => item.key === NodeInputKeyEnum.datasetSelectList);
    if (!input) return;

    const rawValue = input.value as undefined | { datasetId: string }[] | { datasetId: string };
    if (!rawValue) return;

    const datasetIds = Array.isArray(rawValue)
      ? rawValue.map((v) => v?.datasetId).filter((id) => !!id && typeof id === 'string')
      : rawValue?.datasetId
        ? [String(rawValue.datasetId)]
        : [];

    datasetIds.forEach((id) => datasetIdSet.add(id));
  });

  if (datasetIdSet.size === 0) return;

  // Load dataset list
  const datasetList = await listAppDatasetDataByTeamIdAndDatasetIds({
    teamId: isRoot ? undefined : teamId,
    datasetIdList: Array.from(datasetIdSet)
  });
  const datasetMap = new Map(datasetList.map((ds) => [String(ds.datasetId), ds]));

  // Rewrite dataset ids, add dataset info to nodes
  if (datasetList.length > 0) {
    nodes.forEach((node) => {
      if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;

      node.inputs.forEach((item) => {
        if (item.key !== NodeInputKeyEnum.datasetSelectList) return;

        const val = item.value as undefined | { datasetId: string }[] | { datasetId: string };

        if (Array.isArray(val)) {
          item.value = val
            .map((v) => {
              const data = datasetMap.get(String(v.datasetId));
              if (!data)
                return {
                  datasetId: v.datasetId,
                  avatar: '',
                  name: 'Dataset not found',
                  vectorModel: ''
                };
              return {
                datasetId: data.datasetId,
                avatar: data.avatar,
                name: data.name,
                vectorModel: data.vectorModel
              };
            })
            .filter(Boolean);
        } else if (typeof val === 'object' && val !== null) {
          const data = datasetMap.get(String(val.datasetId));
          if (!data) {
            item.value = [
              {
                datasetId: val.datasetId,
                avatar: '',
                name: 'Dataset not found',
                vectorModel: ''
              }
            ];
          } else {
            item.value = [
              {
                datasetId: data.datasetId,
                avatar: data.avatar,
                name: data.name,
                vectorModel: data.vectorModel
              }
            ];
          }
        }
      });
    });
  }

  return nodes;
}

export async function rewriteAppWorkflowToSimple(formatNodes: StoreNodeItemType[]) {
  formatNodes.forEach((node) => {
    if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;

    node.inputs.forEach((input) => {
      if (input.key === NodeInputKeyEnum.datasetSelectList) {
        const val = input.value as undefined | { datasetId: string }[] | { datasetId: string };
        if (!val) {
          input.value = [];
        } else if (Array.isArray(val)) {
          input.value = val
            .map((dataset: { datasetId: string }) => ({
              datasetId: dataset.datasetId
            }))
            .filter((item) => !!item.datasetId);
        } else if (typeof val === 'object' && val !== null) {
          input.value = [
            {
              datasetId: val.datasetId
            }
          ];
        }
      }
    });
  });
}
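The two rewrites are symmetric: rewriteAppWorkflowToDetail enriches stored datasetId values with avatar, name, and vectorModel for display, and rewriteAppWorkflowToSimple strips them back to bare ids before persisting. An illustration of the normalization, with assumed values:

  // rewriteAppWorkflowToSimple maps every legacy value shape to a plain array:
  //   undefined                                        -> []
  //   { datasetId: 'a' }                               -> [{ datasetId: 'a' }]
  //   [{ datasetId: 'a', name: 'X', avatar: 'x.svg' }] -> [{ datasetId: 'a' }]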
@@ -15,6 +15,7 @@ import { AppChatConfigType } from '@fastgpt/global/core/app/type';
 import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils';
 import { pushChatLog } from './pushChatLog';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
+import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';

 type Props = {
   chatId: string;
@@ -73,9 +74,44 @@ export async function saveChat({
     (node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput
   )?.inputs;

+  // Format save chat content: Remove quote q/a
+  const processedContent = content.map((item) => {
+    if (item.obj === ChatRoleEnum.AI) {
+      const nodeResponse = item[DispatchNodeResponseKeyEnum.nodeResponse];
+
+      if (nodeResponse) {
+        return {
+          ...item,
+          [DispatchNodeResponseKeyEnum.nodeResponse]: nodeResponse.map((responseItem) => {
+            if (
+              responseItem.moduleType === FlowNodeTypeEnum.datasetSearchNode &&
+              responseItem.quoteList
+            ) {
+              return {
+                ...responseItem,
+                quoteList: responseItem.quoteList.map((quote: any) => ({
+                  id: quote.id,
+                  chunkIndex: quote.chunkIndex,
+                  datasetId: quote.datasetId,
+                  collectionId: quote.collectionId,
+                  sourceId: quote.sourceId,
+                  sourceName: quote.sourceName,
+                  score: quote.score,
+                  tokens: quote.tokens
+                }))
+              };
+            }
+            return responseItem;
+          })
+        };
+      }
+    }
+    return item;
+  });
+
   await mongoSessionRun(async (session) => {
     const [{ _id: chatItemIdHuman }, { _id: chatItemIdAi }] = await MongoChatItem.insertMany(
-      content.map((item) => ({
+      processedContent.map((item) => ({
         chatId,
         teamId,
         tmbId,
@@ -111,11 +111,13 @@ export const useApiDatasetRequest = ({ apiServer }: { apiServer: APIFileServer }
   const getFileContent = async ({
     teamId,
     tmbId,
-    apiFileId
+    apiFileId,
+    customPdfParse
   }: {
     teamId: string;
     tmbId: string;
     apiFileId: string;
+    customPdfParse?: boolean;
   }) => {
     const data = await request<APIFileContentResponse>(
       `/v1/file/content`,
@@ -133,7 +135,8 @@ export const useApiDatasetRequest = ({ apiServer }: { apiServer: APIFileServer }
       teamId,
       tmbId,
       url: previewUrl,
-      relatedId: apiFileId
+      relatedId: apiFileId,
+      customPdfParse
     });
     return rawText;
   }
@@ -41,7 +41,7 @@ try {
     }
   );
   DatasetDataTextSchema.index({ teamId: 1, datasetId: 1, collectionId: 1 });
-  DatasetDataTextSchema.index({ dataId: 1 }, { unique: true });
+  DatasetDataTextSchema.index({ dataId: 'hashed' });
 } catch (error) {
   console.log(error);
 }
@@ -40,6 +40,15 @@ const DatasetDataSchema = new Schema({
     type: String,
     default: ''
   },
+  history: {
+    type: [
+      {
+        q: String,
+        a: String,
+        updateTime: Date
+      }
+    ]
+  },
   indexes: {
     type: [
       {
@@ -77,7 +86,8 @@ const DatasetDataSchema = new Schema({

   // Abandon
   fullTextToken: String,
-  initFullText: Boolean
+  initFullText: Boolean,
+  initJieba: Boolean
 });

 try {
@@ -89,15 +99,14 @@ try {
     chunkIndex: 1,
     updateTime: -1
   });
-  // FullText tmp full text index
-  // DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
   // Recall vectors after data matching
   DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
   DatasetDataSchema.index({ updateTime: 1 });
   // rebuild data
   DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });

-  DatasetDataSchema.index({ initFullText: 1 });
+  // Index for querying rows where the initJieba field does not exist yet
+  DatasetDataSchema.index({ initJieba: 1, updateTime: 1 });
 } catch (error) {
   console.log(error);
 }
@@ -127,7 +127,8 @@ export const readApiServerFileContent = async ({
   yuqueServer,
   apiFileId,
   teamId,
-  tmbId
+  tmbId,
+  customPdfParse
 }: {
   apiServer?: APIFileServer;
   feishuServer?: FeishuServer;
@@ -135,9 +136,15 @@ export const readApiServerFileContent = async ({
   apiFileId: string;
   teamId: string;
   tmbId: string;
+  customPdfParse?: boolean;
 }) => {
   if (apiServer) {
-    return useApiDatasetRequest({ apiServer }).getFileContent({ teamId, tmbId, apiFileId });
+    return useApiDatasetRequest({ apiServer }).getFileContent({
+      teamId,
+      tmbId,
+      apiFileId,
+      customPdfParse
+    });
   }

   if (feishuServer || yuqueServer) {
@@ -16,7 +16,7 @@ import { reRankRecall } from '../../../core/ai/rerank';
 import { countPromptTokens } from '../../../common/string/tiktoken/index';
 import { datasetSearchResultConcat } from '@fastgpt/global/core/dataset/search/utils';
 import { hashStr } from '@fastgpt/global/common/string/tools';
-import { jiebaSplit } from '../../../common/string/jieba';
+import { jiebaSplit } from '../../../common/string/jieba/index';
 import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
 import { Types } from '../../../common/mongo';
 import json5 from 'json5';
@@ -27,6 +27,7 @@ import { ChatItemType } from '@fastgpt/global/core/chat/type';
 import { POST } from '../../../common/api/plusRequest';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { datasetSearchQueryExtension } from './utils';
+import type { RerankModelItemType } from '@fastgpt/global/core/ai/model.d';

 export type SearchDatasetDataProps = {
   histories: ChatItemType[];
@@ -39,7 +40,11 @@ export type SearchDatasetDataProps = {
   [NodeInputKeyEnum.datasetSimilarity]?: number; // min distance
   [NodeInputKeyEnum.datasetMaxTokens]: number; // max Token limit
   [NodeInputKeyEnum.datasetSearchMode]?: `${DatasetSearchModeEnum}`;
+  [NodeInputKeyEnum.datasetSearchEmbeddingWeight]?: number;
+
   [NodeInputKeyEnum.datasetSearchUsingReRank]?: boolean;
+  [NodeInputKeyEnum.datasetSearchRerankModel]?: RerankModelItemType;
+  [NodeInputKeyEnum.datasetSearchRerankWeight]?: number;

   /*
     {
@@ -75,13 +80,16 @@ export type SearchDatasetDataResponse = {
 };

 export const datasetDataReRank = async ({
+  rerankModel,
   data,
   query
 }: {
+  rerankModel?: RerankModelItemType;
   data: SearchDataResponseItemType[];
   query: string;
 }): Promise<SearchDataResponseItemType[]> => {
   const results = await reRankRecall({
+    model: rerankModel,
     query,
     documents: data.map((item) => ({
       id: item.id,
@@ -154,7 +162,10 @@ export async function searchDatasetData(
     similarity = 0,
     limit: maxTokens,
     searchMode = DatasetSearchModeEnum.embedding,
+    embeddingWeight = 0.5,
     usingReRank = false,
+    rerankModel,
+    rerankWeight = 0.5,
     datasetIds = [],
     collectionFilterMatch
   } = props;
@@ -526,7 +537,7 @@ export async function searchDatasetData(
         $match: {
           teamId: new Types.ObjectId(teamId),
           datasetId: new Types.ObjectId(id),
-          $text: { $search: jiebaSplit({ text: query }) },
+          $text: { $search: await jiebaSplit({ text: query }) },
           ...(filterCollectionIdList
             ? {
                 collectionId: {
@@ -711,6 +722,7 @@ export async function searchDatasetData(
       });
       try {
         return await datasetDataReRank({
+          rerankModel,
           query: reRankQuery,
           data: filterSameDataResults
         });
@@ -721,11 +733,26 @@ export async function searchDatasetData(
   })();

   // embedding recall and fullText recall rrf concat
-  const rrfConcatResults = datasetSearchResultConcat([
-    { k: 60, list: embeddingRecallResults },
-    { k: 60, list: fullTextRecallResults },
-    { k: 58, list: reRankResults }
-  ]);
+  const baseK = 120;
+  const embK = Math.round(baseK * (1 - embeddingWeight)); // k for the embedding list (smaller k = more weight)
+  const fullTextK = Math.round(baseK * embeddingWeight); // k for the full-text list
+
+  const rrfSearchResult = datasetSearchResultConcat([
+    { k: embK, list: embeddingRecallResults },
+    { k: fullTextK, list: fullTextRecallResults }
+  ]);
+  const rrfConcatResults = (() => {
+    if (reRankResults.length === 0) return rrfSearchResult;
+    if (rerankWeight === 1) return reRankResults;
+
+    const searchK = Math.round(baseK * rerankWeight); // k for the merged search results
+    const rerankK = Math.round(baseK * (1 - rerankWeight)); // k for the rerank results
+
+    return datasetSearchResultConcat([
+      { k: searchK, list: rrfSearchResult },
+      { k: rerankK, list: reRankResults }
+    ]);
+  })();

   // remove same q and a data
   set = new Set<string>();
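To make the weighting concrete: datasetSearchResultConcat performs an RRF-style merge, where each list contributes roughly 1 / (k + rank) per item, so a smaller k gives that list more influence. Worked numbers under assumed weights:

  const baseK = 120;

  // embeddingWeight = 0.7 favors embedding recall:
  Math.round(baseK * (1 - 0.7)); // embK = 36 -> stronger contribution
  Math.round(baseK * 0.7); // fullTextK = 84 -> weaker contribution

  // rerankWeight = 0.5 splits the second merge evenly (60/60);
  // rerankWeight = 1 short-circuits and returns reRankResults directly.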
@@ -9,7 +9,11 @@ import {
 import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { createChatCompletion } from '../../../ai/config';
 import type { ContextExtractAgentItemType } from '@fastgpt/global/core/workflow/template/system/contextExtract/type';
-import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
+import {
+  NodeInputKeyEnum,
+  NodeOutputKeyEnum,
+  toolValueTypeList
+} from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
 import { Prompt_ExtractJson } from '@fastgpt/global/core/ai/prompt/agent';
@@ -192,10 +196,13 @@ ${description ? `- ${description}` : ''}
   }
 > = {};
 extractKeys.forEach((item) => {
+  const jsonSchema = (
+    toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
+  )?.jsonSchema;
   properties[item.key] = {
-    type: item.valueType || 'string',
+    ...jsonSchema,
     description: item.desc,
-    ...(item.enum ? { enum: item.enum.split('\n') } : {})
+    ...(item.enum ? { enum: item.enum.split('\n').filter(Boolean) } : {})
   };
 });
 // function body
@@ -1,11 +1,6 @@
 import { createChatCompletion } from '../../../../ai/config';
 import { filterGPTMessageByMaxContext, loadRequestMessages } from '../../../../chat/utils';
-import {
-  ChatCompletion,
-  StreamChatType,
-  ChatCompletionMessageParam,
-  ChatCompletionAssistantMessageParam
-} from '@fastgpt/global/core/ai/type';
+import { StreamChatType, ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
 import { NextApiResponse } from 'next';
 import { responseWriteController } from '../../../../../common/response';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@@ -208,6 +208,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       res,
       stream: response,
       aiChatReasoning,
+      parseThinkTag: modelConstantsData.reasoning,
       isResponseAnswerText,
       workflowStreamResponse
     });
@@ -513,12 +514,14 @@ async function streamResponse({
   stream,
   workflowStreamResponse,
   aiChatReasoning,
+  parseThinkTag,
   isResponseAnswerText
 }: {
   res: NextApiResponse;
   stream: StreamChatType;
   workflowStreamResponse?: WorkflowResponseType;
   aiChatReasoning?: boolean;
+  parseThinkTag?: boolean;
   isResponseAnswerText?: boolean;
 }) {
   const write = responseWriteController({
@@ -535,7 +538,7 @@ async function streamResponse({
       break;
     }

-    const [reasoningContent, content] = parsePart(part, aiChatReasoning);
+    const [reasoningContent, content] = parsePart(part, parseThinkTag);
     answer += content;
     reasoning += reasoningContent;
@@ -6,7 +6,7 @@ import { formatModelChars2Points } from '../../../../support/wallet/usage/utils'
 import type { SelectedDatasetType } from '@fastgpt/global/core/workflow/api.d';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
-import { getEmbeddingModel } from '../../../ai/model';
+import { getEmbeddingModel, getRerankModel } from '../../../ai/model';
 import { deepRagSearch, defaultSearchDatasetData } from '../../../dataset/search/controller';
 import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@@ -22,9 +22,14 @@ type DatasetSearchProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.datasetSelectList]: SelectedDatasetType;
   [NodeInputKeyEnum.datasetSimilarity]: number;
   [NodeInputKeyEnum.datasetMaxTokens]: number;
-  [NodeInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
   [NodeInputKeyEnum.userChatInput]?: string;
+  [NodeInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
+  [NodeInputKeyEnum.datasetSearchEmbeddingWeight]?: number;
+
   [NodeInputKeyEnum.datasetSearchUsingReRank]: boolean;
+  [NodeInputKeyEnum.datasetSearchRerankModel]?: string;
+  [NodeInputKeyEnum.datasetSearchRerankWeight]?: number;
+
   [NodeInputKeyEnum.collectionFilterMatch]: string;
   [NodeInputKeyEnum.authTmbId]?: boolean;
@@ -53,11 +58,14 @@ export async function dispatchDatasetSearch(
     datasets = [],
     similarity,
     limit = 1500,
-    usingReRank,
-    searchMode,
     userChatInput = '',
     authTmbId = false,
     collectionFilterMatch,
+    searchMode,
+    embeddingWeight,
+    usingReRank,
+    rerankModel,
+    rerankWeight,

     datasetSearchUsingExtensionQuery,
     datasetSearchExtensionModel,
@@ -122,7 +130,10 @@ export async function dispatchDatasetSearch(
     limit,
     datasetIds,
     searchMode,
+    embeddingWeight,
     usingReRank: usingReRank && (await checkTeamReRankPermission(teamId)),
+    rerankModel: getRerankModel(rerankModel),
+    rerankWeight,
     collectionFilterMatch
   };
   const {
@@ -219,6 +230,9 @@ export async function dispatchDatasetSearch(
     similarity: usingSimilarityFilter ? similarity : undefined,
     limit,
     searchMode,
+    embeddingWeight: searchMode === DatasetSearchModeEnum.mixedRecall ? embeddingWeight : undefined,
+    rerankModel: usingReRank ? getRerankModel(rerankModel)?.name : undefined,
+    rerankWeight: usingReRank ? rerankWeight : undefined,
     searchUsingReRank: searchUsingReRank,
     quoteList: searchRes,
     queryExtensionResult,
@@ -21,7 +21,7 @@ import {
   FlowNodeInputTypeEnum,
   FlowNodeTypeEnum
 } from '@fastgpt/global/core/workflow/node/constant';
-import { getNanoid, replaceVariable } from '@fastgpt/global/common/string/tools';
+import { getNanoid } from '@fastgpt/global/common/string/tools';
 import { getSystemTime } from '@fastgpt/global/common/time/timezone';

 import { dispatchWorkflowStart } from './init/workflowStart';
@@ -426,6 +426,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
       })();

       if (!nodeRunResult) return [];
+      if (res?.closed) {
+        addLog.warn('Request is closed', {
+          appId: props.runningAppInfo.id,
+          nodeId: node.nodeId,
+          nodeName: node.name
+        });
+        return [];
+      }

       /*
         Special cases:
@@ -3,13 +3,13 @@
   "version": "1.0.0",
   "dependencies": {
     "@fastgpt/global": "workspace:*",
-    "@node-rs/jieba": "1.10.0",
+    "@node-rs/jieba": "2.0.1",
     "@xmldom/xmldom": "^0.8.10",
     "@zilliz/milvus2-sdk-node": "2.4.2",
     "axios": "^1.8.2",
     "chalk": "^5.3.0",
     "cheerio": "1.0.0-rc.12",
-    "cookie": "^0.5.0",
+    "cookie": "^0.7.1",
     "date-fns": "2.30.0",
     "dayjs": "^1.11.7",
     "decompress": "^4.2.1",
@@ -26,7 +26,7 @@
     "mammoth": "^1.6.0",
     "mongoose": "^8.10.1",
     "multer": "1.4.5-lts.1",
-    "next": "14.2.21",
+    "next": "14.2.24",
     "nextjs-cors": "^2.2.0",
     "node-cron": "^3.0.3",
     "node-xlsx": "^0.24.0",
@@ -51,6 +51,9 @@ const OutLinkSchema = new Schema({
     type: Boolean,
     default: true
   },
+  // showFullText: {
+  //   type: Boolean
+  // },
   showRawSource: {
     type: Boolean
   },
@@ -87,8 +87,7 @@ export async function createDefaultTeam({
 }) {
   // auth default team
   const tmb = await MongoTeamMember.findOne({
-    userId: new Types.ObjectId(userId),
-    defaultTeam: true
+    userId: new Types.ObjectId(userId)
   });

   if (!tmb) {
@@ -113,8 +112,7 @@ export async function createDefaultTeam({
         name: 'Owner',
         role: TeamMemberRoleEnum.owner,
         status: TeamMemberStatusEnum.active,
-        createTime: new Date(),
-        defaultTeam: true
+        createTime: new Date()
       }
     ],
     { session }
@@ -0,0 +1 @@
+export const MaxInvitationLinksAmount = 10;
packages/service/support/user/team/invitationLink/schema.ts (new file, 49 lines)
@@ -0,0 +1,49 @@
import {
  TeamCollectionName,
  TeamMemberCollectionName
} from '@fastgpt/global/support/user/team/constant';
import { connectionMongo, getMongoModel } from '../../../../common/mongo';
import { InvitationSchemaType } from './type';
import addDays from 'date-fns/esm/fp/addDays/index.js';
const { Schema } = connectionMongo;

export const InvitationCollectionName = 'team_invitation_links';

const InvitationSchema = new Schema({
  teamId: {
    type: Schema.Types.ObjectId,
    ref: TeamCollectionName,
    required: true
  },
  usedTimesLimit: {
    type: Number,
    default: 1,
    enum: [1, -1]
  },
  forbidden: Boolean,
  expires: Date,
  description: String,
  members: {
    type: [String],
    default: []
  }
});

InvitationSchema.virtual('team', {
  ref: TeamCollectionName,
  localField: 'teamId',
  foreignField: '_id',
  justOne: true
});

try {
  InvitationSchema.index({ teamId: 1 });
  InvitationSchema.index({ expires: 1 }, { expireAfterSeconds: 30 * 24 * 60 * 60 });
} catch (error) {
  console.log(error);
}

export const MongoInvitationLink = getMongoModel<InvitationSchemaType>(
  InvitationCollectionName,
  InvitationSchema
);
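The TTL index means MongoDB's background monitor physically removes a link document about 30 days after its expires timestamp, matching the "30-day-clean-up" item in the commit message. A hedged sketch of creating a link consistent with this schema; the surrounding teamId variable is assumed:

  // A '7d' link: usable until `expires`, then purged ~30 days later by the
  // { expires: 1 } TTL index above.
  await MongoInvitationLink.create({
    teamId,
    usedTimesLimit: -1, // schema enum: 1 = single use, -1 = unlimited
    expires: addDays(7)(new Date()), // the fp addDays import is curried
    description: 'Team invite'
  });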
packages/service/support/user/team/invitationLink/type.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import { TeamMemberSchema } from '@fastgpt/global/support/user/team/type';

export type InvitationSchemaType = {
  _id: string;
  teamId: string;
  usedTimesLimit?: number;
  forbidden?: boolean;
  expires: Date;
  description: string;
  members: string[];
};

export type InvitationType = Omit<InvitationSchemaType, 'members'> & {
  members: {
    tmbId: string;
    avatar: string;
    name: string;
  }[];
};

export type InvitationLinkExpiresType = '30m' | '7d' | '1y';

export type InvitationLinkCreateType = {
  description: string;
  expires: InvitationLinkExpiresType;
  usedTimesLimit: 1 | -1;
};
export type InvitationLinkUpdateType = Partial<
  Omit<InvitationSchemaType, 'members' | 'teamId' | '_id'>
> & {
  linkId: string;
};

export type InvitationInfoType = InvitationSchemaType & {
  teamAvatar: string;
  teamName: string;
};
packages/service/type.d.ts (vendored, 2 lines changed)
@@ -1,7 +1,7 @@
 import { FastGPTFeConfigsType, SystemEnvType } from '@fastgpt/global/common/system/types';
 import {
   TTSModelType,
-  ReRankModelItemType,
+  RerankModelItemType,
   STTModelType,
   EmbeddingModelItemType,
   LLMModelItemType