mirror of https://github.com/labring/FastGPT.git, synced 2025-08-05 22:55:27 +00:00
v4.6 -1 (#459)
@@ -1,42 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/service/support/user/auth';
import { connectToDatabase } from '@/service/mongo';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { PgClient } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    await authUser({ req, authRoot: true });

    await MongoDataset.updateMany(
      {
        type: { $exists: false }
      },
      {
        $set: {
          type: DatasetTypeEnum.dataset,
          parentId: null
        }
      }
    );

    const response = await PgClient.update(PgDatasetTableName, {
      where: [['file_id', 'undefined']],
      values: [{ key: 'file_id', value: '' }]
    });

    jsonRes(res, {
      data: response.rowCount
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}
@@ -1,35 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/service/support/user/auth';
import { connectToDatabase } from '@/service/mongo';
import mongoose from '@fastgpt/service/common/mongo';
import { PgClient } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    await authUser({ req, authRoot: true });

    const data = await mongoose.connection.db
      .collection('dataset.files')
      .updateMany({}, { $set: { 'metadata.datasetUsed': true } });

    // update pg data
    const pg = await PgClient.query(`UPDATE ${PgDatasetTableName}
      SET file_id = ''
      WHERE (file_id = 'undefined' OR LENGTH(file_id) < 20) AND file_id != '';`);

    jsonRes(res, {
      data: {
        data,
        pg
      }
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}
@@ -1,27 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/service/support/user/auth';
import { connectToDatabase, Bill } from '@/service/mongo';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    await authUser({ req, authRoot: true });

    // drop the old plain index; ignore the error if it does not exist
    try {
      await Bill.collection.dropIndex('time_1');
    } catch (error) {}
    // recreate it as a TTL index so bill records expire after 90 days
    try {
      await Bill.collection.createIndex({ time: 1 }, { expireAfterSeconds: 90 * 24 * 60 * 60 });
    } catch (error) {}

    jsonRes(res, {
      data: {}
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}
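A side note on the hunk above: `expireAfterSeconds` turns `time_1` into a TTL index, so MongoDB deletes bill documents 90 days after their `time` value. A quick way to verify the migration took effect, as a sketch reusing the same `Bill` model:

// Sketch: list indexes on the bill collection and confirm the TTL value.
const indexes = await Bill.collection.indexes();
const timeIndex = indexes.find((idx) => idx.name === 'time_1');
console.log(timeIndex?.expireAfterSeconds); // expected: 7776000 (90 days)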
@@ -1,104 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/service/support/user/auth';
import { connectToDatabase, App } from '@/service/mongo';
import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { SystemInputEnum } from '@/constants/app';

const limit = 300;

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    await authUser({ req, authRoot: true });

    const totalApps = await App.countDocuments();

    // init app
    await App.updateMany({}, { $set: { inited: false } });

    for (let i = 0; i < totalApps; i += limit) {
      await initVariable();
      console.log(i + limit);
    }

    jsonRes(res, {
      data: {
        total: totalApps
      }
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}

async function initVariable(): Promise<any> {
  try {
    const apps = await App.find({ inited: false }).limit(limit);
    await Promise.all(
      apps.map(async (app) => {
        const jsonAPP = app.toObject();
        // @ts-ignore
        app.inited = true;
        const modules = jsonAPP.modules;

        // find the variable module
        const variable = modules.find((item) => item.flowType === FlowNodeTypeEnum.variable);
        if (!variable) return await app.save();

        // find the user guide module
        const userGuideModule = modules.find(
          (item) => item.flowType === FlowNodeTypeEnum.userGuide
        );
        if (userGuideModule) {
          userGuideModule.inputs = [
            userGuideModule.inputs[0],
            {
              key: SystemInputEnum.variables,
              type: FlowNodeInputTypeEnum.systemInput,
              label: '对话框变量',
              value: variable.inputs[0]?.value
            }
          ];
        } else {
          modules.unshift({
            moduleId: 'userGuide',
            flowType: FlowNodeTypeEnum.userGuide,
            name: '用户引导',
            position: {
              x: 447.98520778293346,
              y: 721.4016845336229
            },
            inputs: [
              {
                key: SystemInputEnum.welcomeText,
                type: FlowNodeInputTypeEnum.input,
                label: '开场白'
              },
              {
                key: SystemInputEnum.variables,
                type: FlowNodeInputTypeEnum.systemInput,
                label: '对话框变量',
                value: variable.inputs[0]?.value
              }
            ],
            outputs: []
          });
        }

        jsonAPP.modules = jsonAPP.modules.filter(
          (item) => item.flowType !== FlowNodeTypeEnum.variable
        );

        app.modules = JSON.parse(JSON.stringify(jsonAPP.modules));

        await app.save();
      })
    );
  } catch (error) {
    return initVariable();
  }
}
@@ -1,93 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@fastgpt/service/support/user/auth';
import { connectToDatabase } from '@/service/mongo';
import { PgClient } from '@/service/pg';
import { PgDatasetTableName } from '@/constants/plugin';
import { DatasetSpecialIdEnum } from '@fastgpt/global/core/dataset/constant';
import { Types, connectionMongo } from '@fastgpt/service/common/mongo';
import { delay } from '@/utils/tools';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  let initFileIds: string[] = [];
  try {
    const { limit = 100 } = req.body;
    await connectToDatabase();
    await authUser({ req, authRoot: true });

    console.log('count rows');
    // get the distinct fileIds
    const { rows } = await PgClient.query(`SELECT DISTINCT file_id
      FROM ${PgDatasetTableName} WHERE file_id IS NOT NULL AND file_id != '';
    `);
    console.log('count rows success', rows.length);
    console.log('start filter');
    for (let i = 0; i < rows.length; i += limit) {
      await init(rows.slice(i, i + limit), initFileIds);
      console.log(i);
    }

    for (let i = 0; i < initFileIds.length; i++) {
      await PgClient.query(`UPDATE ${PgDatasetTableName}
        SET file_id = '${DatasetSpecialIdEnum.manual}'
        WHERE file_id = '${initFileIds[i]}'`);
      console.log('update: ', initFileIds[i]);
    }

    const { rows: emptyIds } = await PgClient.query(
      `SELECT id FROM ${PgDatasetTableName} WHERE file_id IS NULL OR file_id=''`
    );
    console.log('filter success');
    console.log(emptyIds.length);

    await delay(5000);
    console.log('start update');

    async function start(start: number) {
      for (let i = start; i < emptyIds.length; i += limit) {
        await PgClient.query(`UPDATE ${PgDatasetTableName}
          SET file_id = '${DatasetSpecialIdEnum.manual}'
          WHERE id = '${emptyIds[i].id}'`);
        console.log('update: ', i, emptyIds[i].id);
      }
    }
    // fan out `limit` interleaved workers in parallel (not awaited)
    for (let i = 0; i < limit; i++) {
      start(i);
    }

    console.log('update success');

    jsonRes(res, {
      data: {
        empty: emptyIds.length
      }
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}

async function init(rows: any[], initFileIds: string[]) {
  const collection = connectionMongo.connection.db.collection(`dataset.files`);

  /* iterate all fileIds and check whether a matching file exists; if not, mark it as manual */
  const updateResult = await Promise.allSettled(
    rows.map(async (item) => {
      // check whether a matching file exists
      const file = await collection.findOne({
        _id: new Types.ObjectId(item.file_id)
      });

      if (file) return '';
      // no matching file: mark it as manual
      initFileIds.push(item.file_id);

      return item.file_id;
    })
  );
  // @ts-ignore
  console.log(updateResult.filter((item) => item?.value).length);
}
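The update loop in the hunk above fans out `limit` unawaited `start(i)` calls, each striding through `emptyIds` in steps of `limit`, so the rows are processed by interleaved parallel workers (and the handler responds before they finish). A minimal standalone sketch of that pattern, with a hypothetical `processItem` callback:

// Interleaved-worker pattern: worker k handles items k, k + limit, k + 2 * limit, ...
async function runInterleaved<T>(
  items: T[],
  limit: number,
  processItem: (item: T) => Promise<void>
) {
  async function worker(start: number) {
    for (let i = start; i < items.length; i += limit) {
      await processItem(items[i]);
    }
  }
  // Unlike the handler above, this variant awaits all workers before returning.
  await Promise.all(Array.from({ length: limit }, (_, k) => worker(k)));
}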
@@ -1,344 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { App, connectToDatabase } from '@/service/mongo';
import { PgClient } from '@/service/pg';
import { connectionMongo } from '@fastgpt/service/common/mongo';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { delay } from '@/utils/tools';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { strIsLink } from '@fastgpt/global/common/string/tools';
import { GridFSStorage } from '@/service/lib/gridfs';
import { Types } from 'mongoose';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { limit = 50 } = req.body as { limit: number };
    await connectToDatabase();

    console.log('rename');
    await rename();

    console.log('init mongo data');
    await initMongo(limit);

    console.log('create collection');
    await createCollection();

    console.log('update pg collectionId');
    await updatePgCollection();
    console.log('init done');

    jsonRes(res, {
      data: {}
    });
  } catch (error) {
    console.log(error);

    jsonRes(res, {
      code: 500,
      error
    });
  }
}

async function rename() {
  // rename mongo kbs -> datasets
  try {
    const collections = await connectionMongo.connection.db
      .listCollections({ name: 'kbs' })
      .toArray();
    if (collections.length > 0) {
      const kbCollection = connectionMongo.connection.db.collection('kbs');
      await kbCollection.rename('datasets', { dropTarget: true });
      console.log('success rename kbs -> datasets');
    }
  } catch (error) {
    console.log('error: rename kbs -> datasets', error);
  }

  // rename pg: kb_id -> dataset_id
  try {
    const { rows } = await PgClient.query(`SELECT EXISTS (
      SELECT 1
      FROM information_schema.columns
      WHERE table_name = '${PgDatasetTableName}'
      AND column_name = 'kb_id'
    );`);

    if (rows[0].exists) {
      await PgClient.query(`ALTER TABLE ${PgDatasetTableName} RENAME COLUMN kb_id TO dataset_id`);
      console.log('success rename kb_id -> dataset_id');
    }
  } catch (error) {
    console.log('error: rename kb_id -> dataset_id', error);
  }
  // rename pg: file_id -> collection_id
  try {
    const { rows } = await PgClient.query(`SELECT EXISTS (
      SELECT 1
      FROM information_schema.columns
      WHERE table_name = '${PgDatasetTableName}'
      AND column_name = 'file_id'
    );`);

    if (rows[0].exists) {
      await PgClient.query(
        `ALTER TABLE ${PgDatasetTableName} RENAME COLUMN file_id TO collection_id`
      );
      console.log('success rename file_id -> collection_id');
    }
  } catch (error) {
    console.log('error: rename file_id -> collection_id', error);
  }
}

async function initMongo(limit: number) {
  let success = 0;

  async function initApp(limit = 100): Promise<any> {
    // iterate all apps and rewrite FlowNodeTypeEnum.kbSearchNode in app modules
    const apps = await App.find({ inited: false }).limit(limit);

    if (apps.length === 0) return;

    try {
      await Promise.all(
        apps.map(async (app) => {
          const modules = app.toObject().modules;
          // @ts-ignore
          app.inited = true;

          modules.forEach((module) => {
            // @ts-ignore
            if (module.flowType === 'kbSearchNode') {
              module.flowType = FlowNodeTypeEnum.datasetSearchNode;
              module.inputs.forEach((input) => {
                if (input.key === 'kbList') {
                  input.key = 'datasets';
                  input.value?.forEach((item: any) => {
                    item.datasetId = item.kbId;
                  });
                }
              });
            }
          });

          app.modules = JSON.parse(JSON.stringify(modules));
          await app.save();
        })
      );
      success += limit;
      console.log('mongo init:', success);
      return initApp(limit);
    } catch (error) {
      return initApp(limit);
    }
  }

  // init app
  await App.updateMany(
    {},
    {
      $set: {
        inited: false
      }
    }
  );

  const totalApp = await App.countDocuments();
  console.log(`total app: ${totalApp}`);
  await delay(2000);
  console.log('start init app');
  await initApp(limit);
  console.log('init mongo success');
}

type RowType = { user_id: string; dataset_id: string; collection_id: string };
async function createCollection() {
  let success = 0;

  const { rows, rowCount } = await PgClient.query(`SELECT user_id,dataset_id,collection_id
    FROM ${PgDatasetTableName}
    GROUP BY user_id,collection_id, dataset_id
    ORDER BY dataset_id`);

  if (rowCount === 0) {
    console.log('pg done');
    return;
  }
  // init dataset collection
  console.log(`total collection: ${rowCount}`);

  // collectionId variants: manual, mark, httpLink, fileId
  async function initCollection(row: RowType): Promise<any> {
    try {
      {
        const userId = row.user_id;
        const datasetId = row.dataset_id;
        const collectionId = row.collection_id;

        const count = await MongoDatasetCollection.countDocuments({
          datasetId,
          userId,
          ['metadata.pgCollectionId']: collectionId
        });
        if (count > 0) {
          console.log('collection already exist');
          return;
        }

        if (collectionId === 'manual') {
          await MongoDatasetCollection.create({
            parentId: null,
            datasetId,
            userId,
            name: '手动录入',
            type: DatasetCollectionTypeEnum.virtual,
            updateTime: new Date('2099'),
            metadata: {
              pgCollectionId: collectionId
            }
          });
        } else if (collectionId === 'mark') {
          await MongoDatasetCollection.create({
            parentId: null,
            datasetId,
            userId,
            name: '手动标注',
            type: DatasetCollectionTypeEnum.virtual,
            updateTime: new Date('2099'),
            metadata: {
              pgCollectionId: collectionId
            }
          });
        } else if (strIsLink(collectionId)) {
          await MongoDatasetCollection.create({
            parentId: null,
            datasetId,
            userId,
            name: collectionId,
            type: DatasetCollectionTypeEnum.link,
            metadata: {
              rawLink: collectionId,
              pgCollectionId: collectionId
            }
          });
        } else {
          // find file
          const gridFs = new GridFSStorage('dataset', userId);
          const collection = gridFs.Collection();
          const file = await collection.findOne({
            _id: new Types.ObjectId(collectionId)
          });

          if (file) {
            await MongoDatasetCollection.create({
              parentId: null,
              datasetId,
              userId,
              name: file.filename,
              type: DatasetCollectionTypeEnum.file,
              metadata: {
                fileId: file._id,
                pgCollectionId: collectionId
              }
            });
          } else {
            // no file
            await MongoDatasetCollection.create({
              parentId: null,
              datasetId,
              userId,
              name: '未知文件',
              type: DatasetCollectionTypeEnum.virtual,
              metadata: {
                pgCollectionId: collectionId
              }
            });
          }
        }
        console.log('create collection success');
      }
    } catch (error) {
      console.log(error);

      await delay(2000);
      return initCollection(row);
    }
  }

  for await (const row of rows) {
    await initCollection(row);
    console.log('init collection success: ', ++success);
  }
}

async function updatePgCollection(): Promise<any> {
  let success = 0;
  const limit = 10;
  const collections = await MongoDatasetCollection.find({
    'metadata.pgCollectionId': { $exists: true, $ne: '' }
  }).lean();
  console.log('total:', collections.length);

  async function update(i: number): Promise<any> {
    const item = collections[i];
    if (!item) return;

    try {
      console.log('start', item.name, item.datasetId, item.metadata.pgCollectionId);
      const time = Date.now();
      if (item.metadata.pgCollectionId) {
        const { rows } = await PgClient.select(PgDatasetTableName, {
          fields: ['id'],
          where: [
            ['dataset_id', String(item.datasetId)],
            'AND',
            ['collection_id', String(item.metadata.pgCollectionId)]
          ],
          limit: 999999
        });
        console.log('update data total', rows.length, 'time:', Date.now() - time);

        await PgClient.query(`
          update ${PgDatasetTableName} set collection_id = '${item._id}' where dataset_id = '${String(
            item.datasetId
          )}' AND collection_id = '${String(item.metadata.pgCollectionId)}'
        `);

        console.log('pg update time', Date.now() - time);
      }

      // update file id
      if (item.type === 'file' && item.metadata.fileId) {
        const collection = connectionMongo.connection.db.collection(`dataset.files`);
        await collection.findOneAndUpdate({ _id: new Types.ObjectId(item.metadata.fileId) }, [
          {
            $set: {
              'metadata.datasetId': item.datasetId,
              'metadata.collectionId': item._id
            }
          }
        ]);
      }

      await MongoDatasetCollection.findByIdAndUpdate(item._id, {
        $unset: { 'metadata.pgCollectionId': '' }
      });
      console.log('success', ++success);

      return update(i + limit);
    } catch (error) {
      console.log(error);

      await delay(5000);
      return update(i);
    }
  }

  const arr = new Array(limit).fill(0);

  return Promise.all(arr.map((_, i) => update(i)));
}
projects/app/src/pages/api/admin/initv46.ts | 335 lines | Normal file
@@ -0,0 +1,335 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoBill } from '@fastgpt/service/support/wallet/bill/schema';
import {
  createDefaultTeam,
  getTeamInfoByTmbId
} from '@fastgpt/service/support/user/team/controller';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { UserModelSchema } from '@fastgpt/global/support/user/type';
import { delay } from '@/utils/tools';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import {
  DatasetCollectionSchemaType,
  DatasetSchemaType,
  DatasetTrainingSchemaType
} from '@fastgpt/global/core/dataset/type';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { connectionMongo } from '@fastgpt/service/common/mongo';
import { Types } from 'mongoose';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { PgClient } from '@fastgpt/service/common/pg';
import { PgDatasetTableName } from '@fastgpt/global/core/dataset/constant';
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
import { MongoOpenApi } from '@fastgpt/service/support/openapi/schema';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { authCert } from '@fastgpt/service/support/permission/auth/common';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { limit = 50, maxSize = 3 } = req.body as { limit: number; maxSize: number };
    await authCert({ req, authRoot: true });
    await connectToDatabase();

    await initDefaultTeam(limit, maxSize);
    await initMongoTeamId(limit);
    await initDatasetAndApp();
    await initCollectionFileTeam(limit);

    if (global.systemEnv.pluginBaseUrl) {
      POST('/admin/init46');
    }

    await initPgData();

    jsonRes(res, {
      data: {}
    });
  } catch (error) {
    console.log(error);

    jsonRes(res, {
      code: 500,
      error
    });
  }
}

async function initDefaultTeam(limit: number, maxSize: number) {
  /* init user default Team */
  const users = await MongoUser.find({}, '_id balance');
  console.log('init user default team', users.length);
  // process users in batches of `limit`
  const userArr: UserModelSchema[][] = [];
  for (let i = 0; i < users.length; i += limit) {
    userArr.push(users.slice(i, i + limit));
  }
  let success = 0;
  for await (const users of userArr) {
    await Promise.all(users.map(init));
    success += limit;
    console.log(success);
  }

  async function init(user: UserModelSchema): Promise<any> {
    try {
      await createDefaultTeam({
        userId: user._id,
        balance: user.balance,
        maxSize
      });
    } catch (error) {
      console.log(error);

      await delay(1000);
      return init(user);
    }
  }
}
async function initMongoTeamId(limit: number) {
  const mongoSchema = [
    {
      label: 'MongoPlugin',
      schema: MongoPlugin
    },
    {
      label: 'MongoChat',
      schema: MongoChat
    },
    {
      label: 'MongoChatItem',
      schema: MongoChatItem
    },
    {
      label: 'MongoApp',
      schema: MongoApp
    },
    {
      label: 'MongoDataset',
      schema: MongoDataset
    },
    {
      label: 'MongoDatasetCollection',
      schema: MongoDatasetCollection
    },
    {
      label: 'MongoDatasetTraining',
      schema: MongoDatasetTraining
    },
    {
      label: 'MongoBill',
      schema: MongoBill
    },
    {
      label: 'MongoOutLink',
      schema: MongoOutLink
    },
    {
      label: 'MongoOpenApi',
      schema: MongoOpenApi
    }
  ];
  /* backfill teamId/tmbId on every schema that references a user */

  for await (const item of mongoSchema) {
    console.log('start init', item.label);
    await initTeamTmbId(item.schema);
    console.log('finish init', item.label);
  }

  async function initTeamTmbId(schema: any) {
    const emptyWhere = {
      $or: [{ teamId: { $exists: false } }, { teamId: null }]
    };
    const uniqueUsersWithNoTeamId = await schema.aggregate([
      {
        $match: emptyWhere
      },
      {
        $group: {
          _id: '$userId', // group by userId to deduplicate
          userId: { $first: '$userId' } // keep the first occurrence of each userId
        }
      },
      {
        $project: {
          _id: 0, // hide the _id field
          userId: 1 // return only the userId field
        }
      }
    ]);
    const users = uniqueUsersWithNoTeamId;

    console.log('un init total', users.length);
    // process users in batches of `limit`
    const userArr: any[][] = [];
    for (let i = 0; i < users.length; i += limit) {
      userArr.push(users.slice(i, i + limit));
    }

    let success = 0;
    for await (const users of userArr) {
      await Promise.all(users.map((item) => init(item.userId)));
      success += limit;
      console.log(success);
    }

    async function init(userId: string): Promise<any> {
      try {
        const tmb = await getTeamInfoByTmbId({ userId });

        await schema.updateMany(
          {
            userId,
            ...emptyWhere
          },
          {
            teamId: tmb.teamId,
            tmbId: tmb.tmbId
          }
        );
      } catch (error) {
        if (error === 'team not exist' || error === 'tmbId or userId is required') {
          return;
        }
        console.log(error);
        await delay(1000);
        return init(userId);
      }
    }
  }
}
async function initDatasetAndApp() {
  await MongoDataset.updateMany(
    {},
    {
      $set: {
        permission: PermissionTypeEnum.private
      }
    }
  );
  await MongoApp.updateMany(
    {},
    {
      $set: {
        permission: PermissionTypeEnum.private
      }
    }
  );
}
async function initCollectionFileTeam(limit: number) {
  /* backfill metadata.teamId/tmbId on GridFS dataset files */
  const DatasetFile = connectionMongo.connection.db.collection(`dataset.files`);
  const matchWhere = {
    $or: [{ 'metadata.teamId': { $exists: false } }, { 'metadata.teamId': null }]
  };
  const uniqueUsersWithNoTeamId = await DatasetFile.aggregate([
    {
      $match: matchWhere
    },
    {
      $group: {
        _id: '$metadata.userId', // group by metadata.userId to deduplicate
        userId: { $first: '$metadata.userId' } // keep the first occurrence of each userId
      }
    },
    {
      $project: {
        _id: 0, // hide the _id field
        userId: 1 // return only the userId field
      }
    }
  ]).toArray();
  const users = uniqueUsersWithNoTeamId;

  console.log('un init total', users.length);
  // process users in batches of `limit`
  const userArr: any[][] = [];
  for (let i = 0; i < users.length; i += limit) {
    userArr.push(users.slice(i, i + limit));
  }

  let success = 0;
  for await (const item of userArr) {
    await Promise.all(item.map((item) => init(item.userId)));
    success += limit;
    console.log(success);
  }

  async function init(userId: string): Promise<any> {
    try {
      const tmb = await getTeamInfoByTmbId({
        userId
      });

      await DatasetFile.updateMany(
        {
          userId,
          ...matchWhere
        },
        {
          $set: {
            'metadata.teamId': String(tmb.teamId),
            'metadata.tmbId': String(tmb.tmbId)
          }
        }
      );
    } catch (error) {
      if (error === 'team not exist' || error === 'tmbId or userId is required') {
        return;
      }
      console.log(error);
      await delay(1000);
      return init(userId);
    }
  }
}
async function initPgData() {
  const limit = 10;
  // add column
  try {
    await Promise.all([
      PgClient.query(`ALTER TABLE ${PgDatasetTableName} ADD COLUMN team_id CHAR(50);`),
      PgClient.query(`ALTER TABLE ${PgDatasetTableName} ADD COLUMN tmb_id CHAR(50);`),
      PgClient.query(`ALTER TABLE ${PgDatasetTableName} ALTER COLUMN user_id DROP NOT NULL;`)
    ]);
  } catch (error) {
    console.log(error);
    console.log('column exists');
  }

  const { rows } = await PgClient.query<{ user_id: string }>(`
    SELECT DISTINCT user_id FROM ${PgDatasetTableName} WHERE team_id IS NULL;
  `);
  console.log('init pg', rows.length);
  let success = 0;
  // fan out `limit` interleaved workers in parallel (not awaited)
  for (let i = 0; i < limit; i++) {
    init(i);
  }
  async function init(index: number): Promise<any> {
    const userId = rows[index]?.user_id;
    if (!userId) return;
    try {
      const tmb = await getTeamInfoByTmbId({ userId });
      // update pg
      await PgClient.query(
        `Update ${PgDatasetTableName} set team_id = '${tmb.teamId}', tmb_id = '${tmb.tmbId}' where user_id = '${userId}' AND team_id IS NULL;`
      );
      console.log(++success);
      init(index + limit);
    } catch (error) {
      if (error === 'default team not exist') {
        return;
      }
      console.log(error);
      await delay(1000);
      return init(index);
    }
  }
}
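For completeness, a usage sketch for triggering the new migration endpoint once the server is running. The route path follows from the file location; `limit` and `maxSize` match the body fields the handler reads. The `rootkey` header name is an assumption about how `authCert({ req, authRoot: true })` reads root credentials; adjust it to your deployment.

// Usage sketch (assumes Node 18+ global fetch and a ROOT_KEY env var).
const res = await fetch('http://localhost:3000/api/admin/initv46', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
    rootkey: process.env.ROOT_KEY ?? '' // assumption: root auth header name
  },
  body: JSON.stringify({ limit: 50, maxSize: 3 })
});
console.log(await res.json());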