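// Search controller: queries Baidu or a SearXNG instance for results and, when
// details are requested, deep-crawls each result page with a puppeteer-cluster
// browser pool.
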
import type { Request, Response } from 'express';
import { Cluster } from 'puppeteer-cluster';
import dotenv from 'dotenv';
import { performDeepSearch } from '../utils/deepSearch';
import { fetchSearchResults as fetchBaiduResults } from '../engines/baiduEngine';
import { fetchSearchResults as fetchSearchxngResults } from '../engines/searchxngEngine';

dotenv.config();

// Runtime configuration, read from the environment:
//   STRATEGIES       JSON array of strategies handed to performDeepSearch
//   DETECT_WEBSITES  comma-separated list of websites handed to performDeepSearch
//   MAX_CONCURRENCY  maximum number of concurrent crawler tasks (default 10)
const strategies = JSON.parse(process.env.STRATEGIES || '[]');
const detectWebsites = process.env.DETECT_WEBSITES?.split(',') || [];
const maxConcurrency = parseInt(process.env.MAX_CONCURRENCY || '10', 10);
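
/**
 * GET handler for the search API.
 *
 * Query parameters (all read from req.query):
 *   query        required search keywords; responds 400 MISSING_PARAM when absent
 *   pageCount    number of results to return (default 10)
 *   needDetails  'true' to crawl each result page for full content; 'false'
 *                returns the engine results as-is (default 'false')
 *   engine       'baidu' or 'searchxng' (default 'baidu'); anything else
 *                responds 400 INVALID_ENGINE
 *   categories   category hint forwarded to the engine fetcher (default 'general')
 */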
export const search = async (req: Request, res: Response): Promise<void> => {
  const {
    query,
    pageCount = 10,
    needDetails = 'false',
    engine = 'baidu',
    categories = 'general'
  } = req.query;
  const needDetailsBool = needDetails === 'true';

  if (!query) {
    res.status(400).json({
      status: 400,
      error: {
        code: 'MISSING_PARAM',
        message: '缺少必要参数: query'
      }
    });
    return;
  }

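  // Resolve which engine fetcher and search URL base to use.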
  let fetchSearchResults;
  let searchUrlBase;
  try {
    if (engine === 'baidu') {
      fetchSearchResults = fetchBaiduResults;
      searchUrlBase = process.env.ENGINE_BAIDUURL;
    } else if (engine === 'searchxng') {
      fetchSearchResults = fetchSearchxngResults;
      searchUrlBase = process.env.ENGINE_SEARCHXNGURL;
    } else {
      res.status(400).json({
        status: 400,
        error: {
          code: 'INVALID_ENGINE',
          message: '无效的搜索引擎'
        }
      });
      return;
    }

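    // Ask the selected engine for the result URLs and a Map of result entries.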
    const { resultUrls, results } = await fetchSearchResults(
      query as string,
      Number(pageCount),
      searchUrlBase || '',
      categories as string
    );

    // If the engine returned nothing, respond with an empty result list.
    if (results.size === 0) {
      console.log('No results found');
      res.status(200).json({
        status: 200,
        data: {
          results: []
        }
      });
      return;
    }

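    // Without details, return the engine results directly (marking entries still
    // pending as successfully crawled); otherwise crawl every result page.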
    if (!needDetailsBool) {
      console.log('Need details is false');
      results.forEach((value: any) => {
        if (value.crawlStatus === 'Pending') {
          value.crawlStatus = 'Success';
        }
      });
      res.status(200).json({
        status: 200,
        data: {
          results: Array.from(results.values())
        }
      });
    } else {
      console.log('Need details is true');

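      // One shared Chromium pool per request: CONCURRENCY_CONTEXT gives each task
      // its own incognito context, bounded by maxConcurrency. The sandbox/GPU flags
      // and the fixed executablePath assume a containerized Linux environment with
      // Chromium installed at /usr/bin/chromium.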
      const clusterInstance = await Cluster.launch({
        concurrency: Cluster.CONCURRENCY_CONTEXT,
        maxConcurrency: maxConcurrency,
        puppeteerOptions: {
          ignoreDefaultArgs: ['--enable-automation'],
          headless: true,
          executablePath: '/usr/bin/chromium', // point explicitly at the Chromium binary
          pipe: true,
          args: [
            '--no-sandbox',
            '--disable-setuid-sandbox',
            '--disable-dev-shm-usage',
            '--disable-gpu'
          ]
        }
      });

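      // Crawl the result pages with the cluster, applying the configured strategies
      // and website detection rules, then keep the top pageCount sorted results.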
      const sortedResults = await performDeepSearch(
        clusterInstance,
        resultUrls,
        results,
        strategies,
        detectWebsites,
        Number(pageCount)
      );
      res.status(200).json({
        status: 200,
        data: {
          results: sortedResults.slice(0, Number(pageCount))
        }
      });
    }
  } catch (error) {
    console.error('Search failed:', error);
    res.status(500).json({
      status: 500,
      error: {
        code: 'INTERNAL_SERVER_ERROR',
        message: '发生错误'
      }
    });
  }
};

export default { search };
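
/*
 * Example wiring (a sketch only; the file path, route and port below are
 * illustrative assumptions, not part of this module):
 *
 *   import express from 'express';
 *   import searchController from './controllers/searchController';
 *
 *   const app = express();
 *   app.get('/search', searchController.search);
 *   app.listen(3000);
 *
 * Example request:
 *   GET /search?query=FastGPT&pageCount=5&needDetails=true&engine=baidu
 */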