perf: add process memory metrics (#6656)

* perf: reduce trace span and metrics

* perf: add process memory metrics

* fix: translations
This commit is contained in:
Ryo
2026-03-26 23:37:45 +08:00
committed by GitHub
parent 6e6b026d13
commit 2234859440
12 changed files with 760 additions and 553 deletions
+14 -2
View File
@@ -1,5 +1,10 @@
import { configureMetricsFromEnv, disposeMetrics, getMeter } from '@fastgpt-sdk/otel/metrics';
import {
configureMetricsFromEnv,
disposeMetrics as disposeOtelMetrics,
getMeter
} from '@fastgpt-sdk/otel/metrics';
import { env } from '../../env';
import { startRuntimeMetrics, stopRuntimeMetrics } from './runtime';
export async function configureMetrics() {
await configureMetricsFromEnv({
@@ -7,6 +12,13 @@ export async function configureMetrics() {
defaultServiceName: 'fastgpt-client',
defaultMeterName: 'fastgpt-client'
});
startRuntimeMetrics();
}
export { disposeMetrics, getMeter };
/**
 * Shut down client metrics: unregister the process runtime observers first,
 * then dispose the underlying OTel metrics pipeline.
 */
export async function disposeMetrics() {
  stopRuntimeMetrics();
  await disposeOtelMetrics();
}
export { getMeter };
@@ -0,0 +1,98 @@
import type {
  BatchObservableCallback,
  Meter,
  Observable,
  ObservableGauge
} from '@opentelemetry/api';
import { getMeter } from '@fastgpt-sdk/otel/metrics';

// These gauges report process-wide values and attach no attributes.
type RuntimeMetricAttributes = Record<string, never>;

// Everything produced by createRuntimeObservables(), kept together so
// start/stop can register and unregister the same instruments.
type RuntimeObservableSet = {
  meter: Meter;
  processMemoryRss: ObservableGauge<RuntimeMetricAttributes>;
  processMemoryHeapUsed: ObservableGauge<RuntimeMetricAttributes>;
  processMemoryHeapTotal: ObservableGauge<RuntimeMetricAttributes>;
  processMemoryExternal: ObservableGauge<RuntimeMetricAttributes>;
  processMemoryArrayBuffers: ObservableGauge<RuntimeMetricAttributes>;
  processUptime: ObservableGauge<RuntimeMetricAttributes>;
};

// Common name prefix for every runtime process metric.
const prefix = 'fastgpt.runtime.process';

// Module-level registration state: startRuntimeMetrics() populates these and
// stopRuntimeMetrics() clears them, making both calls idempotent.
let runtimeMetricsRegistered = false;
let runtimeMeter: Meter | undefined;
let runtimeObservables: Observable<RuntimeMetricAttributes>[] = [];
let runtimeMetricsCallback: BatchObservableCallback<RuntimeMetricAttributes> | undefined;
function createRuntimeObservables(): RuntimeObservableSet {
const meter = getMeter('fastgpt.runtime');
return {
meter,
processMemoryRss: meter.createObservableGauge(`${prefix}.memory.rss`, {
description: 'Resident set size memory used by the current process',
unit: 'By'
}),
processMemoryHeapUsed: meter.createObservableGauge(`${prefix}.memory.heap_used`, {
description: 'V8 heap memory currently used by the current process',
unit: 'By'
}),
processMemoryHeapTotal: meter.createObservableGauge(`${prefix}.memory.heap_total`, {
description: 'Total V8 heap memory allocated for the current process',
unit: 'By'
}),
processMemoryExternal: meter.createObservableGauge(`${prefix}.memory.external`, {
description: 'Memory used by C++ objects bound to JavaScript objects',
unit: 'By'
}),
processMemoryArrayBuffers: meter.createObservableGauge(`${prefix}.memory.array_buffers`, {
description: 'Memory allocated for ArrayBuffer and SharedArrayBuffer instances',
unit: 'By'
}),
processUptime: meter.createObservableGauge(`${prefix}.uptime`, {
description: 'Process uptime',
unit: 's'
})
};
}
/**
 * Register observable gauges for process memory and uptime.
 * Idempotent: registration happens at most once until stopRuntimeMetrics()
 * is called.
 */
export function startRuntimeMetrics() {
  if (runtimeMetricsRegistered) return;

  const {
    meter,
    processMemoryRss,
    processMemoryHeapUsed,
    processMemoryHeapTotal,
    processMemoryExternal,
    processMemoryArrayBuffers,
    processUptime
  } = createRuntimeObservables();

  runtimeMeter = meter;
  runtimeObservables = [
    processMemoryRss,
    processMemoryHeapUsed,
    processMemoryHeapTotal,
    processMemoryExternal,
    processMemoryArrayBuffers,
    processUptime
  ];

  // A single batch callback samples process.memoryUsage()/process.uptime()
  // once per collection and reports every gauge from that one snapshot.
  runtimeMetricsCallback = (observer) => {
    const memory = process.memoryUsage();
    observer.observe(processMemoryRss, memory.rss);
    observer.observe(processMemoryHeapUsed, memory.heapUsed);
    observer.observe(processMemoryHeapTotal, memory.heapTotal);
    observer.observe(processMemoryExternal, memory.external);
    observer.observe(processMemoryArrayBuffers, memory.arrayBuffers);
    observer.observe(processUptime, process.uptime());
  };

  meter.addBatchObservableCallback(runtimeMetricsCallback, runtimeObservables);
  runtimeMetricsRegistered = true;
}
/**
 * Unregister the batch callback installed by startRuntimeMetrics() and reset
 * all module state so the metrics can be started again later. No-op when
 * nothing is registered.
 */
export function stopRuntimeMetrics() {
  const meter = runtimeMeter;
  const callback = runtimeMetricsCallback;
  if (!runtimeMetricsRegistered || !meter || !callback) return;

  meter.removeBatchObservableCallback(callback, runtimeObservables);

  runtimeMetricsRegistered = false;
  runtimeMeter = undefined;
  runtimeObservables = [];
  runtimeMetricsCallback = undefined;
}
+36 -6
View File
@@ -13,6 +13,37 @@ export type NextApiHandler<T = any> = (
res: NextApiResponse<T>
) => unknown | Promise<unknown>;
/**
 * Heuristic check for path segments that look like opaque identifiers:
 * long digit runs, Mongo ObjectIds, UUIDs, or long url-safe tokens.
 */
function isIdLikeRouteSegment(segment: string) {
  const idPatterns = [
    /^\d{4,}$/, // 4+ digit numeric id
    /^[0-9a-f]{24}$/i, // Mongo ObjectId
    /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i, // UUID v1-v5
    /^[A-Za-z0-9_-]{16,}$/ // 16+ char url-safe token
  ];
  return idPatterns.some((pattern) => pattern.test(segment));
}
/**
 * Collapse identifier-like path segments to ':id' so URLs that differ only
 * by an id map to the same route label.
 */
function normalizeRouteSegment(segment: string) {
  if (isIdLikeRouteSegment(segment)) {
    return ':id';
  }
  return segment;
}
/**
 * Read a numeric HTTP header value. Multi-value headers use the first entry;
 * missing, empty, or non-finite values yield undefined.
 */
function parseHeaderNumber(value: string | string[] | undefined) {
  const first = Array.isArray(value) ? value[0] : value;
  if (!first) return undefined;
  const parsed = Number(first);
  if (!Number.isFinite(parsed)) return undefined;
  return parsed;
}
/**
 * Derive a route label from a request URL: drop the query string and replace
 * identifier-like segments with ':id'.
 */
function getRequestRoute(url: string) {
  const queryIndex = url.indexOf('?');
  const route = queryIndex === -1 ? url : url.slice(0, queryIndex);
  if (route === '' || route === '/') return '/';
  return route.split('/').map(normalizeRouteSegment).join('/');
}
export const NextEntry = ({
beforeCallback = []
}: {
@@ -28,23 +59,22 @@ export const NextEntry = ({
const responseLogger = getLogger(LogCategories.HTTP.RESPONSE);
const url = req.url || '';
const route = getRequestRoute(url);
const method = req.method?.toUpperCase() || '';
const ip = req.headers['x-forwarded-for'] || req.socket?.remoteAddress;
const userAgent = req.headers['user-agent'];
const contentLength = req.headers['content-length'];
const requestBodySize = parseHeaderNumber(contentLength);
return withContext({ requestId }, async () =>
withActiveSpan(
{
name: `http.request ${method || 'UNKNOWN'} ${url || '/'}`,
name: 'http.request',
tracerName: 'fastgpt.http',
attributes: {
'fastgpt.request.id': requestId,
'http.request.method': method,
'url.full': url,
'client.address': Array.isArray(ip) ? ip.join(',') : ip,
'user_agent.original': userAgent,
'http.request.body.size': contentLength
'http.route': route,
'http.request.body.size': requestBodySize
}
},
async (span) => {
+14 -1
View File
@@ -29,6 +29,19 @@ export type ActiveSpanOptions = {
attributes?: Record<string, unknown>;
};
const DEFAULT_PRODUCTION_TRACING_SAMPLE_RATIO = 0.05;
const DEFAULT_NON_PRODUCTION_TRACING_SAMPLE_RATIO = 1;
/**
 * Resolve the trace sample ratio: an explicitly configured
 * TRACING_OTEL_SAMPLE_RATIO wins; otherwise fall back to the production or
 * non-production default based on NODE_ENV.
 *
 * Fix: the previous `typeof === 'number'` check also accepted NaN and
 * Infinity (e.g. from a mis-parsed env var), which would then be passed to
 * the sampler as the ratio. Only finite configured values are honored now.
 */
function getDefaultTracingSampleRatio() {
  const configured = env.TRACING_OTEL_SAMPLE_RATIO;
  if (typeof configured === 'number' && Number.isFinite(configured)) {
    return configured;
  }
  return process.env.NODE_ENV === 'production'
    ? DEFAULT_PRODUCTION_TRACING_SAMPLE_RATIO
    : DEFAULT_NON_PRODUCTION_TRACING_SAMPLE_RATIO;
}
function normalizeAttributes(attributes?: Record<string, unknown>) {
if (!attributes) return;
@@ -51,7 +64,7 @@ export async function configureTracing() {
env,
defaultServiceName: 'fastgpt-client',
defaultTracerName: 'fastgpt-client',
defaultSampleRatio: env.TRACING_OTEL_SAMPLE_RATIO
defaultSampleRatio: getDefaultTracingSampleRatio()
});
}