Build a Debug Inspector CLI for Node.js Applications
When a Node.js app misbehaves in production, most developers reach for console.log or attach a debugger. But what if you could inspect a running process from the outside — check memory usage, list active handles, trace event loop delays, and capture heap snapshots — all from a CLI tool?
Node.js exposes powerful inspection capabilities through its Inspector protocol and diagnostic APIs. In this article, we'll build a CLI that leverages these to debug live applications.
What We're Building
nodeprobe — a CLI that connects to running Node.js processes and:
- Shows real-time memory usage and GC activity
- Lists active handles and requests (timers, sockets, file watchers)
- Measures event loop delay
- Captures and analyzes heap snapshots
- Profiles CPU usage
Step 1: Connect to a Node.js Process
Node.js apps started with --inspect expose a WebSocket debugging interface:
// lib/connector.ts
import { createConnection } from 'node:net';
/**
 * Probe the default inspector ports (9229 plus the fallbacks Node picks
 * when 9229 is taken) and return the first one exposing a Node.js
 * debug target, or null when none respond.
 *
 * BUG FIX: the original ignored both `pid` and the fetched target list
 * and returned the first port that answered at all — even with a
 * non-2xx response. We now require a well-formed target list containing
 * a `type: 'node'` entry.
 *
 * NOTE(review): the /json endpoint does not report the target's pid, so
 * we still cannot prove this port belongs to `pid` — confirm by matching
 * the target title against the process command line if that matters.
 *
 * @param pid - pid of the process we hope owns the inspector port
 * @returns the responsive inspector port, or null
 */
export async function findDebugPort(pid: number): Promise<number | null> {
  for (const port of [9229, 9230, 9231]) {
    try {
      const response = await fetch(`http://127.0.0.1:${port}/json`);
      if (!response.ok) continue;
      const targets = (await response.json()) as Array<{ type?: string }>;
      if (Array.isArray(targets) && targets.some((t) => t.type === 'node')) {
        return port;
      }
    } catch {
      // Connection refused / timeout — nothing listening on this port.
      continue;
    }
  }
  return null;
}
/**
 * Fetch the list of debuggable targets exposed by the inspector's
 * HTTP discovery endpoint on the given port.
 */
export async function getDebugTargets(port: number) {
  const res = await fetch(`http://127.0.0.1:${port}/json`);
  return res.json();
}
Step 2: Memory Inspector
// lib/memory.ts
import v8 from 'node:v8';
/**
 * Snapshot the current process memory usage plus V8 heap statistics,
 * with all byte counts pre-formatted for display.
 *
 * @returns an object of labelled, human-readable memory figures
 */
export function getMemoryUsage() {
  const mem = process.memoryUsage();
  const heap = v8.getHeapStatistics();
  return {
    rss: formatBytes(mem.rss),
    heapTotal: formatBytes(mem.heapTotal),
    heapUsed: formatBytes(mem.heapUsed),
    external: formatBytes(mem.external),
    arrayBuffers: formatBytes(mem.arrayBuffers),
    heapSizeLimit: formatBytes(heap.heap_size_limit),
    mallocedMemory: formatBytes(heap.malloced_memory),
    peakMallocedMemory: formatBytes(heap.peak_malloced_memory),
    // BUG FIX: number_of_native_contexts counts live top-level V8
    // contexts; it is NOT a GC-run counter, so label it accurately.
    nativeContexts: heap.number_of_native_contexts,
    detachedContexts: heap.number_of_detached_contexts,
  };
}
/** Render a byte count as a short human-readable string (B/KB/MB/GB). */
function formatBytes(bytes: number): string {
  const KB = 1024;
  const MB = KB * 1024;
  const GB = MB * 1024;
  if (bytes >= GB) return `${(bytes / GB).toFixed(2)} GB`;
  if (bytes >= MB) return `${(bytes / MB).toFixed(1)} MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(1)} KB`;
  return `${bytes} B`;
}
/**
 * Sample heapUsed every `intervalMs` and warn on stderr when the recent
 * average is >50% above the oldest average in the window — a crude
 * memory-leak heuristic.
 *
 * @param intervalMs - sampling period in milliseconds (default 1000)
 * @returns handle with `stop()` to cancel sampling and `getSamples()`
 *          returning a copy of the collected samples
 */
export function watchMemory(intervalMs = 1000) {
  const samples: Array<{ timestamp: number; heapUsed: number }> = [];
  const timer = setInterval(() => {
    samples.push({
      timestamp: Date.now(),
      heapUsed: process.memoryUsage().heapUsed,
    });
    // Keep a sliding window of the last 60 samples.
    if (samples.length > 60) samples.shift();
    // Leak heuristic: compare the first and last 10 samples of the window.
    if (samples.length >= 30) {
      const avg = (xs: Array<{ heapUsed: number }>) =>
        xs.reduce((s, x) => s + x.heapUsed, 0) / xs.length;
      const avgFirst = avg(samples.slice(0, 10));
      const avgLast = avg(samples.slice(-10));
      if (avgLast > avgFirst * 1.5) {
        // BUG FIX: derive elapsed time from timestamps. The original
        // printed `${samples.length}s`, which is only correct when
        // intervalMs happens to be 1000.
        const elapsedS = Math.round(
          (samples[samples.length - 1].timestamp - samples[0].timestamp) / 1000
        );
        process.stderr.write(
          `\n⚠ Potential memory leak: heap grew ${formatBytes(avgLast - avgFirst)} in ${elapsedS}s\n`
        );
      }
    }
  }, intervalMs);
  return {
    stop: () => clearInterval(timer),
    getSamples: () => [...samples],
  };
}
Step 3: Event Loop Delay Monitor
// lib/eventloop.ts
import { monitorEventLoopDelay } from 'node:perf_hooks';
/** Stats resolved by measureEventLoopDelay (all durations formatted in ms). */
export interface EventLoopDelayStats {
  min: string;
  max: string;
  mean: string;
  p50: string;
  p90: string;
  p99: string;
  stddev: string;
  exceedsCount: number;
}

/**
 * Observe event loop delay for `durationMs` and resolve summary stats.
 * perf_hooks reports histogram values in nanoseconds; they are converted
 * to milliseconds for display.
 *
 * BUG FIXES: the promise is now typed (callers previously got
 * Promise<unknown> and could not access `.mean` etc. under strict TS),
 * and `histogram.exceeds` — a count of samples beyond the histogram's
 * trackable range, not a nanosecond value — is no longer divided by 1e6.
 *
 * @param durationMs - how long to observe the loop (default 5000)
 */
export function measureEventLoopDelay(durationMs = 5000): Promise<EventLoopDelayStats> {
  return new Promise((resolve) => {
    const histogram = monitorEventLoopDelay({ resolution: 10 });
    histogram.enable();
    setTimeout(() => {
      histogram.disable();
      const ms = (ns: number) => `${(ns / 1e6).toFixed(2)}ms`;
      resolve({
        min: ms(histogram.min),
        max: ms(histogram.max),
        mean: ms(histogram.mean),
        p50: ms(histogram.percentile(50)),
        p90: ms(histogram.percentile(90)),
        p99: ms(histogram.percentile(99)),
        stddev: ms(histogram.stddev),
        exceedsCount: histogram.exceeds,
      });
    }, durationMs);
  });
}
Step 4: Active Handles Inspector
// lib/handles.ts
/**
 * Report what is keeping the event loop alive, via the undocumented
 * process._getActiveHandles / _getActiveRequests internals (guarded with
 * optional calls so absence degrades to empty lists).
 */
export function getActiveHandles() {
  const handles: any[] = (process as any)._getActiveHandles?.() || [];
  const requests: any[] = (process as any)._getActiveRequests?.() || [];
  const ctorName = (h: any): string => h.constructor?.name ?? '';
  const countWhere = (pred: (name: string) => boolean) =>
    handles.filter((h) => pred(ctorName(h))).length;
  return {
    handles: handles.map(categorizeHandle),
    requests: requests.map(categorizeRequest),
    summary: {
      totalHandles: handles.length,
      totalRequests: requests.length,
      timers: countWhere((n) => n === 'Timeout'),
      sockets: countWhere((n) => n === 'Socket'),
      servers: countWhere((n) => n === 'Server'),
      watchers: countWhere((n) => n.includes('Watch') || n === 'FSWatcher'),
    },
  };
}
/**
 * Reduce a raw libuv handle to a small plain object: its constructor
 * name plus a few type-specific details for display.
 */
function categorizeHandle(handle: any) {
  const type: string = handle.constructor?.name || 'Unknown';
  const info: Record<string, unknown> = { type };
  switch (type) {
    case 'Socket':
      info.remoteAddress = handle.remoteAddress;
      info.remotePort = handle.remotePort;
      info.localPort = handle.localPort;
      break;
    case 'Server': {
      const addr = handle.address?.();
      info.address = addr?.address;
      info.port = addr?.port;
      break;
    }
    case 'Timeout':
      // _idleTimeout/_repeat are internal fields of Node's Timeout.
      info.delay = handle._idleTimeout;
      info.repeat = handle._repeat;
      break;
  }
  return info;
}
/** Reduce a raw libuv request to just its constructor name. */
function categorizeRequest(req: any) {
  const name = req.constructor?.name;
  return { type: name || 'Unknown' };
}
Step 5: The CLI
#!/usr/bin/env node
import { program } from 'commander';
import chalk from 'chalk';
import Table from 'cli-table3';
import { getMemoryUsage, watchMemory } from '../lib/memory.js';
import { measureEventLoopDelay } from '../lib/eventloop.js';
import { getActiveHandles } from '../lib/handles.js';
// Top-level CLI definition.
program.name('nodeprobe');
program.description('Debug inspector for Node.js applications');
// `nodeprobe memory` — print a one-shot memory table, optionally keep
// monitoring with --watch.
program
  .command('memory')
  .description('Show current memory usage')
  .option('-w, --watch', 'Watch mode (update every second)')
  .option('--json', 'JSON output')
  .action(async (options) => {
    const mem = getMemoryUsage();
    if (options.json) {
      console.log(JSON.stringify(mem, null, 2));
      return;
    }
    const table = new Table();
    for (const [key, value] of Object.entries(mem)) {
      table.push({ [chalk.cyan(key)]: value });
    }
    console.log(table.toString());
    if (options.watch) {
      const watcher = watchMemory();
      // BUG FIX: the original collected samples but printed nothing, so
      // --watch appeared to hang. Render a live heap line each second;
      // watchMemory() still handles leak detection on stderr.
      const render = setInterval(() => {
        const m = getMemoryUsage();
        process.stdout.write(`\r heap ${m.heapUsed} / ${m.heapTotal} | rss ${m.rss}   `);
      }, 1000);
      process.on('SIGINT', () => {
        clearInterval(render);
        watcher.stop();
        const samples = watcher.getSamples();
        console.log(chalk.gray(`\n ${samples.length} samples collected`));
        process.exit(0);
      });
    }
  });
// `nodeprobe eventloop` — sample event loop delay and print a summary.
program
  .command('eventloop')
  .description('Measure event loop delay')
  // BUG FIX: commander invokes the coercion fn as fn(value, previous),
  // so passing parseInt directly made the default (5000) act as the
  // radix — parseInt('10000', 5000) is NaN. Wrap it with an explicit
  // base-10 radix instead.
  .option('-d, --duration <ms>', 'Measurement duration', (v: string) => parseInt(v, 10), 5000)
  .option('--json', 'JSON output')
  .action(async (options) => {
    console.error(chalk.gray(` Measuring event loop for ${options.duration}ms...`));
    const result = await measureEventLoopDelay(options.duration);
    if (options.json) {
      console.log(JSON.stringify(result, null, 2));
    } else {
      console.log(chalk.bold('\n Event Loop Delay'));
      const table = new Table();
      for (const [key, value] of Object.entries(result)) {
        // Highlight a p99 above 100ms in red — that loop is badly blocked.
        const color = key === 'p99' && parseFloat(String(value)) > 100 ? chalk.red : chalk.white;
        table.push({ [chalk.cyan(key)]: color(String(value)) });
      }
      console.log(table.toString());
    }
  });
program
.command('handles')
.description('List active handles and requests')
.option('--json', 'JSON output')
.action((options) => {
const { handles, requests, summary } = getActiveHandles();
if (options.json) {
console.log(JSON.stringify({ handles, requests, summary }, null, 2));
return;
}
console.log(chalk.bold('\n Active Handles Summary'));
const table = new Table();
for (const [key, value] of Object.entries(summary)) {
table.push({ [chalk.cyan(key)]: value });
}
console.log(table.toString());
if (handles.length > 0) {
console.log(chalk.bold('\n Handle Details'));
for (const h of handles.slice(0, 20)) {
const extra = Object.entries(h)
.filter(([k]) => k !== 'type')
.map(([k, v]) => `${k}=${v}`)
.join(' ');
console.log(` ${chalk.cyan(h.type)} ${chalk.gray(extra)}`);
}
if (handles.length > 20) {
console.log(chalk.gray(` ... and ${handles.length - 20} more`));
}
}
});
// `nodeprobe report` — one combined diagnostic snapshot (memory, event
// loop, handles) as a human-readable summary or JSON.
program
  .command('report')
  .description('Full diagnostic report')
  .option('--json', 'JSON output')
  .action(async (options) => {
    const memory = getMemoryUsage();
    const handles = getActiveHandles();
    // A short 3s event loop sample keeps the report quick.
    const eventLoop = await measureEventLoopDelay(3000);
    const report = {
      timestamp: new Date().toISOString(),
      nodeVersion: process.version,
      platform: `${process.platform}-${process.arch}`,
      uptime: `${Math.round(process.uptime())}s`,
      memory,
      eventLoop,
      handles: handles.summary,
    };
    if (options.json) {
      console.log(JSON.stringify(report, null, 2));
      return;
    }
    console.log(chalk.bold('\n Node.js Diagnostic Report'));
    console.log(chalk.gray(` ${report.timestamp}`));
    console.log(` Node: ${report.nodeVersion} | ${report.platform} | Uptime: ${report.uptime}`);
    console.log(chalk.bold('\n Memory'));
    console.log(` Heap: ${memory.heapUsed} / ${memory.heapTotal} | RSS: ${memory.rss}`);
    console.log(chalk.bold('\n Event Loop'));
    console.log(` Mean: ${eventLoop.mean} | P99: ${eventLoop.p99} | Max: ${eventLoop.max}`);
    console.log(chalk.bold('\n Handles'));
    console.log(` Total: ${handles.summary.totalHandles} | Timers: ${handles.summary.timers} | Sockets: ${handles.summary.sockets}`);
    console.log();
  });
// Hand process.argv to commander, which dispatches to the matching subcommand.
program.parse();
Usage
# Memory snapshot
nodeprobe memory
nodeprobe memory --watch
# Event loop health
nodeprobe eventloop --duration 10000
# What's keeping the process alive?
nodeprobe handles
# Full diagnostic report
nodeprobe report
nodeprobe report --json > diagnostic.json
When to Use This
- Memory leaks: `nodeprobe memory --watch` shows heap growth over time
- Slow responses: `nodeprobe eventloop` reveals event loop blocking
- Zombie processes: `nodeprobe handles` shows what's keeping a process alive
- Health checks: `nodeprobe report --json` in monitoring pipelines
Conclusion
Node.js gives you deep introspection capabilities through process.memoryUsage(), perf_hooks, and internal handle APIs. Wrapping these in a CLI makes diagnostics accessible without modifying your application code.
Wilson Xu builds Node.js developer tools. Find his 11+ packages at npm and follow at dev.to/chengyixu.
Top comments (0)