feat(monitoring): add performance tracker with ring buffer

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Martin Porwoll 2026-02-15 00:29:58 +00:00
parent 4907371715
commit dc14c5dbc7
2 changed files with 158 additions and 0 deletions

View file

@ -0,0 +1,96 @@
/**
* Performance Tracker
*
* Records HTTP request metrics in a fixed-size ring buffer and computes
* percentile-based performance statistics over configurable time windows.
* Used by the monitoring dashboard to display response time distributions,
* error rates, and throughput.
*/
import type { PerformanceEntry, PerformanceMetrics } from './types.js'
/** Time-window name -> window length in milliseconds. */
const PERIOD_MS: Record<string, number> = {
  '1h': 60 * 60 * 1000,
  '6h': 6 * 60 * 60 * 1000,
  '24h': 24 * 60 * 60 * 1000,
  '7d': 7 * 24 * 60 * 60 * 1000,
}
/**
 * All-zero metrics returned when a time window contains no entries.
 * Typed Readonly so nothing can mutate this shared module-level
 * sentinel; getMetrics() spreads it into a fresh object on return.
 */
const EMPTY_METRICS: Readonly<PerformanceMetrics> = {
  avgResponseTimeMs: 0,
  p95ResponseTimeMs: 0,
  p99ResponseTimeMs: 0,
  errorRate: 0,
  requestsPerMinute: 0,
}
/**
 * Collects HTTP request timings in a fixed-size ring buffer and derives
 * percentile-based statistics over a time window. Once the buffer is
 * full, each new entry overwrites the oldest slot, so memory use is
 * bounded by the configured capacity.
 */
export class PerformanceTracker {
  // Circular storage; slots [0, count) have been written at least once.
  private readonly buffer: PerformanceEntry[]
  // Index of the slot the next track() call overwrites.
  private pointer: number = 0
  // Number of slots ever written (caps at capacity once the buffer wraps).
  private count: number = 0
  private readonly capacity: number

  /** @param capacity Maximum number of entries retained (default 10 000). */
  constructor(capacity: number = 10_000) {
    this.capacity = capacity
    this.buffer = new Array(capacity)
  }

  /**
   * Records one completed HTTP request, overwriting the oldest entry
   * once the buffer has wrapped around.
   */
  track(method: string, path: string, statusCode: number, durationMs: number): void {
    this.buffer[this.pointer] = {
      timestamp: Date.now(),
      method,
      path,
      statusCode,
      durationMs,
    }
    this.pointer = (this.pointer + 1) % this.capacity
    if (this.count < this.capacity) {
      this.count++
    }
  }

  /**
   * Aggregates entries recorded within the given window (default '1h';
   * an unknown key falls back to one hour).
   *
   * @returns Average/p95/p99 latency in ms (rounded), 5xx error rate
   *          (0..1, 3 decimals) and requests per minute (1 decimal);
   *          all zeros when the window holds no entries.
   */
  getMetrics(period: '1h' | '6h' | '24h' | '7d' = '1h'): PerformanceMetrics {
    // Single clock read so the cutoff and the throughput window agree.
    const now = Date.now()
    const cutoff = now - (PERIOD_MS[period] ?? PERIOD_MS['1h'])
    const entries: PerformanceEntry[] = []
    // Track the earliest in-window timestamp while filtering, instead of
    // Math.min(...entries.map(...)) afterwards: spreading the array would
    // exceed the engine's argument-count limit for very large capacities.
    let earliestTimestamp = Number.POSITIVE_INFINITY
    for (let i = 0; i < this.count; i++) {
      const entry = this.buffer[i]
      if (entry && entry.timestamp >= cutoff) {
        entries.push(entry)
        if (entry.timestamp < earliestTimestamp) {
          earliestTimestamp = entry.timestamp
        }
      }
    }
    if (entries.length === 0) {
      return { ...EMPTY_METRICS }
    }
    const durations = entries.map((e) => e.durationMs).sort((a, b) => a - b)
    const avg = durations.reduce((sum, d) => sum + d, 0) / durations.length
    const p95 = percentile(durations, 0.95)
    const p99 = percentile(durations, 0.99)
    // Only 5xx responses count as errors; 4xx are client faults.
    const errorCount = entries.filter((e) => e.statusCode >= 500).length
    const errorRate = errorCount / entries.length
    // Normalize throughput over the observed span, floored at one minute
    // so a burst right after startup does not produce inflated rates.
    const windowMinutes = Math.max((now - earliestTimestamp) / 60_000, 1)
    const requestsPerMinute = entries.length / windowMinutes
    return {
      avgResponseTimeMs: Math.round(avg),
      p95ResponseTimeMs: p95,
      p99ResponseTimeMs: p99,
      errorRate: Math.round(errorRate * 1000) / 1000,
      requestsPerMinute: Math.round(requestsPerMinute * 10) / 10,
    }
  }
}
/**
 * Nearest-rank percentile lookup: returns the element at index
 * floor(n * p) of an ascending-sorted array, clamped to the last index.
 * Assumes a non-empty input (callers guard the empty case).
 */
function percentile(sorted: number[], p: number): number {
  const rank = Math.min(Math.floor(sorted.length * p), sorted.length - 1)
  return sorted[rank]
}
/**
 * Singleton instance used across the application; relies on the
 * constructor's default capacity of 10 000 entries.
 */
export const performanceTracker = new PerformanceTracker()

View file

@ -0,0 +1,62 @@
import { describe, it, expect } from 'vitest'
import { PerformanceTracker } from '@/lib/monitoring/performance-tracker'
describe('PerformanceTracker', () => {
  it('tracks requests and computes metrics', () => {
    const tracker = new PerformanceTracker(1000)
    // Two successes and one server error against the same route.
    const samples: Array<[number, number]> = [
      [200, 120],
      [200, 250],
      [500, 800],
    ]
    for (const [status, durationMs] of samples) {
      tracker.track('GET', '/api/posts', status, durationMs)
    }
    const stats = tracker.getMetrics('1h')
    expect(stats.avgResponseTimeMs).toBeCloseTo(390, -1)
    expect(stats.errorRate).toBeCloseTo(0.333, 2)
    expect(stats.requestsPerMinute).toBeGreaterThan(0)
    expect(stats.p95ResponseTimeMs).toBeGreaterThanOrEqual(stats.avgResponseTimeMs)
  })

  it('ring buffer evicts old entries when capacity is exceeded', () => {
    const tracker = new PerformanceTracker(2)
    tracker.track('GET', '/a', 200, 100)
    tracker.track('GET', '/b', 200, 200)
    tracker.track('GET', '/c', 200, 300)
    const stats = tracker.getMetrics('1h')
    // Only the last 2 entries should remain (200ms and 300ms)
    expect(stats.avgResponseTimeMs).toBeCloseTo(250, -1)
  })

  it('returns zeros when no entries exist', () => {
    const stats = new PerformanceTracker(100).getMetrics('1h')
    expect(stats.avgResponseTimeMs).toBe(0)
    expect(stats.p95ResponseTimeMs).toBe(0)
    expect(stats.p99ResponseTimeMs).toBe(0)
    expect(stats.errorRate).toBe(0)
    expect(stats.requestsPerMinute).toBe(0)
  })

  it('computes p95 and p99 correctly', () => {
    const tracker = new PerformanceTracker(200)
    // Durations 1..100 ms give a known, uniform distribution.
    for (let durationMs = 1; durationMs <= 100; durationMs++) {
      tracker.track('GET', '/test', 200, durationMs)
    }
    const stats = tracker.getMetrics('1h')
    expect(stats.p95ResponseTimeMs).toBeGreaterThanOrEqual(95)
    expect(stats.p99ResponseTimeMs).toBeGreaterThanOrEqual(99)
    expect(stats.avgResponseTimeMs).toBeCloseTo(50, -1)
  })

  it('only counts 5xx status codes as errors', () => {
    const tracker = new PerformanceTracker(100)
    const responses: Array<[string, number]> = [
      ['/a', 200],
      ['/b', 404], // client error: must NOT count toward the error rate
      ['/c', 500],
      ['/d', 503],
    ]
    for (const [path, status] of responses) {
      tracker.track('GET', path, status, 100)
    }
    expect(tracker.getMetrics('1h').errorRate).toBeCloseTo(0.5, 2)
  })
})