diff --git a/packages/fetcher/package.json b/packages/fetcher/package.json
index 3245e8c..bdcc986 100644
--- a/packages/fetcher/package.json
+++ b/packages/fetcher/package.json
@@ -11,6 +11,9 @@
"types": "./dist/index.d.ts"
}
},
+ "bin": {
+ "openlaw-fetch": "./dist/cli.js"
+ },
"scripts": {
"build": "tsc --project tsconfig.build.json",
"typecheck": "tsc --noEmit",
diff --git a/packages/fetcher/src/__tests__/metrics.test.ts b/packages/fetcher/src/__tests__/metrics.test.ts
new file mode 100644
index 0000000..c58bb16
--- /dev/null
+++ b/packages/fetcher/src/__tests__/metrics.test.ts
@@ -0,0 +1,326 @@
+import { describe, it, expect, beforeEach, vi, afterEach } from 'vitest';
+import { FetcherMetrics } from '../metrics.js';
+import { OlrcFetcher } from '../fetcher.js';
+import { HashStore } from '../hash-store.js';
+import { sha256 } from '../fetcher.js';
+import { createLogger } from '@civic-source/shared';
+import type { ReleasePoint } from '@civic-source/types';
+
+// --- FetcherMetrics unit tests ---
+
+describe('FetcherMetrics', () => {
+ let metrics: FetcherMetrics;
+
+ beforeEach(() => {
+ metrics = new FetcherMetrics();
+ });
+
+ describe('initial snapshot', () => {
+ it('starts with all counters at zero', () => {
+ const snap = metrics.getSnapshot();
+ expect(snap.releasePointsDiscovered).toBe(0);
+ expect(snap.releasePointsDownloaded).toBe(0);
+ expect(snap.releasePointsSkipped).toBe(0);
+ expect(snap.downloadErrors.network).toBe(0);
+ expect(snap.downloadErrors.nonZip).toBe(0);
+ expect(snap.downloadErrors.hash).toBe(0);
+ expect(snap.downloadDurationsMs).toEqual([]);
+ });
+ });
+
+ describe('recordDiscovered', () => {
+ it('increments releasePointsDiscovered by the given count', () => {
+ metrics.recordDiscovered(5);
+ expect(metrics.getSnapshot().releasePointsDiscovered).toBe(5);
+ });
+
+ it('accumulates across multiple calls', () => {
+ metrics.recordDiscovered(3);
+ metrics.recordDiscovered(7);
+ expect(metrics.getSnapshot().releasePointsDiscovered).toBe(10);
+ });
+
+ it('accepts zero without changing the counter', () => {
+ metrics.recordDiscovered(0);
+ expect(metrics.getSnapshot().releasePointsDiscovered).toBe(0);
+ });
+ });
+
+ describe('recordDownloaded', () => {
+ it('increments releasePointsDownloaded by 1', () => {
+ metrics.recordDownloaded();
+ expect(metrics.getSnapshot().releasePointsDownloaded).toBe(1);
+ });
+
+ it('accumulates across multiple calls', () => {
+ metrics.recordDownloaded();
+ metrics.recordDownloaded();
+ metrics.recordDownloaded();
+ expect(metrics.getSnapshot().releasePointsDownloaded).toBe(3);
+ });
+ });
+
+ describe('recordSkipped', () => {
+ it('increments releasePointsSkipped by 1', () => {
+ metrics.recordSkipped();
+ expect(metrics.getSnapshot().releasePointsSkipped).toBe(1);
+ });
+
+ it('accumulates across multiple calls', () => {
+ metrics.recordSkipped();
+ metrics.recordSkipped();
+ expect(metrics.getSnapshot().releasePointsSkipped).toBe(2);
+ });
+ });
+
+ describe('recordError', () => {
+ it('increments network error counter', () => {
+ metrics.recordError('network');
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(1);
+ expect(metrics.getSnapshot().downloadErrors.nonZip).toBe(0);
+ expect(metrics.getSnapshot().downloadErrors.hash).toBe(0);
+ });
+
+ it('increments non-zip error counter', () => {
+ metrics.recordError('non-zip');
+ expect(metrics.getSnapshot().downloadErrors.nonZip).toBe(1);
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(0);
+ });
+
+ it('increments hash error counter', () => {
+ metrics.recordError('hash');
+ expect(metrics.getSnapshot().downloadErrors.hash).toBe(1);
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(0);
+ });
+
+ it('tracks each error type independently', () => {
+ metrics.recordError('network');
+ metrics.recordError('network');
+ metrics.recordError('non-zip');
+ metrics.recordError('hash');
+ const snap = metrics.getSnapshot();
+ expect(snap.downloadErrors.network).toBe(2);
+ expect(snap.downloadErrors.nonZip).toBe(1);
+ expect(snap.downloadErrors.hash).toBe(1);
+ });
+ });
+
+ describe('recordDuration', () => {
+ it('appends a duration value', () => {
+ metrics.recordDuration(42);
+ expect(metrics.getSnapshot().downloadDurationsMs).toEqual([42]);
+ });
+
+ it('appends multiple durations in insertion order', () => {
+ metrics.recordDuration(10);
+ metrics.recordDuration(20);
+ metrics.recordDuration(30);
+ expect(metrics.getSnapshot().downloadDurationsMs).toEqual([10, 20, 30]);
+ });
+ });
+
+ describe('getSnapshot isolation', () => {
+ it('returns a copy — mutating the returned array does not affect internal state', () => {
+ metrics.recordDuration(100);
+ const snap = metrics.getSnapshot();
+ snap.downloadDurationsMs.push(999);
+ expect(metrics.getSnapshot().downloadDurationsMs).toEqual([100]);
+ });
+
+ it('returns a copy of downloadErrors — mutating does not affect internal state', () => {
+ metrics.recordError('network');
+ const snap = metrics.getSnapshot();
+ snap.downloadErrors.network = 9999;
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(1);
+ });
+ });
+});
+
+// --- OlrcFetcher metrics integration tests ---
+
+describe('OlrcFetcher metrics integration', () => {
+ const logger = createLogger('test', 'error');
+
+ afterEach(() => {
+ vi.restoreAllMocks();
+ });
+
+ it('accepts an injected FetcherMetrics and exposes it via .metrics', () => {
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics });
+ expect(fetcher.metrics).toBe(metrics);
+ });
+
+ it('creates its own FetcherMetrics when none is provided', () => {
+ const fetcher = new OlrcFetcher({ logger });
+ expect(fetcher.metrics).toBeInstanceOf(FetcherMetrics);
+ });
+
+ it('records release_points_discovered from listReleasePoints', async () => {
+ const html = `
+
Public Law 118-200 (11/15/2024)
+ T42
+ T26
+ `;
+ vi.spyOn(globalThis, 'fetch').mockResolvedValueOnce(new Response(html, { status: 200 }));
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics });
+ await fetcher.listReleasePoints();
+
+ expect(metrics.getSnapshot().releasePointsDiscovered).toBe(2);
+ });
+
+ it('records release_points_discovered from listHistoricalReleasePoints', async () => {
+ const priorHtml = `
+
+ Public Law 113-21 (07/18/2013)
+
+ Public Law 118-200 (11/15/2024)
+ `;
+ const currentHtml = `Public Law 119-73 (01/23/2026)
`;
+ vi.spyOn(globalThis, 'fetch')
+ .mockResolvedValueOnce(new Response(priorHtml, { status: 200 }))
+ .mockResolvedValueOnce(new Response(currentHtml, { status: 200 }));
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics });
+ await fetcher.listHistoricalReleasePoints();
+
+ // parsePriorReleasePoints returns 2 entries; current adds 1 more but
+ // the metrics call happens before dedup/merge, so we record the prior count only.
+ expect(metrics.getSnapshot().releasePointsDiscovered).toBe(2);
+ });
+
+ it('records release_points_downloaded on successful fetchXml', async () => {
+ const zipContent = Buffer.from([0x50, 0x4b, 0x03, 0x04, ...Buffer.from('payload')]);
+ vi.spyOn(globalThis, 'fetch').mockResolvedValueOnce(
+ new Response(zipContent, { status: 200 })
+ );
+
+ const { mkdtemp } = await import('node:fs/promises');
+ const { tmpdir } = await import('node:os');
+ const { join } = await import('node:path');
+ const tmpDir = await mkdtemp(join(tmpdir(), 'metrics-test-'));
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics, hashStore: new HashStore(tmpDir) });
+ const rp: ReleasePoint = {
+ title: '42',
+ publicLaw: 'PL 118-200',
+ dateET: '2024-01-01T00:00:00Z',
+ uslmUrl: 'https://example.com/t42.zip',
+ sha256Hash: '0'.repeat(64),
+ };
+
+ const result = await fetcher.fetchXml(rp);
+ expect(result.ok).toBe(true);
+ expect(metrics.getSnapshot().releasePointsDownloaded).toBe(1);
+ expect(metrics.getSnapshot().releasePointsSkipped).toBe(0);
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(0);
+ });
+
+ it('records release_points_skipped on hash-unchanged fetchXml', async () => {
+ const zipContent = Buffer.from([0x50, 0x4b, 0x03, 0x04, ...Buffer.from('cached-data')]);
+ const hash = sha256(zipContent);
+
+ const { mkdtemp } = await import('node:fs/promises');
+ const { tmpdir } = await import('node:os');
+ const { join } = await import('node:path');
+ const tmpDir = await mkdtemp(join(tmpdir(), 'metrics-test-'));
+ const hashStore = new HashStore(tmpDir);
+
+ const rp: ReleasePoint = {
+ title: '26',
+ publicLaw: 'PL 118-200',
+ dateET: '2024-01-01T00:00:00Z',
+ uslmUrl: 'https://example.com/t26.zip',
+ sha256Hash: '0'.repeat(64),
+ };
+ await hashStore.setHash(`xml:${rp.title}:${rp.uslmUrl}`, hash);
+
+ vi.spyOn(globalThis, 'fetch').mockResolvedValueOnce(
+ new Response(zipContent, { status: 200 })
+ );
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics, hashStore });
+ const result = await fetcher.fetchXml(rp);
+
+ expect(result.ok).toBe(true);
+ if (result.ok) expect(result.value).toBe('');
+ expect(metrics.getSnapshot().releasePointsSkipped).toBe(1);
+ expect(metrics.getSnapshot().releasePointsDownloaded).toBe(0);
+ });
+
+ it('records network error on fetchXml fetch failure', async () => {
+ vi.spyOn(globalThis, 'fetch').mockResolvedValue(
+ new Response('', { status: 500, statusText: 'Error' })
+ );
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics });
+ const rp: ReleasePoint = {
+ title: '1',
+ publicLaw: 'PL 118-1',
+ dateET: '2024-01-01T00:00:00Z',
+ uslmUrl: 'https://example.com/t1.zip',
+ sha256Hash: '0'.repeat(64),
+ };
+
+ const result = await fetcher.fetchXml(rp);
+ expect(result.ok).toBe(false);
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(1);
+ expect(metrics.getSnapshot().releasePointsDownloaded).toBe(0);
+ });
+
+ it('records non-zip error on fetchXml invalid content', async () => {
+ const notZip = Buffer.from('this is not a ZIP file');
+ vi.spyOn(globalThis, 'fetch').mockResolvedValueOnce(
+ new Response(notZip, { status: 200 })
+ );
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics });
+ const rp: ReleasePoint = {
+ title: '5',
+ publicLaw: 'PL 118-5',
+ dateET: '2024-01-01T00:00:00Z',
+ uslmUrl: 'https://example.com/t5.zip',
+ sha256Hash: '0'.repeat(64),
+ };
+
+ const result = await fetcher.fetchXml(rp);
+ expect(result.ok).toBe(false);
+ expect(metrics.getSnapshot().downloadErrors.nonZip).toBe(1);
+ expect(metrics.getSnapshot().downloadErrors.network).toBe(0);
+ });
+
+ it('records download_duration_ms for every fetchXml call', async () => {
+ const zipContent = Buffer.from([0x50, 0x4b, 0x03, 0x04, ...Buffer.from('data')]);
+ // Each Response body can only be read once — use a fresh instance per call.
+ vi.spyOn(globalThis, 'fetch')
+ .mockResolvedValueOnce(new Response(zipContent, { status: 200 }))
+ .mockResolvedValueOnce(new Response(zipContent, { status: 200 }));
+
+ const { mkdtemp } = await import('node:fs/promises');
+ const { tmpdir } = await import('node:os');
+ const { join } = await import('node:path');
+ const tmpDir = await mkdtemp(join(tmpdir(), 'metrics-dur-test-'));
+
+ const metrics = new FetcherMetrics();
+ const fetcher = new OlrcFetcher({ logger, metrics, hashStore: new HashStore(tmpDir) });
+
+ const rp1: ReleasePoint = { title: '1', publicLaw: 'PL 118-1', dateET: '2024-01-01T00:00:00Z', uslmUrl: 'https://example.com/t1.zip', sha256Hash: '0'.repeat(64) };
+ const rp2: ReleasePoint = { title: '2', publicLaw: 'PL 118-1', dateET: '2024-01-01T00:00:00Z', uslmUrl: 'https://example.com/t2.zip', sha256Hash: '0'.repeat(64) };
+
+ await fetcher.fetchXml(rp1);
+ await fetcher.fetchXml(rp2);
+
+ const snap = metrics.getSnapshot();
+ expect(snap.downloadDurationsMs).toHaveLength(2);
+ for (const d of snap.downloadDurationsMs) {
+ expect(d).toBeGreaterThanOrEqual(0);
+ }
+ });
+});
diff --git a/packages/fetcher/src/cli.ts b/packages/fetcher/src/cli.ts
new file mode 100644
index 0000000..f969e9d
--- /dev/null
+++ b/packages/fetcher/src/cli.ts
@@ -0,0 +1,221 @@
+#!/usr/bin/env node
+/**
+ * openlaw-fetch CLI — download and inspect OLRC US Code release points.
+ *
+ * Commands:
+ * list List current release point titles
+ * history List all historical release points chronologically
+ * download --title <n> Download XML for a specific title
+ * download --all Download all titles for current release
+ */
+
+import { OlrcFetcher } from './fetcher.js';
+import { createLogger } from '@civic-source/shared';
+import type { ReleasePoint } from '@civic-source/types';
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+function printUsage(): void {
+ process.stdout.write(
+ [
+ 'Usage: openlaw-fetch [options]',
+ '',
+ 'Commands:',
+ ' list List current release point titles',
+ ' history List all historical release points chronologically',
+ ' download --title <n> Download XML for title N',
+ ' download --all Download all titles for current release',
+ '',
+ 'Options:',
+ ' --title <n> Title number (e.g. 26)',
+ ' --all Download all titles',
+ ' --help Show this help',
+ '',
+ ].join('\n')
+ );
+}
+
+/** Pad a string to a fixed width (truncates if too long). */
+function col(value: string, width: number): string {
+ if (value.length >= width) return value.slice(0, width - 1) + ' ';
+ return value + ' '.repeat(width - value.length);
+}
+
+function printReleaseTable(points: ReleasePoint[]): void {
+ const header = col('Title', 8) + col('Public Law', 16) + col('Date (ET)', 14) + 'URL';
+ const separator = '-'.repeat(header.length);
+ process.stdout.write(header + '\n');
+ process.stdout.write(separator + '\n');
+ for (const p of points) {
+ const date = p.dateET.slice(0, 10); // YYYY-MM-DD
+ process.stdout.write(
+ col(p.title, 8) + col(p.publicLaw, 16) + col(date, 14) + p.uslmUrl + '\n'
+ );
+ }
+ process.stdout.write(`\n${points.length} title(s)\n`);
+}
+
+interface HistoricalPoint {
+ publicLaw: string;
+ congress: string;
+ law: string;
+ dateET: string;
+ path: string;
+}
+
+function printHistoryTable(points: HistoricalPoint[]): void {
+ const header = col('Public Law', 16) + col('Congress', 12) + col('Law', 16) + col('Date (ET)', 14) + 'Path';
+ const separator = '-'.repeat(header.length);
+ process.stdout.write(header + '\n');
+ process.stdout.write(separator + '\n');
+ for (const p of points) {
+ const date = p.dateET.slice(0, 10);
+ process.stdout.write(
+ col(p.publicLaw, 16) + col(p.congress, 12) + col(p.law, 16) + col(date, 14) + p.path + '\n'
+ );
+ }
+ process.stdout.write(`\n${points.length} release point(s)\n`);
+}
+
+function parseArgs(argv: string[]): Record<string, string | boolean> {
+ const args: Record<string, string | boolean> = {};
+ for (let i = 0; i < argv.length; i++) {
+ const arg = argv[i];
+ if (arg === undefined) continue;
+ if (arg === '--all' || arg === '--help') {
+ args[arg.slice(2)] = true;
+ } else if (arg === '--title' && i + 1 < argv.length) {
+ const next = argv[++i];
+ if (next !== undefined) args['title'] = next;
+ } else if (!arg.startsWith('--')) {
+ args['command'] = arg;
+ }
+ }
+ return args;
+}
+
+// ---------------------------------------------------------------------------
+// Commands
+// ---------------------------------------------------------------------------
+
+async function cmdList(fetcher: OlrcFetcher): Promise<void> {
+ process.stdout.write('Fetching current release points…\n\n');
+ const result = await fetcher.listReleasePoints();
+ if (!result.ok) {
+ process.stderr.write(`Error: ${result.error.message}\n`);
+ process.exit(1);
+ }
+ printReleaseTable(result.value);
+}
+
+async function cmdHistory(fetcher: OlrcFetcher): Promise<void> {
+ process.stdout.write('Fetching historical release points…\n\n');
+ const result = await fetcher.listHistoricalReleasePoints();
+ if (!result.ok) {
+ process.stderr.write(`Error: ${result.error.message}\n`);
+ process.exit(1);
+ }
+ printHistoryTable(result.value);
+}
+
+async function cmdDownload(
+ fetcher: OlrcFetcher,
+ args: Record<string, string | boolean>
+): Promise<void> {
+ const downloadAll = args['all'] === true;
+ const titleArg = typeof args['title'] === 'string' ? args['title'] : undefined;
+
+ if (!downloadAll && titleArg === undefined) {
+ process.stderr.write('Error: download requires --title or --all\n');
+ printUsage();
+ process.exit(1);
+ }
+
+ // Resolve release point(s)
+ const listResult = await fetcher.listReleasePoints(downloadAll ? undefined : titleArg);
+ if (!listResult.ok) {
+ process.stderr.write(`Error listing release points: ${listResult.error.message}\n`);
+ process.exit(1);
+ }
+
+ const points = listResult.value;
+ if (points.length === 0) {
+ process.stderr.write(`No release points found${titleArg !== undefined ? ` for title ${titleArg}` : ''}.\n`);
+ process.exit(1);
+ }
+
+ process.stdout.write(`Downloading ${points.length} title(s)…\n`);
+
+ let succeeded = 0;
+ let skipped = 0;
+ let failed = 0;
+
+ for (const point of points) {
+ process.stdout.write(` Title ${point.title} (${point.publicLaw})… `);
+ const result = await fetcher.fetchXml(point);
+ if (!result.ok) {
+ process.stdout.write('FAILED\n');
+ process.stderr.write(` Error: ${result.error.message}\n`);
+ failed++;
+ } else if (result.value === '') {
+ process.stdout.write('unchanged (skipped)\n');
+ skipped++;
+ } else {
+ // result.value is base64-encoded ZIP content
+ const bytes = Buffer.from(result.value, 'base64').length;
+ process.stdout.write(`OK (${bytes.toLocaleString()} bytes, base64 ZIP)\n`);
+ succeeded++;
+ }
+ }
+
+ process.stdout.write(`\nDone: ${succeeded} downloaded, ${skipped} unchanged, ${failed} failed.\n`);
+ if (failed > 0) process.exit(1);
+}
+
+// ---------------------------------------------------------------------------
+// Entry point
+// ---------------------------------------------------------------------------
+
+async function main(): Promise<void> {
+ const argv = process.argv.slice(2);
+ const args = parseArgs(argv);
+
+ if (args['help'] === true || argv.length === 0) {
+ printUsage();
+ process.exit(0);
+ }
+
+ const command = args['command'];
+ if (command === undefined) {
+ process.stderr.write('Error: no command specified\n\n');
+ printUsage();
+ process.exit(1);
+ }
+
+ const logger = createLogger('openlaw-fetch');
+ const fetcher = new OlrcFetcher({ logger });
+
+ switch (command) {
+ case 'list':
+ await cmdList(fetcher);
+ break;
+ case 'history':
+ await cmdHistory(fetcher);
+ break;
+ case 'download':
+ await cmdDownload(fetcher, args);
+ break;
+ default:
+ process.stderr.write(`Error: unknown command "${command}"\n\n`);
+ printUsage();
+ process.exit(1);
+ }
+}
+
+main().catch((err: unknown) => {
+ const message = err instanceof Error ? err.message : String(err);
+ process.stderr.write(`Fatal: ${message}\n`);
+ process.exit(1);
+});
diff --git a/packages/fetcher/src/fetcher.ts b/packages/fetcher/src/fetcher.ts
index 5d85c14..6237527 100644
--- a/packages/fetcher/src/fetcher.ts
+++ b/packages/fetcher/src/fetcher.ts
@@ -3,6 +3,7 @@ import { type IUsCodeFetcher, type ReleasePoint, type Result, ok, err } from '@c
import { type Logger, createLogger, fetchWithRetry as sharedFetchWithRetry } from '@civic-source/shared';
import { OLRC_DOWNLOAD_PAGE, OLRC_PRIOR_RELEASE_POINTS_PAGE } from './constants.js';
import { HashStore } from './hash-store.js';
+import { FetcherMetrics } from './metrics.js';
/** Compute SHA-256 hex digest of a buffer */
export function sha256(data: Buffer): string {
@@ -158,10 +159,12 @@ export function parseReleasePoints(html: string): ReleasePoint[] {
export class OlrcFetcher implements IUsCodeFetcher {
private readonly logger: Logger;
private readonly hashStore: HashStore;
+ readonly metrics: FetcherMetrics;
- constructor(options?: { logger?: Logger; hashStore?: HashStore }) {
+ constructor(options?: { logger?: Logger; hashStore?: HashStore; metrics?: FetcherMetrics }) {
this.logger = options?.logger ?? createLogger('OlrcFetcher');
this.hashStore = options?.hashStore ?? new HashStore();
+ this.metrics = options?.metrics ?? new FetcherMetrics();
}
/** List available release points from the current download page, optionally filtered by title */
@@ -177,6 +180,7 @@ export class OlrcFetcher implements IUsCodeFetcher {
const html = await result.value.text();
let points = parseReleasePoints(html);
+ this.metrics.recordDiscovered(points.length);
if (title !== undefined) {
points = points.filter((p) => p.title === title);
@@ -205,6 +209,7 @@ export class OlrcFetcher implements IUsCodeFetcher {
const priorHtml = await priorResult.value.text();
const historicalPoints = parsePriorReleasePoints(priorHtml);
+ this.metrics.recordDiscovered(historicalPoints.length);
// Also fetch current release point to include it
const currentResult = await fetchWithRetry(OLRC_DOWNLOAD_PAGE, this.logger);
@@ -239,9 +244,13 @@ export class OlrcFetcher implements IUsCodeFetcher {
async fetchXml(releasePoint: ReleasePoint): Promise<Result<string>> {
this.logger.info('Fetching XML', { title: releasePoint.title, url: releasePoint.uslmUrl });
const timer = this.logger.startTimer('fetchXml');
+ const startMs = performance.now();
const result = await fetchWithRetry(releasePoint.uslmUrl, this.logger);
if (!result.ok) {
+ const durationMs = performance.now() - startMs;
+ this.metrics.recordDuration(durationMs);
+ this.metrics.recordError('network');
timer();
return result;
}
@@ -254,17 +263,26 @@ export class OlrcFetcher implements IUsCodeFetcher {
const changed = await this.hashStore.hasChanged(hashKey, hash);
if (!changed) {
this.logger.info('Content unchanged, skipping', { title: releasePoint.title, hash });
+ const durationMs = performance.now() - startMs;
+ this.metrics.recordDuration(durationMs);
+ this.metrics.recordSkipped();
timer();
return ok('');
}
// Validate that we got something that looks like a ZIP (PK signature)
if (buffer.length < 4 || buffer[0] !== 0x50 || buffer[1] !== 0x4b) {
+ const durationMs = performance.now() - startMs;
+ this.metrics.recordDuration(durationMs);
+ this.metrics.recordError('non-zip');
timer();
return err(new Error('Downloaded content is not a valid ZIP file'));
}
await this.hashStore.setHash(hashKey, hash);
+ const durationMs = performance.now() - startMs;
+ this.metrics.recordDuration(durationMs);
+ this.metrics.recordDownloaded();
timer();
// Return raw buffer as base64 — caller will extract XML from the ZIP
diff --git a/packages/fetcher/src/index.ts b/packages/fetcher/src/index.ts
index cb4fe12..f501758 100644
--- a/packages/fetcher/src/index.ts
+++ b/packages/fetcher/src/index.ts
@@ -21,4 +21,5 @@ export {
type CurrentReleaseInfo,
} from './fetcher.js';
export { HashStore } from './hash-store.js';
+export { FetcherMetrics, type FetcherMetricsSnapshot, type DownloadErrorType } from './metrics.js';
export { createLogger, type Logger, type LogLevel } from '@civic-source/shared';
diff --git a/packages/fetcher/src/metrics.ts b/packages/fetcher/src/metrics.ts
new file mode 100644
index 0000000..d069280
--- /dev/null
+++ b/packages/fetcher/src/metrics.ts
@@ -0,0 +1,89 @@
+/**
+ * Fetcher-level observability metrics.
+ *
+ * Tracks release point discovery, download success/skip/error, and per-call
+ * download duration. Provides a plain-object snapshot for reporting or
+ * forwarding to a higher-level pipeline metrics collector.
+ */
+
+/** Error type discriminator for download_errors counter. */
+export type DownloadErrorType = 'network' | 'non-zip' | 'hash';
+
+/** Point-in-time snapshot of all fetcher metrics. */
+export interface FetcherMetricsSnapshot {
+ /** Total release points discovered (sum of parseReleasePoints + parsePriorReleasePoints results). */
+ releasePointsDiscovered: number;
+ /** Successful fetchXml calls that returned new content. */
+ releasePointsDownloaded: number;
+ /** fetchXml calls skipped because the hash was unchanged (cache hits). */
+ releasePointsSkipped: number;
+ /** Download errors broken down by type. */
+ downloadErrors: {
+ /** fetch() threw or returned a non-2xx status. */
+ network: number;
+ /** Downloaded bytes did not begin with the ZIP PK signature. */
+ nonZip: number;
+ /** Hash computation or store interaction failed. */
+ hash: number;
+ };
+ /** All recorded per-call download durations in milliseconds, in insertion order. */
+ downloadDurationsMs: number[];
+}
+
+/** Mutable fetcher metrics collector. */
+export class FetcherMetrics {
+ private _releasePointsDiscovered = 0;
+ private _releasePointsDownloaded = 0;
+ private _releasePointsSkipped = 0;
+ private readonly _downloadErrors = { network: 0, nonZip: 0, hash: 0 };
+ private readonly _downloadDurationsMs: number[] = [];
+
+ /** Increment the release_points_discovered counter by `count`. */
+ recordDiscovered(count: number): void {
+ this._releasePointsDiscovered += count;
+ }
+
+ /** Increment the release_points_downloaded counter by 1. */
+ recordDownloaded(): void {
+ this._releasePointsDownloaded += 1;
+ }
+
+ /** Increment the release_points_skipped counter by 1. */
+ recordSkipped(): void {
+ this._releasePointsSkipped += 1;
+ }
+
+ /**
+ * Increment the download_errors counter for the given error type.
+ * @param type - 'network' | 'non-zip' | 'hash'
+ */
+ recordError(type: DownloadErrorType): void {
+ if (type === 'network') {
+ this._downloadErrors.network += 1;
+ } else if (type === 'non-zip') {
+ this._downloadErrors.nonZip += 1;
+ } else {
+ this._downloadErrors.hash += 1;
+ }
+ }
+
+ /** Append a per-call download duration measurement in milliseconds. */
+ recordDuration(durationMs: number): void {
+ this._downloadDurationsMs.push(durationMs);
+ }
+
+ /**
+ * Return an immutable snapshot of all current metrics.
+ * The `downloadDurationsMs` array is a shallow copy so callers cannot
+ * mutate internal state.
+ */
+ getSnapshot(): FetcherMetricsSnapshot {
+ return {
+ releasePointsDiscovered: this._releasePointsDiscovered,
+ releasePointsDownloaded: this._releasePointsDownloaded,
+ releasePointsSkipped: this._releasePointsSkipped,
+ downloadErrors: { ...this._downloadErrors },
+ downloadDurationsMs: [...this._downloadDurationsMs],
+ };
+ }
+}