// Implements Spor C of the post-v3.4.0 roadmap. Zero-dep harness measures
// CLAUDE_CODE_FORK_SUBAGENT cache-prefix preservation across 3 fork-children
// with identical --allowedTools at 150-250K parent context. Harness uses
// --append-system-prompt-file (avoids stdin buffer cap at >200K bytes) +
// --exclude-dynamic-system-prompt-sections (prevents per-child cache-prefix
// divergence from cwd/env/git-status). Companion analyser summarizes
// accumulated ultraexecute-stats.jsonl: percentile wall_time (p50/p90/max),
// total events, ISO time range. Output: JSON via --json <path> CLI shim.
// Result file is gitignored (*.local.md). Master-plan thresholds
// (<= 1.5K positive / >= 3.5K negative) gate the v3.5.0 Path C decision.
// Brief: .claude/projects/2026-05-04-spor-c-q3-cache-prefix-experiment/brief.md
// Master-plan: .claude/projects/2026-05-04-post-v3.4.0-roadmap/master-plan.md
// lib/stats/cache-analyzer.mjs
//
// Summarizes ultraexecute-stats.jsonl: total events, percentile wall times,
// time range. Companion to event-emit.mjs (which produces the jsonl).
//
// Designed for /ultraplan-local Spor C: gives C3 telemetry context when
// interpreting Q3 experiment numbers (5+ weeks of accumulated data on the
// operator's machine as of 2026-05-04).
//
// Zero npm dependencies. Node stdlib only.
|
import { readFileSync, existsSync } from 'node:fs';
import { pathToFileURL } from 'node:url';
// Builds the CLI help text that the entry point writes to stderr on a
// bad invocation. Returned string is newline-terminated.
function usage() {
  const helpLines = [
    'cache-analyzer.mjs — summarize ultraexecute-stats.jsonl',
    '',
    'USAGE:',
    '  node lib/stats/cache-analyzer.mjs --json <path-to-jsonl>',
    '',
    'OUTPUT (stdout, JSON):',
    '  {',
    '    "total_events": <n>,',
    '    "events_with_duration": <n>,',
    '    "wall_time_ms_p50": <ms or null>,',
    '    "wall_time_ms_p90": <ms or null>,',
    '    "wall_time_ms_max": <ms or null>,',
    '    "unique_event_names": [...],',
    '    "oldest_event_iso": "<iso8601 or null>",',
    '    "newest_event_iso": "<iso8601 or null>"',
    '  }',
    '',
    'EXIT:',
    '  0 success, 1 file not found / read error, 2 usage error.',
  ];
  return helpLines.join('\n') + '\n';
}
/**
 * Summarize raw JSONL lines from ultraexecute-stats.jsonl.
 *
 * Recognized fields on each event object:
 *   - `event` or `name` (non-empty string): event name
 *   - `duration_ms` (finite number): wall time for percentile stats
 *   - `timestamp` / `ts` / `iso` / `time` (ISO-8601 string): event time
 *
 * Blank lines, unparseable lines, and lines whose JSON value is not an
 * object are skipped. (BUGFIX: the original crashed on a literal `null`
 * line — JSON.parse('null') yields null and `obj.event` then threw a
 * TypeError outside the parse try/catch. Non-object JSON values such as
 * numbers or arrays were also miscounted as events; they carry no event
 * fields, so they are now skipped entirely.)
 *
 * @param {string[]} lines - raw lines of the jsonl file
 * @returns {{total_events: number, events_with_duration: number,
 *   wall_time_ms_p50: ?number, wall_time_ms_p90: ?number,
 *   wall_time_ms_max: ?number, unique_event_names: string[],
 *   oldest_event_iso: ?string, newest_event_iso: ?string}}
 */
export function summarize(lines) {
  const summary = {
    total_events: 0,
    events_with_duration: 0,
    wall_time_ms_p50: null,
    wall_time_ms_p90: null,
    wall_time_ms_max: null,
    unique_event_names: [],
    oldest_event_iso: null,
    newest_event_iso: null,
  };

  const durations = [];
  const names = new Set();
  let oldestMs = null;
  let newestMs = null;

  for (const line of lines) {
    const trimmed = line.trim();
    if (trimmed === '') continue;

    let obj;
    try { obj = JSON.parse(trimmed); }
    catch { continue; } // malformed line: skip it, don't abort the file

    // BUGFIX: only JSON objects are events. Guards against null (which the
    // original dereferenced and crashed on) and primitives/arrays.
    if (obj === null || typeof obj !== 'object' || Array.isArray(obj)) continue;

    summary.total_events++;

    // Event name may live under `event` or `name`; empty strings ignored.
    if (typeof obj.event === 'string' && obj.event !== '') names.add(obj.event);
    else if (typeof obj.name === 'string' && obj.name !== '') names.add(obj.name);

    if (typeof obj.duration_ms === 'number' && Number.isFinite(obj.duration_ms)) {
      durations.push(obj.duration_ms);
      summary.events_with_duration++;
    }

    // First truthy of several timestamp aliases (|| also skips '').
    const tsField = obj.timestamp || obj.ts || obj.iso || obj.time;
    if (typeof tsField === 'string') {
      const t = Date.parse(tsField);
      if (!Number.isNaN(t)) {
        if (oldestMs === null || t < oldestMs) oldestMs = t;
        if (newestMs === null || t > newestMs) newestMs = t;
      }
    }
  }

  if (durations.length > 0) {
    durations.sort((a, b) => a - b);
    // Nearest-rank percentile: index floor(n*q), clamped to the last element.
    const rank = (q) =>
      durations[Math.min(Math.floor(durations.length * q), durations.length - 1)];
    summary.wall_time_ms_p50 = rank(0.5);
    summary.wall_time_ms_p90 = rank(0.9);
    summary.wall_time_ms_max = durations[durations.length - 1];
  }

  summary.unique_event_names = [...names].sort();
  if (oldestMs !== null) summary.oldest_event_iso = new Date(oldestMs).toISOString();
  if (newestMs !== null) summary.newest_event_iso = new Date(newestMs).toISOString();

  return summary;
}
/**
 * Load a jsonl file from disk and summarize it via summarize().
 *
 * @param {string} path - path to the ultraexecute-stats.jsonl file
 * @returns {object} summary object on success, or `{ error: string }`
 *   when the file is missing or unreadable.
 */
export function summarizeFile(path) {
  if (!existsSync(path)) {
    return { error: `file not found: ${path}` };
  }

  let raw;
  try {
    raw = readFileSync(path, 'utf-8');
  } catch (err) {
    return { error: `read error: ${err.message}` };
  }

  const rawLines = raw.split('\n');
  return summarize(rawLines);
}
// CLI entry point: `node lib/stats/cache-analyzer.mjs --json <path>`.
// Writes the summary JSON to stdout; usage errors exit 2, read errors exit 1.
//
// BUGFIX: the original compared import.meta.url to a hand-built
// `file://${process.argv[1]}` string, which never matches on Windows
// (drive letters) or for paths needing percent-encoding (spaces etc.),
// so the CLI silently did nothing. pathToFileURL() builds the canonical
// file URL; the argv[1] guard covers embedder/REPL contexts where it is
// undefined.
if (process.argv[1] && import.meta.url === pathToFileURL(process.argv[1]).href) {
  const args = process.argv.slice(2);
  const jsonIdx = args.indexOf('--json');
  if (jsonIdx === -1 || !args[jsonIdx + 1]) {
    process.stderr.write(usage());
    process.exit(2); // usage error
  }
  const path = args[jsonIdx + 1];
  const result = summarizeFile(path);
  if (result.error) {
    process.stderr.write(`cache-analyzer: ${result.error}\n`);
    process.exit(1); // file not found / read error
  }
  process.stdout.write(JSON.stringify(result, null, 2) + '\n');
  process.exit(0);
}