ktg-plugin-marketplace/plugins/ultraplan-local/hooks/scripts/pre-compact-flush.mjs
Kjell Tore Guttormsen cc38155fa6 feat(ultraplan-local): Spor 2 — HANDOVER-CONTRACTS.md + PreCompact-hook (P0 progress.json drift fix)
Reconciles divergence after parallel-session race: includes both Spor 1 wiring (validators inn i 4 commands + 1 agent) og Spor 2 (HANDOVER-CONTRACTS.md + PreCompact-hook).

Spor 1 wiring (re-applied etter rebase):
- /ultrabrief-local Phase 4g — brief-validator post-write
- /ultraplan-local Phase 1 — brief-validator --soft + research-validator --dir + architecture-discovery
- planning-orchestrator Phase 5.5 — plan-validator --strict erstatter 3 grep -cE-kall
- /ultraexecute-local Phase 2.3 (--validate) — plan-validator + progress-validator
- YAML-parser-utvidelse: list-of-dicts (must_contain), støtter v1.7 template-format

Spor 2 NEW:
- docs/HANDOVER-CONTRACTS.md (~310 linjer) — single source of truth for de 5 pipeline-handover-formatene m/ faste sub-headinger (Producer / Consumer / Path / Frontmatter schema / Body invariants / Validation strategy / Versioning / Failure modes)
- hooks/scripts/pre-compact-flush.mjs (NY) — fikser dokumentert P0 i docs/ultraexecute-v2-observations-from-config-audit-v4.md:
  * Fyrer på PreCompact-event (CC v2.1.105+)
  * Lokaliserer progress.json under .claude/projects/*/
  * Sammenligner stored current_step mot git log {session_start_sha}..HEAD
  * Atomisk write (tmp + rename), monoton — current_step kan aldri reduseres
  * Aldri blokkerer compaction (exit 0)
- hooks/hooks.json registrerer PreCompact-hooken

Resultat: /ultraexecute-local --resume virker nå etter context compaction selv ved skill-driven execution.

Docs:
- README.md (plugin): "Quality infrastructure", "Handover contracts", "PreCompact resume integrity"
- CLAUDE.md (plugin): peker til HANDOVER-CONTRACTS.md + dokumenterer pre-compact-flush
- README.md (marketplace root): bullet-liste over Spor 2-deliverables (resolved merge-konflikt fra parallell-sesjon)

Tester: 109 grønn (ingen regresjon).

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-01 06:07:01 +02:00

155 lines
No EOL
5.6 KiB
JavaScript

#!/usr/bin/env node
// Hook: pre-compact-flush.mjs
// Event: PreCompact (Claude Code v2.1.105+)
// Purpose: Flush progress.json drift before context compaction so /ultraexecute-local --resume
// works after long conversations. Direct fix for the documented P0 in
// docs/ultraexecute-v2-observations-from-config-audit-v4.md.
//
// Behavior:
// 1. Locate {cwd}/.claude/projects/*/progress.json (any nested project)
// 2. Read progress.json + sibling plan.md
// 3. Run `git log --oneline {session_start_sha}..HEAD`
// 4. For each commit, match against plan steps' commit_message_pattern
// 5. If derived current_step > stored current_step → write fresh checkpoint
// atomically (tmp + rename), monotonic only (current_step never decreases).
// 6. Always exit 0 — NEVER blocks compaction.
import { execFileSync, execSync } from 'node:child_process';
import { existsSync, readFileSync, readdirSync, renameSync, statSync, writeFileSync } from 'node:fs';
import { dirname, join } from 'node:path';
import { fileURLToPath } from 'node:url';
// Absolute directory of this script (…/hooks/scripts/).
const HERE = dirname(fileURLToPath(import.meta.url));
// Plugin root, two levels up from hooks/scripts/.
// NOTE(review): not referenced anywhere in this file — presumably kept for
// parity with sibling hook scripts or future use; confirm before removing.
const PLUGIN_ROOT = join(HERE, '..', '..');
/**
 * Locate every per-project progress.json under {cwd}/.claude/projects/.
 *
 * Scans exactly one directory level deep. Each project directory containing a
 * progress.json yields an entry; planPath is the sibling plan.md path, which
 * may or may not exist (callers check).
 *
 * Fix: the original existsSync-then-statSync pattern could throw if an entry
 * vanished between the two calls (TOCTOU). An unhandled throw here exits
 * non-zero and would block compaction, breaking this hook's contract — so
 * every stat is now wrapped and failures simply skip the entry.
 *
 * @param {string} cwd - workspace root to scan
 * @returns {{projDir: string, progPath: string, planPath: string}[]}
 */
function findProgressFiles(cwd) {
  const projectsDir = join(cwd, '.claude', 'projects');
  const found = [];
  let entries;
  try {
    entries = readdirSync(projectsDir);
  } catch {
    return found; // projects dir missing or unreadable — nothing to flush
  }
  for (const name of entries) {
    const projDir = join(projectsDir, name);
    const progPath = join(projDir, 'progress.json');
    // statSync follows symlinks (so symlinked project dirs still count);
    // any filesystem race just skips this entry instead of crashing the hook.
    try {
      if (!statSync(projDir).isDirectory()) continue;
      if (!statSync(progPath).isFile()) continue;
    } catch {
      continue;
    }
    found.push({ projDir, progPath, planPath: join(projDir, 'plan.md') });
  }
  return found;
}
/**
 * Parse a JSON file, returning null on any read or parse failure.
 * @param {string} path - file to read
 * @returns {object|null}
 */
function readJson(path) {
  try {
    const raw = readFileSync(path, 'utf-8');
    return JSON.parse(raw);
  } catch {
    return null; // missing file and malformed JSON are both "no data"
  }
}
/**
 * Extract a step-number → conventional-commit-prefix map from a plan.md.
 *
 * Each "### Step N:" section is searched for a
 * `**Checkpoint:** \`git commit -m "..."\`` line; if the commit message
 * starts with a conventional prefix like `feat(scope):`, that prefix is
 * recorded for step N. Steps without a checkpoint, or with a
 * non-conventional message, are omitted.
 *
 * @param {string} planPath - path to plan.md (may not exist)
 * @returns {Map<number, string>} step number → commit-message prefix
 */
function readPlanCheckpointPatterns(planPath) {
  if (!existsSync(planPath)) return new Map();
  const text = readFileSync(planPath, 'utf-8');
  const patterns = new Map();
  // Collect all step headings up front so each section's body is the span
  // between its heading and the next (or end of file).
  const headings = [...text.matchAll(/^### Step (\d+):/gm)].map((m) => ({
    n: Number.parseInt(m[1], 10),
    idx: m.index,
  }));
  headings.forEach((heading, i) => {
    const sectionEnd = i + 1 < headings.length ? headings[i + 1].idx : text.length;
    const section = text.slice(heading.idx, sectionEnd);
    const cp = section.match(/\*\*Checkpoint:\*\*\s+`git commit -m "([^"]+)"`/);
    if (!cp) return;
    // Keep only the conventional-commit prefix, e.g. "feat(core):".
    const prefix = (cp[1].match(/^([a-z]+)\(([^)]+)\):/) || [])[0];
    if (prefix) patterns.set(heading.n, prefix);
  });
  return patterns;
}
/**
 * List commits in {baseSha}..HEAD for the repository at repoDir.
 *
 * Fix: the original built a shell command string with repoDir and baseSha
 * interpolated directly — a corrupted progress.json could smuggle shell
 * metacharacters into the invocation, and the embedded quoting/`2>/dev/null`
 * was shell-dependent. Now uses execFileSync with an argv array (no shell),
 * validates the SHA, and silences stderr via stdio options instead.
 *
 * @param {string} repoDir - repository working directory
 * @param {string|null|undefined} baseSha - exclusive lower bound of the range
 * @returns {{sha: string, subject: string}[]} commits, [] on any failure
 */
function gitLog(repoDir, baseSha) {
  if (!baseSha) return [];
  // Reject anything that is not a plausible (abbreviated) hex SHA.
  if (!/^[0-9a-fA-F]{4,40}$/.test(baseSha)) return [];
  try {
    const out = execFileSync(
      'git',
      ['-C', repoDir, 'log', '--pretty=format:%H %s', `${baseSha}..HEAD`],
      { encoding: 'utf-8', timeout: 5000, stdio: ['ignore', 'pipe', 'ignore'] },
    );
    return out.trim().split('\n').filter(Boolean).map((line) => {
      const sp = line.indexOf(' ');
      return { sha: line.slice(0, sp), subject: line.slice(sp + 1) };
    });
  } catch {
    return []; // not a repo, unknown SHA, git missing — treat as "no commits"
  }
}
/**
 * Derive the highest plan step confirmed by a commit made since session
 * start. Monotonic: never returns a value below the stored current_step.
 *
 * @param {object|null} progress - parsed progress.json (needs .steps)
 * @param {Map<number, string>} plan - step number → commit-subject prefix
 * @param {{sha: string, subject: string}[]} gitCommits - commits since base
 * @returns {number|null} derived step, or null when nothing can be derived
 */
function deriveCurrentStep(progress, plan, gitCommits) {
  if (!progress?.steps || gitCommits.length === 0) return null;
  let highest = progress.current_step || 0;
  for (const [stepN, prefix] of plan.entries()) {
    if (stepN <= highest) continue;
    // Prefixes may carry regex-style escapes; strip backslashes so we
    // compare against the literal commit subject.
    const literal = prefix.replace(/\\/g, '');
    if (gitCommits.some((c) => c.subject.startsWith(literal))) highest = stepN;
  }
  return highest;
}
/**
 * Replace the JSON file at path atomically: serialize to a temp sibling,
 * then rename over the target (rename is atomic on the same filesystem, so
 * readers never observe a partially written file).
 *
 * @param {string} path - destination file
 * @param {object} obj - value to serialize as pretty-printed JSON
 */
function atomicWrite(path, obj) {
  const payload = JSON.stringify(obj, null, 2);
  const stagingPath = `${path}.tmp`;
  writeFileSync(stagingPath, payload);
  renameSync(stagingPath, path);
}
/**
 * Resolve the git worktree root containing dir.
 *
 * @param {string} dir - any directory inside (or outside) a repository
 * @returns {string|null} absolute worktree root, or null when dir is not
 *   inside a repository or git is unavailable
 */
function repoRootOf(dir) {
  const cmd = `git -C "${dir}" rev-parse --show-toplevel 2>/dev/null`;
  try {
    const root = execSync(cmd, { encoding: 'utf-8', timeout: 2000 });
    return root.trim();
  } catch {
    return null; // non-repo dir, missing git, or timeout all mean "no root"
  }
}
// ---- Entry point ---------------------------------------------------------
// Drain stdin: Claude Code delivers the PreCompact event payload there. We
// need nothing from it yet, but reading avoids blocking/EPIPE on the writer.
try { readFileSync(0, 'utf-8'); } catch { /* no payload — fine */ }

// Fix: the original ran the flush loop unguarded, so any unexpected throw
// (fs race, malformed plan.md read, etc.) exited non-zero — violating the
// documented "NEVER blocks compaction" contract. The whole flush is
// best-effort, so it now runs inside one try/catch and we always exit 0.
try {
  const cwd = process.env.CLAUDE_PROJECT_DIR || process.cwd();
  for (const { projDir, progPath, planPath } of findProgressFiles(cwd)) {
    const progress = readJson(progPath);
    if (!progress || progress.status === 'completed') continue;
    const repoRoot = repoRootOf(projDir);
    if (!repoRoot) continue;
    const plan = readPlanCheckpointPatterns(planPath);
    if (plan.size === 0) continue;
    const sessionStart = progress.session_start_sha;
    if (!sessionStart) continue;
    const commits = gitLog(repoRoot, sessionStart);
    const derivedStep = deriveCurrentStep(progress, plan, commits);
    // Monotonic guard: only ever move current_step forward.
    if (derivedStep !== null && derivedStep > (progress.current_step || 0)) {
      progress.current_step = derivedStep;
      progress.updated_at = new Date().toISOString();
      // Backfill a step record if the session never got to write one before
      // context pressure triggered compaction.
      if (!progress.steps[String(derivedStep)]) {
        progress.steps[String(derivedStep)] = {
          status: 'completed', attempts: 1, error: null,
          completed_at: progress.updated_at, commit: null, manifest_audit: 'n/a',
          note: 'reconstructed by pre-compact-flush from git log',
        };
      }
      atomicWrite(progPath, progress);
      process.stderr.write(`[ultraplan-local] pre-compact flush: ${progPath} current_step=${derivedStep}\n`);
    }
  }
} catch { /* best-effort — never block compaction */ }
process.exit(0);