refactor(ultraplan-local): extract atomicWriteJson to lib/util

Three changes in one commit:

1. NEW lib/util/atomic-write.mjs — exports atomicWriteJson(path, obj),
   the canonical tmp+rename pattern. Reused by pre-compact-flush.mjs and
   (in subsequent steps) by the new session-state writer.

2. NEW tests/lib/atomic-write.test.mjs — 4 unit tests covering
   round-trip, no-orphan-tmp, overwrite-atomic, pretty-print formatting.

3. REFACTOR hooks/scripts/pre-compact-flush.mjs — replace the inline
   atomicWrite() with the imported atomicWriteJson(). Also fixes a
   pre-existing syntax error (leading whitespace + stray --resume token
   outside the comment block) that silently broke the hook from v3.1.0
   onward — PreCompact runtime is fail-open and swallowed the error.
   File reformatted with standard zero-indent JS.

163 → 167 tests, 0 fail.

Step 2 of /ultracontinue v3.3.0 (project 2026-05-01-ultracontinue).
This commit is contained in:
Kjell Tore Guttormsen 2026-05-01 20:21:15 +02:00
commit 655c8d46f8
3 changed files with 216 additions and 146 deletions

View file

@@ -1,155 +1,150 @@
#!/usr/bin/env node
// Hook: pre-compact-flush.mjs
// Event: PreCompact (Claude Code v2.1.105+)
// Purpose: Flush progress.json drift before context compaction so /ultraexecute-local
--resume
// works after long conversations. Direct fix for the documented P0 in
// docs/ultraexecute-v2-observations-from-config-audit-v4.md.
//
// Behavior:
// 1. Locate {cwd}/.claude/projects/* / progress.json (any nested project)
// 2. Read progress.json + sibling plan.md
// 3. Run `git log --oneline {session_start_sha}..HEAD`
// 4. For each commit, match against plan steps' commit_message_pattern
// 5. If derived current_step > stored current_step → write fresh checkpoint
// atomically (tmp + rename), monotonic only (current_step never decreases).
// 6. Always exit 0 — NEVER blocks compaction.
#!/usr/bin/env node
// Hook: pre-compact-flush.mjs
// Event: PreCompact (Claude Code v2.1.105+)
// Purpose: Flush progress.json drift before context compaction so
// /ultraexecute-local --resume works after long conversations.
// Direct fix for the documented P0 in
// docs/ultraexecute-v2-observations-from-config-audit-v4.md.
//
// Behavior:
// 1. Locate {cwd}/.claude/projects/* / progress.json (any nested project)
// 2. Read progress.json + sibling plan.md
// 3. Run `git log --oneline {session_start_sha}..HEAD`
// 4. For each commit, match against plan steps' commit_message_pattern
// 5. If derived current_step > stored current_step → write fresh checkpoint
// atomically (tmp + rename), monotonic only (current_step never decreases).
// 6. Always exit 0 — NEVER blocks compaction.
//
// v3.3.0:
// - atomicWrite extracted to lib/util/atomic-write.mjs for reuse
// - File reformatted (removed pre-existing leading-whitespace syntax error
// that silently broke the hook since v3.1.0; PreCompact swallowed it)
import { readFileSync, writeFileSync, renameSync, existsSync, readdirSync, statSync } from
'node:fs';
import { join, dirname } from 'node:path';
import { execSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
import { readFileSync, existsSync, readdirSync, statSync } from 'node:fs';
import { join, dirname } from 'node:path';
import { execSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
import { atomicWriteJson } from '../../lib/util/atomic-write.mjs';
const HERE = dirname(fileURLToPath(import.meta.url));
const PLUGIN_ROOT = join(HERE, '..', '..');
const HERE = dirname(fileURLToPath(import.meta.url));
const PLUGIN_ROOT = join(HERE, '..', '..');
function findProgressFiles(cwd) {
const projectsDir = join(cwd, '.claude', 'projects');
if (!existsSync(projectsDir) || !statSync(projectsDir).isDirectory()) return [];
const out = [];
for (const entry of readdirSync(projectsDir)) {
const projDir = join(projectsDir, entry);
if (!statSync(projDir).isDirectory()) continue;
const progPath = join(projDir, 'progress.json');
if (existsSync(progPath) && statSync(progPath).isFile()) {
out.push({ projDir, progPath, planPath: join(projDir, 'plan.md') });
}
}
return out;
}
function readJson(path) {
try { return JSON.parse(readFileSync(path, 'utf-8')); }
catch { return null; }
}
function readPlanCheckpointPatterns(planPath) {
if (!existsSync(planPath)) return new Map();
const text = readFileSync(planPath, 'utf-8');
const map = new Map();
const stepRe = /^### Step (\d+):/gm;
const checkpointRe = /\*\*Checkpoint:\*\*\s+`git commit -m "([^"]+)"`/;
const headings = [];
let m;
while ((m = stepRe.exec(text)) !== null) {
headings.push({ n: Number.parseInt(m[1], 10), idx: m.index });
}
for (let i = 0; i < headings.length; i++) {
const start = headings[i].idx;
const end = i + 1 < headings.length ? headings[i + 1].idx : text.length;
const body = text.slice(start, end);
const cp = body.match(checkpointRe);
if (cp) {
const msg = cp[1];
const conventionalPrefix = (msg.match(/^([a-z]+)\(([^)]+)\):/) || [])[0];
if (conventionalPrefix) map.set(headings[i].n, conventionalPrefix);
}
}
return map;
}
function gitLog(repoDir, baseSha) {
if (!baseSha) return [];
try {
const out = execSync(`git -C "${repoDir}" log --pretty=format:'%H %s' ${baseSha}..HEAD
2>/dev/null`, {
encoding: 'utf-8', timeout: 5000,
});
return out.trim().split('\n').filter(Boolean).map(line => {
const sp = line.indexOf(' ');
return { sha: line.slice(0, sp), subject: line.slice(sp + 1) };
});
} catch { return []; }
}
function deriveCurrentStep(progress, plan, gitCommits) {
if (!progress || !progress.steps || gitCommits.length === 0) return null;
const stored = progress.current_step || 0;
let highestMatched = stored;
for (const [stepN, prefix] of plan.entries()) {
const matchedCommit = gitCommits.find(c => c.subject.startsWith(prefix.replace(/\\/g,
'')));
if (matchedCommit && stepN > highestMatched) highestMatched = stepN;
}
return highestMatched;
}
function atomicWrite(path, obj) {
const tmp = path + '.tmp';
writeFileSync(tmp, JSON.stringify(obj, null, 2));
renameSync(tmp, path);
}
function repoRootOf(dir) {
try {
return execSync(`git -C "${dir}" rev-parse --show-toplevel 2>/dev/null`, { encoding:
'utf-8', timeout: 2000 }).trim();
} catch { return null; }
}
let stdinPayload = '';
try { stdinPayload = readFileSync(0, 'utf-8'); } catch { /* fine */ }
const cwd = process.env.CLAUDE_PROJECT_DIR || process.cwd();
const progressFiles = findProgressFiles(cwd);
if (progressFiles.length === 0) {
process.exit(0);
}
let mutationsMade = 0;
for (const { projDir, progPath, planPath } of progressFiles) {
const progress = readJson(progPath);
if (!progress || progress.status === 'completed') continue;
const repoRoot = repoRootOf(projDir);
if (!repoRoot) continue;
const plan = readPlanCheckpointPatterns(planPath);
if (plan.size === 0) continue;
const sessionStart = progress.session_start_sha;
if (!sessionStart) continue;
const commits = gitLog(repoRoot, sessionStart);
const derivedStep = deriveCurrentStep(progress, plan, commits);
if (derivedStep !== null && derivedStep > (progress.current_step || 0)) {
progress.current_step = derivedStep;
progress.updated_at = new Date().toISOString();
if (!progress.steps[String(derivedStep)]) {
progress.steps[String(derivedStep)] = {
status: 'completed', attempts: 1, error: null,
completed_at: progress.updated_at, commit: null, manifest_audit: 'n/a',
note: 'reconstructed by pre-compact-flush from git log',
};
}
atomicWrite(progPath, progress);
process.stderr.write(`[ultraplan-local] pre-compact flush: ${progPath}
current_step=${derivedStep}\n`);
mutationsMade++;
function findProgressFiles(cwd) {
const projectsDir = join(cwd, '.claude', 'projects');
if (!existsSync(projectsDir) || !statSync(projectsDir).isDirectory()) return [];
const out = [];
for (const entry of readdirSync(projectsDir)) {
const projDir = join(projectsDir, entry);
if (!statSync(projDir).isDirectory()) continue;
const progPath = join(projDir, 'progress.json');
if (existsSync(progPath) && statSync(progPath).isFile()) {
out.push({ projDir, progPath, planPath: join(projDir, 'plan.md') });
}
}
return out;
}
process.exit(0);
function readJson(path) {
try { return JSON.parse(readFileSync(path, 'utf-8')); }
catch { return null; }
}
function readPlanCheckpointPatterns(planPath) {
if (!existsSync(planPath)) return new Map();
const text = readFileSync(planPath, 'utf-8');
const map = new Map();
const stepRe = /^### Step (\d+):/gm;
const checkpointRe = /\*\*Checkpoint:\*\*\s+`git commit -m "([^"]+)"`/;
const headings = [];
let m;
while ((m = stepRe.exec(text)) !== null) {
headings.push({ n: Number.parseInt(m[1], 10), idx: m.index });
}
for (let i = 0; i < headings.length; i++) {
const start = headings[i].idx;
const end = i + 1 < headings.length ? headings[i + 1].idx : text.length;
const body = text.slice(start, end);
const cp = body.match(checkpointRe);
if (cp) {
const msg = cp[1];
const conventionalPrefix = (msg.match(/^([a-z]+)\(([^)]+)\):/) || [])[0];
if (conventionalPrefix) map.set(headings[i].n, conventionalPrefix);
}
}
return map;
}
function gitLog(repoDir, baseSha) {
if (!baseSha) return [];
try {
const out = execSync(`git -C "${repoDir}" log --pretty=format:'%H %s' ${baseSha}..HEAD 2>/dev/null`, {
encoding: 'utf-8', timeout: 5000,
});
return out.trim().split('\n').filter(Boolean).map(line => {
const sp = line.indexOf(' ');
return { sha: line.slice(0, sp), subject: line.slice(sp + 1) };
});
} catch { return []; }
}
function deriveCurrentStep(progress, plan, gitCommits) {
if (!progress || !progress.steps || gitCommits.length === 0) return null;
const stored = progress.current_step || 0;
let highestMatched = stored;
for (const [stepN, prefix] of plan.entries()) {
const matchedCommit = gitCommits.find(c => c.subject.startsWith(prefix.replace(/\\/g, '')));
if (matchedCommit && stepN > highestMatched) highestMatched = stepN;
}
return highestMatched;
}
function repoRootOf(dir) {
try {
return execSync(`git -C "${dir}" rev-parse --show-toplevel 2>/dev/null`, { encoding: 'utf-8', timeout: 2000 }).trim();
} catch { return null; }
}
let stdinPayload = '';
try { stdinPayload = readFileSync(0, 'utf-8'); } catch { /* fine */ }
const cwd = process.env.CLAUDE_PROJECT_DIR || process.cwd();
const progressFiles = findProgressFiles(cwd);
if (progressFiles.length === 0) {
process.exit(0);
}
let mutationsMade = 0;
for (const { projDir, progPath, planPath } of progressFiles) {
const progress = readJson(progPath);
if (!progress || progress.status === 'completed') continue;
const repoRoot = repoRootOf(projDir);
if (!repoRoot) continue;
const plan = readPlanCheckpointPatterns(planPath);
if (plan.size === 0) continue;
const sessionStart = progress.session_start_sha;
if (!sessionStart) continue;
const commits = gitLog(repoRoot, sessionStart);
const derivedStep = deriveCurrentStep(progress, plan, commits);
if (derivedStep !== null && derivedStep > (progress.current_step || 0)) {
progress.current_step = derivedStep;
progress.updated_at = new Date().toISOString();
if (!progress.steps[String(derivedStep)]) {
progress.steps[String(derivedStep)] = {
status: 'completed', attempts: 1, error: null,
completed_at: progress.updated_at, commit: null, manifest_audit: 'n/a',
note: 'reconstructed by pre-compact-flush from git log',
};
}
atomicWriteJson(progPath, progress);
process.stderr.write(`[ultraplan-local] pre-compact flush: ${progPath} → current_step=${derivedStep}\n`);
mutationsMade++;
}
}
process.exit(0);

View file

@@ -0,0 +1,14 @@
// lib/util/atomic-write.mjs
// Atomic JSON file write — writes to {path}.tmp then renames to {path}.
// Crash-safe: a partial write leaves the original file untouched.
//
// Extracted from hooks/scripts/pre-compact-flush.mjs in v3.3.0 so that
// session-state writers and progress.json writers share one implementation.
import { writeFileSync, renameSync } from 'node:fs';
export function atomicWriteJson(path, obj) {
const tmp = path + '.tmp';
writeFileSync(tmp, JSON.stringify(obj, null, 2));
renameSync(tmp, path);
}

View file

@@ -0,0 +1,61 @@
// tests/lib/atomic-write.test.mjs
// Unit tests for lib/util/atomic-write.mjs
import { test } from 'node:test';
import assert from 'node:assert/strict';
import { mkdtempSync, rmSync, readFileSync, existsSync, writeFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { atomicWriteJson } from '../../lib/util/atomic-write.mjs';
test('atomicWriteJson — writes valid JSON and round-trips', () => {
const dir = mkdtempSync(join(tmpdir(), 'aw-test-'));
try {
const path = join(dir, 'state.json');
const obj = { schema_version: 1, status: 'in_progress', items: [1, 2, 3] };
atomicWriteJson(path, obj);
const read = JSON.parse(readFileSync(path, 'utf-8'));
assert.deepEqual(read, obj);
} finally {
rmSync(dir, { recursive: true, force: true });
}
});
test('atomicWriteJson — leaves no .tmp orphan after success', () => {
const dir = mkdtempSync(join(tmpdir(), 'aw-test-'));
try {
const path = join(dir, 'state.json');
atomicWriteJson(path, { ok: true });
assert.equal(existsSync(path), true);
assert.equal(existsSync(path + '.tmp'), false);
} finally {
rmSync(dir, { recursive: true, force: true });
}
});
test('atomicWriteJson — overwrites existing file atomically', () => {
const dir = mkdtempSync(join(tmpdir(), 'aw-test-'));
try {
const path = join(dir, 'state.json');
writeFileSync(path, '{"old":true}');
atomicWriteJson(path, { new: true });
const read = JSON.parse(readFileSync(path, 'utf-8'));
assert.deepEqual(read, { new: true });
assert.equal(existsSync(path + '.tmp'), false);
} finally {
rmSync(dir, { recursive: true, force: true });
}
});
test('atomicWriteJson — pretty-prints with 2-space indent', () => {
const dir = mkdtempSync(join(tmpdir(), 'aw-test-'));
try {
const path = join(dir, 'state.json');
atomicWriteJson(path, { a: 1, b: { c: 2 } });
const text = readFileSync(path, 'utf-8');
assert.match(text, /\n {2}"a": 1/);
assert.match(text, /\n {4}"c": 2/);
} finally {
rmSync(dir, { recursive: true, force: true });
}
});