feat(voyage): add markdown-write.mjs + revision-guard.mjs + forward-compat policy comments — v4.2 Step 1
- lib/util/markdown-write.mjs: serializeFrontmatter (subset matches frontmatter.mjs parser), atomicWriteMarkdown (single tmp+rename, body bytes verbatim), readAndUpdate (read+mutate+write). - lib/util/revision-guard.mjs: revisionGuard(path, mutator, validator) — backup -> mutate -> validate -> restore-on-fail. Extracted from /trekrevise prompt so rollback can be unit-tested. - 12 tests for markdown-write, including 6-key source_annotations round-trip + walk-all-fixtures regression. - 6 tests for revision-guard: applied/rolled-back/mutator-failed/sha256 stability/pre-existing-bak abort. - Forward-compat policy comments in 3 validators (brief/plan/review) — non-functional pin against future strict-key refactors. Pass: 508/510 (was 490; +18 net from v4.2 Step 1, 2 skipped Docker)
This commit is contained in:
parent
8dc3090080
commit
dcf0c7ad02
7 changed files with 584 additions and 0 deletions
129
plugins/voyage/lib/util/markdown-write.mjs
Normal file
129
plugins/voyage/lib/util/markdown-write.mjs
Normal file
|
|
@ -0,0 +1,129 @@
|
|||
// lib/util/markdown-write.mjs
|
||||
// Markdown frontmatter serializer + atomic markdown writer.
|
||||
//
|
||||
// Companion to lib/util/frontmatter.mjs (parser-only) and lib/util/atomic-write.mjs
|
||||
// (JSON-only). Together they enable the /trekrevise in-place revision loop
|
||||
// (v4.2): read existing artifact -> mutate frontmatter+body -> atomic write.
|
||||
//
|
||||
// Subset constraint mirrors the parser at lib/util/frontmatter.mjs:
|
||||
// - Scalars: string, integer, float, boolean, null
|
||||
// - Arrays of scalars (block-style only — no flow-style [a, b])
|
||||
// - Arrays of dicts, one level deep (block-style only)
|
||||
// Anything outside this subset is silently dropped or quoted as a string.
|
||||
//
|
||||
// Why no js-yaml: zero-deps invariant. Templates emit only this subset.
|
||||
|
||||
import { writeFileSync, renameSync, unlinkSync, readFileSync } from 'node:fs';
|
||||
import { splitFrontmatter, parseDocument } from './frontmatter.mjs';
|
||||
|
||||
// Characters that force double-quoting: YAML indicator characters anywhere
// in the string, or leading/trailing whitespace (which a parser would trim).
const SPECIAL_CHARS = /[:#\[\]{},&*!|>'"%@`]|^\s|\s$/;

/**
 * Decide whether a string scalar must be double-quoted so it survives a
 * parse round-trip with lib/util/frontmatter.mjs unchanged.
 *
 * @param {string} s - raw string value
 * @returns {boolean} true if emitting s bare would change its parsed type or value
 */
function needsQuote(s) {
  // Bare keywords would reparse as null / boolean instead of string.
  if (s === '' || s === 'null' || s === '~' || s === 'true' || s === 'false') return true;
  // Bare flow markers would reparse as empty array / object.
  if (s === '[]' || s === '{}') return true;
  // Numeric-looking strings would reparse as int / float.
  if (/^-?\d+(\.\d+)?$/.test(s)) return true;
  // FIX: embedded control characters must force quoting. SPECIAL_CHARS only
  // catches leading/trailing whitespace, so a string with a mid-string "\n"
  // previously went out bare and corrupted the whole frontmatter block —
  // serializeScalar's escape logic for \n/\r/\t was never reached.
  // (Round-trip assumes the parser unescapes \\n etc. in double-quoted
  // strings, as the existing escape code implies — TODO confirm in parser.)
  if (/[\n\r\t]/.test(s)) return true;
  if (SPECIAL_CHARS.test(s)) return true;
  return false;
}

/**
 * Serialize one scalar to its YAML text form (subset only).
 * Strings are quoted + escaped only when needed; values outside the subset
 * fall through to JSON.stringify as a last resort.
 *
 * @param {*} v - scalar value (string | number | boolean | null | undefined)
 * @returns {string} YAML text for the value
 */
function serializeScalar(v) {
  if (v === null || v === undefined) return 'null';
  if (typeof v === 'boolean') return v ? 'true' : 'false';
  if (typeof v === 'number') return String(v);
  if (typeof v === 'string') {
    if (needsQuote(v)) {
      // Escape backslash first so later escapes are not double-processed.
      const escaped = v.replace(/\\/g, '\\\\').replace(/"/g, '\\"').replace(/\n/g, '\\n').replace(/\r/g, '\\r').replace(/\t/g, '\\t');
      return `"${escaped}"`;
    }
    return v;
  }
  return JSON.stringify(v);
}
|
||||
|
||||
/**
 * Serialize a JS object to YAML frontmatter (subset only).
 * Returns the YAML body without --- delimiters.
 *
 * Supported shapes: scalar values, arrays of scalars, arrays of one-level
 * dicts, and one-level dicts — all emitted in block style. undefined
 * values and keys are skipped.
 */
export function serializeFrontmatter(obj) {
  if (obj === null || obj === undefined || typeof obj !== 'object') return '';

  const out = [];

  // Emit one list element: either "  - scalar" or a block-style dict
  // ("  - k: v" for the first pair, "    k: v" for the rest).
  const emitListItem = (item) => {
    const isDict = item !== null && typeof item === 'object' && !Array.isArray(item);
    if (!isDict) {
      out.push(`  - ${serializeScalar(item)}`);
      return;
    }
    const pairs = Object.entries(item).filter(([, v]) => v !== undefined);
    if (pairs.length === 0) {
      out.push(`  - {}`);
      return;
    }
    pairs.forEach(([k, v], idx) => {
      out.push(idx === 0 ? `  - ${k}: ${serializeScalar(v)}` : `    ${k}: ${serializeScalar(v)}`);
    });
  };

  for (const [key, value] of Object.entries(obj)) {
    if (value === undefined) continue;

    if (Array.isArray(value)) {
      if (value.length === 0) {
        out.push(`${key}: []`);
      } else {
        out.push(`${key}:`);
        value.forEach(emitListItem);
      }
    } else if (value !== null && typeof value === 'object') {
      // Single-level dict — key on its own line, indented subkeys below.
      out.push(`${key}:`);
      for (const [k, v] of Object.entries(value)) {
        if (v === undefined) continue;
        out.push(`  ${k}: ${serializeScalar(v)}`);
      }
    } else {
      out.push(`${key}: ${serializeScalar(value)}`);
    }
  }

  return out.join('\n');
}
|
||||
|
||||
/**
 * Atomically write a markdown file with frontmatter + body.
 * Reconstructs as: ---\n{serialized}\n---\n{body}
 * Single writeFileSync + renameSync for crash-safety. Body bytes preserved verbatim.
 *
 * @param {string} path - destination path
 * @param {object} frontmatter - object to serialize as YAML frontmatter
 * @param {string} body - markdown body, bytes-verbatim (no normalization)
 */
export function atomicWriteMarkdown(path, frontmatter, body) {
  const tmp = `${path}.tmp`;
  const document = `---\n${serializeFrontmatter(frontmatter)}\n---\n${body}`;
  try {
    // Write a sibling tmp file, then rename over the target — rename is
    // atomic within a filesystem, so readers see old or new, never partial.
    writeFileSync(tmp, document);
    renameSync(tmp, path);
  } catch (err) {
    // Best-effort cleanup; the tmp file may never have been created.
    try { unlinkSync(tmp); } catch { /* tmp already gone */ }
    throw err;
  }
}
|
||||
|
||||
/**
 * Read + parse + mutate + write atomically.
 * mutator receives { frontmatter, body }, returns new { frontmatter, body }.
 *
 * @param {string} path - markdown file to read and rewrite in place
 * @param {Function} mutator - ({frontmatter, body}) => {frontmatter, body}
 * @returns {Result} from parseDocument; if invalid, no write happens.
 */
export function readAndUpdate(path, mutator) {
  const text = readFileSync(path, 'utf-8');
  const doc = parseDocument(text);
  if (!doc.valid) return doc;
  const { frontmatter, body } = doc.parsed;
  const next = mutator({ frontmatter, body });
  if (!next || typeof next !== 'object') {
    return { valid: false, errors: [{ code: 'MD_WRITE_MUTATOR_INVALID', message: 'mutator must return { frontmatter, body }' }], warnings: [], parsed: null };
  }
  // FIX: normalize once and return the normalized object. Previously the
  // write defaulted missing fields (`next.frontmatter || {}`) while the
  // return carried raw `next`, so `parsed` could disagree with the bytes
  // actually written to disk.
  const normalized = { frontmatter: next.frontmatter || {}, body: next.body || '' };
  atomicWriteMarkdown(path, normalized.frontmatter, normalized.body);
  return { valid: true, errors: [], warnings: [], parsed: normalized };
}
|
||||
110
plugins/voyage/lib/util/revision-guard.mjs
Normal file
110
plugins/voyage/lib/util/revision-guard.mjs
Normal file
|
|
@ -0,0 +1,110 @@
|
|||
// lib/util/revision-guard.mjs
|
||||
// Pre-write backup -> mutate -> atomic write -> post-write validate ->
|
||||
// restore-on-fail orchestration for /trekrevise (v4.2).
|
||||
//
|
||||
// Extracted from commands/trekrevise.md so the rollback logic can be
|
||||
// unit-tested independently of the prompt-instruction file. The command
|
||||
// imports revisionGuard() and supplies the validator callback (one of
|
||||
// validateBrief / validatePlan / validateReview).
|
||||
//
|
||||
// Behavior:
|
||||
// 1. Compute sha256_before
|
||||
// 2. cp path path.local.bak (backup)
|
||||
// 3. readAndUpdate(path, mutator) (atomic)
|
||||
// 4. validator(path) — if validator says invalid, restore from bak
|
||||
// 5. delete bak on success; preserve bak + return rolled-back on failure
|
||||
//
|
||||
// Crash semantics: tmp+rename in atomicWriteMarkdown means a crash
|
||||
// between steps 2 and 3 leaves either the original (if rename hadn't
|
||||
// completed) or the new content (if rename had); bak file always reflects
|
||||
// the pre-revision state so manual recovery is possible.
|
||||
|
||||
import { copyFileSync, unlinkSync, readFileSync, existsSync } from 'node:fs';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { readAndUpdate } from './markdown-write.mjs';
|
||||
|
||||
/**
 * SHA-256 hex digest of a file's bytes, or null when the file is absent.
 *
 * @param {string} path - file to hash
 * @returns {string|null} lowercase hex digest, or null if path does not exist
 */
function sha256(path) {
  if (!existsSync(path)) return null;
  const bytes = readFileSync(path);
  const hash = createHash('sha256');
  hash.update(bytes);
  return hash.digest('hex');
}
|
||||
|
||||
/**
 * Guard a markdown revision with pre-backup + post-validate + rollback.
 *
 * @param {string} path - markdown file to revise (in-place)
 * @param {Function} mutator - ({frontmatter, body}) => {frontmatter, body}
 * @param {Function} validator - (path) => {valid: bool, errors: [...], warnings: [...]}
 * @returns {{outcome: 'applied'|'rolled-back'|'mutator-failed',
 *            validator_result, sha256_before, sha256_after,
 *            bak_path?: string, error?: string}}
 */
export function revisionGuard(path, mutator, validator) {
  // sha256() returns null only when the file is missing, so this doubles
  // as the existence check.
  const sha256_before = sha256(path);
  if (sha256_before === null) {
    return { outcome: 'mutator-failed', error: `file does not exist: ${path}`, sha256_before: null, sha256_after: null };
  }

  // A leftover .local.bak means a prior run crashed (or was interrupted)
  // between backup and cleanup. Abort before touching anything: overwriting
  // it would destroy the only copy of that run's pre-revision state.
  const bak = path + '.local.bak';
  if (existsSync(bak)) {
    return {
      outcome: 'mutator-failed',
      error: `pre-existing backup at ${bak} — verify it is safe to overwrite, then delete it manually before re-running`,
      sha256_before,
      sha256_after: sha256_before,
      bak_path: bak,
    };
  }

  // Backup BEFORE mutating so every later failure path can restore from it.
  copyFileSync(path, bak);

  let mutateResult;
  try {
    mutateResult = readAndUpdate(path, mutator);
  } catch (e) {
    // mutator threw — restore from bak, preserve original byte-identical
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'mutator-failed',
      error: `mutator threw: ${e.message}`,
      sha256_before,
      sha256_after: sha256(path),
    };
  }

  // readAndUpdate reported an invalid parse or an invalid mutator return;
  // nothing may have been written, but restore from bak anyway to be safe.
  if (!mutateResult.valid) {
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'mutator-failed',
      error: `mutator returned invalid result: ${(mutateResult.errors || []).map(e => e.code || e.message).join(', ')}`,
      sha256_before,
      sha256_after: sha256(path),
    };
  }

  // Hash the newly-written content before validation so the 'applied'
  // result reports the post-write state even if the validator reads/locks.
  const validator_result = validator(path);
  const sha256_after_write = sha256(path);

  if (!validator_result.valid) {
    // Validator failed — restore from bak
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'rolled-back',
      validator_result,
      sha256_before,
      // Recomputed after restore; should equal sha256_before.
      sha256_after: sha256(path),
    };
  }

  // Validator passed — keep new content, delete bak
  unlinkSync(bak);
  return {
    outcome: 'applied',
    validator_result,
    sha256_before,
    sha256_after: sha256_after_write,
  };
}
|
||||
|
|
@ -1,5 +1,12 @@
|
|||
// lib/validators/brief-validator.mjs
|
||||
// Validate trekbrief frontmatter + body invariants.
|
||||
//
|
||||
// Schema is forward-compatible: unknown top-level frontmatter keys are
|
||||
// tolerated silently. Adding new optional fields (e.g. revision,
|
||||
// source_annotations, annotation_digest, revision_reason from v4.2) does
|
||||
// not require a brief_version bump (cf. source_findings precedent on
|
||||
// trekreview). Strict-key checks are intentionally avoided so the
|
||||
// /trekrevise revision-loop can extend frontmatter without re-versioning.
|
||||
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { parseDocument } from '../util/frontmatter.mjs';
|
||||
|
|
|
|||
|
|
@ -1,6 +1,13 @@
|
|||
// lib/validators/plan-validator.mjs
|
||||
// Wraps plan-schema (heading shape) + manifest-yaml (per-step Manifest blocks).
|
||||
// This is the JS equivalent of Phase 5.5 grep checks in planning-orchestrator.
|
||||
//
|
||||
// Schema is forward-compatible: unknown top-level frontmatter keys are
|
||||
// tolerated silently. Adding new optional fields (e.g. revision,
|
||||
// source_annotations, annotation_digest, revision_reason from v4.2) does
|
||||
// not require a plan_version bump (cf. source_findings precedent). Strict-key
|
||||
// checks are intentionally avoided so the /trekrevise revision-loop can
|
||||
// extend frontmatter without re-versioning.
|
||||
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { sliceSteps, validatePlanHeadings, extractPlanVersion } from '../parsers/plan-schema.mjs';
|
||||
|
|
|
|||
|
|
@ -1,6 +1,13 @@
|
|||
// lib/validators/review-validator.mjs
|
||||
// Validate trekreview frontmatter + body invariants.
|
||||
// 3-layer pattern (Content → File → CLI shim) mirroring brief-validator.
|
||||
//
|
||||
// Schema is forward-compatible: unknown top-level frontmatter keys are
|
||||
// tolerated silently. Adding new optional fields (e.g. revision,
|
||||
// source_annotations, annotation_digest, revision_reason from v4.2) does
|
||||
// not require a review_version bump (cf. source_findings precedent).
|
||||
// Strict-key checks are intentionally avoided so the /trekrevise
|
||||
// revision-loop can extend frontmatter without re-versioning.
|
||||
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { parseDocument } from '../util/frontmatter.mjs';
|
||||
|
|
|
|||
189
plugins/voyage/tests/lib/markdown-write.test.mjs
Normal file
189
plugins/voyage/tests/lib/markdown-write.test.mjs
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
// tests/lib/markdown-write.test.mjs
|
||||
// Unit tests for lib/util/markdown-write.mjs (v4.2)
|
||||
|
||||
import { test } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync, readFileSync, existsSync, writeFileSync, readdirSync, statSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join, resolve, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import {
|
||||
serializeFrontmatter,
|
||||
atomicWriteMarkdown,
|
||||
readAndUpdate,
|
||||
} from '../../lib/util/markdown-write.mjs';
|
||||
import { parseFrontmatter, parseDocument } from '../../lib/util/frontmatter.mjs';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const FIXTURES_ROOT = resolve(__dirname, '..', 'fixtures');
|
||||
|
||||
// --- serializeFrontmatter unit tests -----------------------------------
// Each test serializes with serializeFrontmatter and (where applicable)
// re-parses with parseFrontmatter to assert round-trip fidelity.

test('serializeFrontmatter — empty object returns empty string', () => {
  assert.equal(serializeFrontmatter({}), '');
});

test('serializeFrontmatter — round-trip fidelity for scalars + arrays + list-of-dicts', () => {
  // One value of every supported subset shape in a single object.
  const obj = {
    name: 'voyage-test',
    revision: 0,
    enabled: true,
    notes: null,
    tags: ['alpha', 'beta', 'gamma'],
    findings: [
      { id: 'a', severity: 'major' },
      { id: 'b', severity: 'minor' },
    ],
  };
  const yaml = serializeFrontmatter(obj);
  const reparsed = parseFrontmatter(yaml).parsed;
  assert.deepEqual(reparsed, obj);
});

test('serializeFrontmatter — block-style YAML for arrays (no flow style)', () => {
  const yaml = serializeFrontmatter({ tags: ['a', 'b'] });
  assert.ok(!yaml.includes('[a, b]'), 'flow-style array forbidden');
  assert.ok(yaml.includes('tags:\n  - a\n  - b'), 'block-style required');
});

test('serializeFrontmatter — strings with colons are quoted', () => {
  // A bare colon would be misread as a nested mapping by the parser.
  const yaml = serializeFrontmatter({ task: 'Re-architect: phase 2' });
  assert.match(yaml, /task: ".*Re-architect.*phase 2.*"/);
  const reparsed = parseFrontmatter(yaml).parsed;
  assert.equal(reparsed.task, 'Re-architect: phase 2');
});

test('serializeFrontmatter — integer revision: 0 emitted unquoted', () => {
  // Guards against 0 being treated as falsy/empty anywhere in the pipeline.
  const yaml = serializeFrontmatter({ revision: 0 });
  assert.equal(yaml, 'revision: 0');
});

test('serializeFrontmatter — round-trips 6-key source_annotations dict (v4.2 schema)', () => {
  // Mirrors the /trekrevise annotation schema: list of 6-key dicts.
  const obj = {
    revision: 1,
    source_annotations: [
      {
        id: 'ANN-0001',
        target_artifact: 'plan.md',
        target_anchor: 'step-3',
        intent: 'change',
        comment: 'Reorder before step 4',
        timestamp: '2026-05-09T10:00:00Z',
      },
      {
        id: 'ANN-0002',
        target_artifact: 'plan.md',
        target_anchor: 'step-7',
        intent: 'fix',
        comment: 'typo in heading',
        timestamp: '2026-05-09T10:05:00Z',
      },
    ],
    annotation_digest: 'abc123def4567890',
  };
  const yaml = serializeFrontmatter(obj);
  const reparsed = parseFrontmatter(yaml).parsed;
  assert.deepEqual(reparsed, obj, '6-key list-of-dict must round-trip');
});
|
||||
|
||||
// --- atomicWriteMarkdown + readAndUpdate tests -------------------------
// Each test works in a fresh mkdtemp dir and removes it in finally.

test('atomicWriteMarkdown — writes file with frontmatter + body', () => {
  const dir = mkdtempSync(join(tmpdir(), 'mdw-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7', revision: 0 }, '# Title\n\nBody.\n');
    const text = readFileSync(path, 'utf-8');
    // "1.7" may be quoted (numeric-looking string) — regex allows both.
    assert.match(text, /^---\nplan_version: "?1\.7"?\nrevision: 0\n---\n# Title\n\nBody\.\n$/);
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('atomicWriteMarkdown — leaves no .tmp orphan after success', () => {
  const dir = mkdtempSync(join(tmpdir(), 'mdw-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { ok: true }, 'body');
    assert.ok(existsSync(path));
    assert.ok(!existsSync(path + '.tmp'));
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('atomicWriteMarkdown — overwrites existing file atomically', () => {
  const dir = mkdtempSync(join(tmpdir(), 'mdw-test-'));
  try {
    const path = join(dir, 'plan.md');
    writeFileSync(path, 'old content');
    atomicWriteMarkdown(path, { new: true }, 'new body\n');
    const text = readFileSync(path, 'utf-8');
    assert.match(text, /new: true/);
    assert.match(text, /new body/);
    assert.ok(!text.includes('old content'));
    assert.ok(!existsSync(path + '.tmp'));
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('atomicWriteMarkdown — preserves body bytes verbatim', () => {
  const dir = mkdtempSync(join(tmpdir(), 'mdw-test-'));
  try {
    const path = join(dir, 'plan.md');
    // Body deliberately contains a fenced YAML block and no trailing newline.
    const body = '# Title\n\n- item with `code`\n\n```yaml\nmanifest:\n  expected_paths:\n    - foo\n```\n\nTrailing text.';
    atomicWriteMarkdown(path, { v: 1 }, body);
    const text = readFileSync(path, 'utf-8');
    // Re-split on the frontmatter fences; rejoin to survive '---' in body.
    const split = text.split('---\n');
    const recoveredBody = split.slice(2).join('---\n');
    assert.equal(recoveredBody, body);
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('readAndUpdate — round-trips frontmatter + body via mutator', () => {
  const dir = mkdtempSync(join(tmpdir(), 'mdw-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7', revision: 0 }, '# Original\nBody.\n');
    const result = readAndUpdate(path, ({ frontmatter, body }) => ({
      frontmatter: { ...frontmatter, revision: 1 },
      body,
    }));
    assert.equal(result.valid, true);
    const re = parseDocument(readFileSync(path, 'utf-8'));
    assert.equal(re.parsed.frontmatter.revision, 1);
    assert.match(re.parsed.body, /# Original/);
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});
|
||||
|
||||
// Round-trip ALL existing fixture frontmatters (per risk-assessor C3).
// Walk tests/fixtures/**, parse + serialize + parse, assert deep-equal.

/**
 * Recursively collect every .md file path under root.
 *
 * @param {string} root - directory to walk; a missing root yields []
 * @param {string[]} [out] - accumulator shared across recursive calls
 * @returns {string[]} paths of all .md files found (directory order)
 */
function walkMd(root, out = []) {
  if (!existsSync(root)) return out;
  // withFileTypes gives the entry kind from the readdir call itself,
  // avoiding one statSync syscall per entry. (Dirents report symlinks as
  // symlinks rather than following them like statSync did — fixtures are
  // assumed symlink-free; confirm if that ever changes.)
  for (const entry of readdirSync(root, { withFileTypes: true })) {
    const p = join(root, entry.name);
    if (entry.isDirectory()) walkMd(p, out);
    else if (entry.name.endsWith('.md')) out.push(p);
  }
  return out;
}
|
||||
|
||||
test('serializeFrontmatter — round-trips ALL existing fixture frontmatters', () => {
  const fixtures = walkMd(FIXTURES_ROOT);
  let checked = 0;
  for (const path of fixtures) {
    const text = readFileSync(path, 'utf-8');
    const parsed = parseDocument(text);
    if (!parsed.valid) continue; // some fixtures are intentionally malformed
    const fm = parsed.parsed.frontmatter;
    if (!fm || Object.keys(fm).length === 0) continue;
    // parse -> serialize -> parse must be a fixed point for every valid fixture.
    const yaml = serializeFrontmatter(fm);
    const reparsed = parseFrontmatter(yaml);
    if (!reparsed.valid) continue; // skip malformed-on-purpose fixtures
    assert.deepEqual(reparsed.parsed, fm, `round-trip failed for fixture: ${path}`);
    checked++;
  }
  // Guard against the walk silently finding nothing (e.g. moved fixtures dir).
  assert.ok(checked > 0, 'expected to round-trip at least one fixture');
});
|
||||
135
plugins/voyage/tests/lib/revision-guard.test.mjs
Normal file
135
plugins/voyage/tests/lib/revision-guard.test.mjs
Normal file
|
|
@ -0,0 +1,135 @@
|
|||
// tests/lib/revision-guard.test.mjs
|
||||
// Unit tests for lib/util/revision-guard.mjs (v4.2)
|
||||
|
||||
import { test } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync, readFileSync, existsSync, writeFileSync, copyFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { revisionGuard } from '../../lib/util/revision-guard.mjs';
|
||||
import { atomicWriteMarkdown } from '../../lib/util/markdown-write.mjs';
|
||||
|
||||
/** SHA-256 hex digest of a file's bytes (file must exist). */
function sha256(path) {
  const bytes = readFileSync(path);
  const digest = createHash('sha256');
  digest.update(bytes);
  return digest.digest('hex');
}

// Validator stubs returning fixed pass/fail results, to drive both the
// applied and rolled-back paths of revisionGuard deterministically.
const ALWAYS_VALID = () => ({ valid: true, errors: [], warnings: [] });
const ALWAYS_INVALID = () => ({ valid: false, errors: [{ code: 'TEST', message: 'forced fail' }], warnings: [] });
|
||||
|
||||
// --- revisionGuard behavioral tests ------------------------------------
// Each test works in a fresh mkdtemp dir and removes it in finally.

test('revisionGuard — validator-PASS commits revision and deletes bak', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7', revision: 0 }, '# Hello\n');
    const r = revisionGuard(
      path,
      ({ frontmatter, body }) => ({ frontmatter: { ...frontmatter, revision: 1 }, body }),
      ALWAYS_VALID,
    );
    assert.equal(r.outcome, 'applied');
    assert.ok(!existsSync(path + '.local.bak'), 'bak should be deleted on success');
    const text = readFileSync(path, 'utf-8');
    assert.match(text, /revision: 1/);
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('revisionGuard — validator-FAIL rolls back to byte-identical pre-revision', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7', revision: 0 }, '# Hello\n');
    const before = sha256(path);
    const r = revisionGuard(
      path,
      ({ frontmatter, body }) => ({ frontmatter: { ...frontmatter, revision: 1 }, body }),
      ALWAYS_INVALID,
    );
    assert.equal(r.outcome, 'rolled-back');
    // sha256 equality proves byte-identical restore, not just parse-equal.
    const after = sha256(path);
    assert.equal(after, before, 'rollback must restore byte-identical content');
    assert.ok(!existsSync(path + '.local.bak'), 'bak should be cleaned up after rollback');
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('revisionGuard — pre-existing .local.bak aborts with operator guidance', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7' }, '# Hello\n');
    const bak = path + '.local.bak';
    writeFileSync(bak, 'stale backup from prior run');
    const r = revisionGuard(path, ({ frontmatter, body }) => ({ frontmatter, body }), ALWAYS_VALID);
    assert.equal(r.outcome, 'mutator-failed');
    assert.match(r.error, /pre-existing backup/);
    // Original file untouched, stale bak preserved for operator inspection
    assert.equal(readFileSync(bak, 'utf-8'), 'stale backup from prior run');
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('revisionGuard — mutator that throws restores original via bak', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7' }, '# Hello\n');
    const before = sha256(path);
    const r = revisionGuard(
      path,
      () => { throw new Error('boom'); },
      ALWAYS_VALID,
    );
    assert.equal(r.outcome, 'mutator-failed');
    assert.match(r.error, /boom/);
    const after = sha256(path);
    assert.equal(after, before, 'mutator-throw must preserve original');
    assert.ok(!existsSync(path + '.local.bak'), 'bak cleaned up after mutator-throw');
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('revisionGuard — mutator returns invalid object rejected before validator runs', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7' }, '# Hello\n');
    const before = sha256(path);
    let validatorCalled = false;
    const r = revisionGuard(
      path,
      () => null, // not an object
      () => { validatorCalled = true; return { valid: true, errors: [], warnings: [] }; },
    );
    assert.equal(r.outcome, 'mutator-failed');
    assert.equal(validatorCalled, false, 'validator must not run if mutator returned invalid result');
    const after = sha256(path);
    assert.equal(after, before, 'invalid mutator must preserve original');
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});

test('revisionGuard — sha256 fields populated and stable', () => {
  const dir = mkdtempSync(join(tmpdir(), 'rg-test-'));
  try {
    const path = join(dir, 'plan.md');
    atomicWriteMarkdown(path, { plan_version: '1.7', revision: 0 }, '# Hello\n');
    const before = sha256(path);
    const r = revisionGuard(
      path,
      ({ frontmatter, body }) => ({ frontmatter: { ...frontmatter, revision: 1 }, body }),
      ALWAYS_VALID,
    );
    assert.equal(r.sha256_before, before);
    assert.equal(typeof r.sha256_after, 'string');
    assert.notEqual(r.sha256_after, r.sha256_before, 'sha256 must change after applied revision');
  } finally {
    rmSync(dir, { recursive: true, force: true });
  }
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue