chore(voyage): release v5.0.0 — remove bespoke playground + /trekrevise + Handover 8; render produced artifacts to HTML + link, annotate via /playground
The v4.2/v4.3 bespoke playground SPA (~388 KB), the /trekrevise command, Handover 8 (annotation → revision), the supporting lib/ modules (anchor-parser, annotation-digest, markdown-write, revision-guard), the Playwright e2e suite, and the @playwright/test / @axe-core/playwright devDeps are removed. A browser walkthrough found the playground borderline unusable, and it duplicated the official /playground plugin's document-critique / diff-review templates. In their place: scripts/render-artifact.mjs — a small, zero-dependency renderer that turns a brief/plan/review .md into a self-contained, design-system-styled, zero-network .html (frontmatter folded into a <details> block). /trekbrief, /trekplan, and /trekreview call it on their last step and print the file:// link; to annotate, run /playground (document-critique) on the .md and paste the generated prompt back. Resolves the v4.3.1-deferred findings as moot (their target files are deleted). npm test green: 509 tests, 507 pass, 0 fail, 2 skipped. Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
parent
0f197f6ff6
commit
916d30f63e
96 changed files with 620 additions and 14716 deletions
|
|
@ -1,129 +0,0 @@
|
|||
// lib/util/markdown-write.mjs
|
||||
// Markdown frontmatter serializer + atomic markdown writer.
|
||||
//
|
||||
// Companion to lib/util/frontmatter.mjs (parser-only) and lib/util/atomic-write.mjs
|
||||
// (JSON-only). Together they enable the /trekrevise in-place revision loop
|
||||
// (v4.2): read existing artifact -> mutate frontmatter+body -> atomic write.
|
||||
//
|
||||
// Subset constraint mirrors the parser at lib/util/frontmatter.mjs:
|
||||
// - Scalars: string, integer, float, boolean, null
|
||||
// - Arrays of scalars (block-style only — no flow-style [a, b])
|
||||
// - Arrays of dicts, one level deep (block-style only)
|
||||
// Anything outside this subset is silently dropped or quoted as a string.
|
||||
//
|
||||
// Why no js-yaml: zero-deps invariant. Templates emit only this subset.
|
||||
|
||||
import { writeFileSync, renameSync, unlinkSync, readFileSync } from 'node:fs';
|
||||
import { splitFrontmatter, parseDocument } from './frontmatter.mjs';
|
||||
|
||||
// Characters that force quoting: YAML indicator characters, plus leading
// or trailing whitespace (which a plain scalar cannot preserve).
const SPECIAL_CHARS = /[:#\[\]{},&*!|>'"%@`]|^\s|\s$/;

/**
 * Decide whether a string must be double-quoted to survive a YAML
 * round-trip through the subset parser (lib/util/frontmatter.mjs).
 *
 * @param {string} s - raw string value
 * @returns {boolean} true when the plain (unquoted) form would be
 *   ambiguous (reads as null/bool/number/flow collection) or would
 *   corrupt the single-line `key: value` layout
 */
function needsQuote(s) {
  // Plain forms a YAML parser reads as null or boolean, not string.
  if (s === '' || s === 'null' || s === '~' || s === 'true' || s === 'false') return true;
  // Plain forms that read as empty flow collections.
  if (s === '[]' || s === '{}') return true;
  // Plain forms that read as numbers.
  if (/^-?\d+(\.\d+)?$/.test(s)) return true;
  // Fix: embedded line breaks or tabs anywhere in the string would break
  // the one-line layout; the edge checks (^\s / \s$) in SPECIAL_CHARS do
  // not catch interior control whitespace. Quoting lets serializeScalar
  // escape them as \n / \r / \t.
  if (/[\n\r\t]/.test(s)) return true;
  if (SPECIAL_CHARS.test(s)) return true;
  return false;
}
|
||||
|
||||
/**
 * Render a single scalar value as YAML text (no trailing newline).
 *
 * null/undefined -> `null`; booleans/numbers -> their plain spelling;
 * strings are emitted raw unless needsQuote() says they must be
 * double-quoted, in which case backslash, quote, and control whitespace
 * are escaped. Anything else (out-of-subset) falls back to
 * JSON.stringify, matching the "quoted as a string" header contract.
 *
 * @param {*} v - scalar to serialize
 * @returns {string} YAML fragment suitable after `key: ` or `- `
 */
function serializeScalar(v) {
  if (v === null || v === undefined) return 'null';
  switch (typeof v) {
    case 'boolean':
      return v ? 'true' : 'false';
    case 'number':
      return String(v);
    case 'string': {
      if (!needsQuote(v)) return v;
      const escaped = v
        .replace(/\\/g, '\\\\')
        .replace(/"/g, '\\"')
        .replace(/\n/g, '\\n')
        .replace(/\r/g, '\\r')
        .replace(/\t/g, '\\t');
      return `"${escaped}"`;
    }
    default:
      // Out-of-subset value — degrade to JSON rather than throw.
      return JSON.stringify(v);
  }
}
|
||||
|
||||
/**
 * Serialize a JS object to YAML frontmatter (subset only).
 * Returns the YAML body without --- delimiters.
 *
 * Subset: scalar values, single-level dicts, arrays of scalars, and
 * arrays of one-level dicts — all block style, mirroring the parser in
 * lib/util/frontmatter.mjs. `undefined` entries are skipped everywhere
 * (JSON.stringify semantics).
 *
 * @param {object} obj - frontmatter object; non-objects yield ''
 * @returns {string} YAML text, no leading/trailing delimiters or newline
 */
export function serializeFrontmatter(obj) {
  if (obj === null || obj === undefined || typeof obj !== 'object') return '';
  const lines = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value === undefined) continue;
    if (Array.isArray(value)) {
      if (value.length === 0) {
        lines.push(`${key}: []`);
        continue;
      }
      lines.push(`${key}:`);
      for (const item of value) {
        if (item !== null && typeof item === 'object' && !Array.isArray(item)) {
          // Dict in list — block style, one level deep
          const entries = Object.entries(item).filter(([, v]) => v !== undefined);
          if (entries.length === 0) {
            lines.push(`  - {}`);
            continue;
          }
          // First pair rides on the `- ` line; the rest align under it.
          const [firstK, firstV] = entries[0];
          lines.push(`  - ${firstK}: ${serializeScalar(firstV)}`);
          for (let i = 1; i < entries.length; i++) {
            const [k, v] = entries[i];
            lines.push(`    ${k}: ${serializeScalar(v)}`);
          }
        } else {
          lines.push(`  - ${serializeScalar(item)}`);
        }
      }
    } else if (value !== null && typeof value === 'object') {
      // Single-level dict — emit as multi-line `key:` + indented subkeys.
      const entries = Object.entries(value).filter(([, v]) => v !== undefined);
      if (entries.length === 0) {
        // Fix: a bare `key:` with no subkeys parses back as null, losing
        // the object. Emit an explicit empty flow map for round-trip
        // fidelity, paralleling `key: []` above and `- {}` in lists.
        lines.push(`${key}: {}`);
        continue;
      }
      lines.push(`${key}:`);
      for (const [k, v] of entries) {
        lines.push(`  ${k}: ${serializeScalar(v)}`);
      }
    } else {
      lines.push(`${key}: ${serializeScalar(value)}`);
    }
  }
  return lines.join('\n');
}
|
||||
|
||||
/**
 * Atomically write a markdown file with frontmatter + body.
 * Reconstructs as: ---\n{serialized}\n---\n{body}
 *
 * The document is staged to `{path}.tmp` and moved into place with a
 * single rename, so a crash mid-write leaves either the old file or the
 * new one — never a torn mix. Body bytes are preserved verbatim.
 *
 * @param {string} path - destination path
 * @param {object} frontmatter - object to serialize as YAML frontmatter
 * @param {string} body - markdown body, bytes-verbatim (no normalization)
 * @throws rethrows any fs error after best-effort staging-file cleanup
 */
export function atomicWriteMarkdown(path, frontmatter, body) {
  const staging = `${path}.tmp`;
  const document = ['---', serializeFrontmatter(frontmatter), '---', body].join('\n');
  try {
    writeFileSync(staging, document);
    renameSync(staging, path);
  } catch (err) {
    // The staging file may or may not exist at this point; removal is
    // best-effort and must not mask the original failure.
    try { unlinkSync(staging); } catch { /* already gone */ }
    throw err;
  }
}
|
||||
|
||||
/**
 * Read + parse + mutate + write atomically.
 * mutator receives { frontmatter, body }, returns new { frontmatter, body }.
 *
 * If the file fails to parse, the parse Result is returned unchanged and
 * nothing is written. If the mutator returns a non-object, an invalid
 * Result with code MD_WRITE_MUTATOR_INVALID is returned and nothing is
 * written. A mutator that throws propagates to the caller.
 *
 * @param {string} path - markdown file to read and rewrite in place
 * @param {Function} mutator - ({frontmatter, body}) => {frontmatter, body}
 * @returns {Result} from parseDocument; if invalid, no write happens.
 */
export function readAndUpdate(path, mutator) {
  const doc = parseDocument(readFileSync(path, 'utf-8'));
  if (!doc.valid) return doc;

  const next = mutator({ frontmatter: doc.parsed.frontmatter, body: doc.parsed.body });
  const returnedObject = next !== null && next !== undefined && typeof next === 'object';
  if (!returnedObject) {
    return {
      valid: false,
      errors: [{ code: 'MD_WRITE_MUTATOR_INVALID', message: 'mutator must return { frontmatter, body }' }],
      warnings: [],
      parsed: null,
    };
  }

  // Missing pieces degrade gracefully: empty frontmatter / empty body.
  atomicWriteMarkdown(path, next.frontmatter || {}, next.body || '');
  return { valid: true, errors: [], warnings: [], parsed: next };
}
|
||||
|
|
@ -1,110 +0,0 @@
|
|||
// lib/util/revision-guard.mjs
|
||||
// Pre-write backup -> mutate -> atomic write -> post-write validate ->
|
||||
// restore-on-fail orchestration for /trekrevise (v4.2).
|
||||
//
|
||||
// Extracted from commands/trekrevise.md so the rollback logic can be
|
||||
// unit-tested independently of the prompt-instruction file. The command
|
||||
// imports revisionGuard() and supplies the validator callback (one of
|
||||
// validateBrief / validatePlan / validateReview).
|
||||
//
|
||||
// Behavior:
|
||||
// 1. Compute sha256_before
|
||||
// 2. cp path path.local.bak (backup)
|
||||
// 3. readAndUpdate(path, mutator) (atomic)
|
||||
// 4. validator(path) — if validator says invalid, restore from bak
|
||||
// 5. delete bak on success; preserve bak + return rolled-back on failure
|
||||
//
|
||||
// Crash semantics: tmp+rename in atomicWriteMarkdown means a crash
|
||||
// between steps 2 and 3 leaves either the original (if rename hadn't
|
||||
// completed) or the new content (if rename had); bak file always reflects
|
||||
// the pre-revision state so manual recovery is possible.
|
||||
|
||||
import { copyFileSync, unlinkSync, readFileSync, existsSync } from 'node:fs';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { readAndUpdate } from './markdown-write.mjs';
|
||||
|
||||
/**
 * SHA-256 of a file's bytes, or null when the file does not exist.
 *
 * Reads first and maps ENOENT to null instead of the original
 * existsSync-then-read sequence, closing the race window where the file
 * could vanish between the check and the read.
 *
 * @param {string} path - file to hash
 * @returns {string|null} lowercase hex digest, or null if missing
 * @throws rethrows non-ENOENT fs errors (e.g. EISDIR, EACCES)
 */
function sha256(path) {
  let buf;
  try {
    buf = readFileSync(path);
  } catch (e) {
    if (e.code === 'ENOENT') return null;
    throw e;
  }
  return createHash('sha256').update(buf).digest('hex');
}
|
||||
|
||||
/**
 * Guard a markdown revision with pre-backup + post-validate + rollback.
 *
 * Flow: hash the original -> refuse to run over a stale .local.bak ->
 * copy the file to .local.bak -> readAndUpdate (atomic tmp+rename) ->
 * run the caller-supplied validator -> on success delete the backup;
 * on any failure restore the backup over the file.
 *
 * @param {string} path - markdown file to revise (in-place)
 * @param {Function} mutator - ({frontmatter, body}) => {frontmatter, body}
 * @param {Function} validator - (path) => {valid: bool, errors: [...], warnings: [...]}
 * @returns {{outcome: 'applied'|'rolled-back'|'mutator-failed',
 *            validator_result, sha256_before, sha256_after,
 *            bak_path?: string, error?: string}}
 */
export function revisionGuard(path, mutator, validator) {
  const sha256_before = sha256(path);
  if (sha256_before === null) {
    // Nothing to revise — report as a result object rather than throwing.
    return { outcome: 'mutator-failed', error: `file does not exist: ${path}`, sha256_before: null, sha256_after: null };
  }

  const bak = path + '.local.bak';
  if (existsSync(bak)) {
    // A leftover backup means a prior run failed or crashed before
    // cleanup; never overwrite it — it may be the only good copy.
    return {
      outcome: 'mutator-failed',
      error: `pre-existing backup at ${bak} — verify it is safe to overwrite, then delete it manually before re-running`,
      sha256_before,
      sha256_after: sha256_before,
      bak_path: bak,
    };
  }

  // Backup taken before any write touches `path`.
  copyFileSync(path, bak);

  let mutateResult;
  try {
    mutateResult = readAndUpdate(path, mutator);
  } catch (e) {
    // mutator threw — restore from bak, preserve original byte-identical
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'mutator-failed',
      error: `mutator threw: ${e.message}`,
      sha256_before,
      sha256_after: sha256(path),
    };
  }

  if (!mutateResult.valid) {
    // Mutator returned a malformed result, or the source file failed to
    // parse (readAndUpdate returns the parse Result unchanged) — roll back.
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'mutator-failed',
      error: `mutator returned invalid result: ${(mutateResult.errors || []).map(e => e.code || e.message).join(', ')}`,
      sha256_before,
      sha256_after: sha256(path),
    };
  }

  const validator_result = validator(path);
  // Hash the freshly written file now, before any restore below can
  // overwrite it.
  const sha256_after_write = sha256(path);

  if (!validator_result.valid) {
    // Validator failed — restore from bak
    copyFileSync(bak, path);
    unlinkSync(bak);
    return {
      outcome: 'rolled-back',
      validator_result,
      sha256_before,
      sha256_after: sha256(path),
    };
  }

  // Validator passed — keep new content, delete bak
  unlinkSync(bak);
  return {
    outcome: 'applied',
    validator_result,
    sha256_before,
    sha256_after: sha256_after_write,
  };
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue