feat(ms-ai-architect): rewrite weekly-kb-cron with portable paths, auth-mode-aware pre-flight, lock+backup+rollback [skip-docs]
This commit is contained in:
parent
03c77b6452
commit
a0528e6ef7
2 changed files with 614 additions and 77 deletions
|
|
@ -1,99 +1,361 @@
|
|||
#!/usr/bin/env node
|
||||
// weekly-kb-cron.mjs — Local cron wrapper for weekly KB maintenance.
|
||||
// Runs sitemap polling + change report. If critical/high findings exist,
|
||||
// spawns a local Claude Code session to update stale reference files.
|
||||
// weekly-kb-cron.mjs — Cross-OS scheduler entrypoint for weekly KB-update.
|
||||
//
|
||||
// Crontab: 23 4 * * 3 node /path/to/weekly-kb-cron.mjs >> /tmp/kb-cron.log 2>&1
|
||||
// Pipeline:
|
||||
// 1. Parse flags (--dry-run, --force, --discover, --budget-usd=N).
|
||||
// 2. Resolve cross-platform log/cache/state/backup dirs via lib/cross-platform-paths.mjs.
|
||||
// 3. Rotate the log file before first write (lib/log-rotate.mjs, 10 MB default).
|
||||
// 4. If --dry-run: print plan, write status (last_run_status: dry-run), exit 0.
|
||||
// 5. Pre-flight: git --version, which claude, detectAuthMode + validateAuthForCron,
|
||||
// ~/.claude.json onboarding flags, soft-warn on missing microsoft-learn MCP,
|
||||
// git status --porcelain clean check.
|
||||
// 6. Acquire lock (lib/lock-file.mjs). Capture runStartTs (Unix ms).
|
||||
// 7. Run scripts/kb-update/run-weekly-update.mjs (existing pattern).
|
||||
// 8. Read change-report.json. updateFiles = critical+high only.
|
||||
// 9. Pre-flight cost-estimate (lib/cost-estimat.mjs). Abort with budget_exceeded
|
||||
// if api-key auth and usd > budget. Subscription auth: kvote_warn, proceed.
|
||||
// 10. Backup skills/ via lib/backup.mjs#backupDir.
|
||||
// 11. Spawn Claude with NEW flag stack: dontAsk + scoped allowedTools +
|
||||
// --output-format json + --model claude-sonnet-4-6.
|
||||
// 12. Parse stdout JSON for total_cost_usd, session_id, max_turns_hit.
|
||||
// 13. Post-run verification: git log --since=@<unixSeconds> commit count vs
|
||||
// updateFiles.length. Branch: success / partial / failure.
|
||||
// 14. On failure: rollback via backup#restore. On partial: keep commits.
|
||||
// On success: optionally git push (auto_push_eligible).
|
||||
// 15. Cleanup: release lock, cleanupOldBackups.
|
||||
// 16. Exit 0 on success / dry-run / partial; 1 on failure / budget_exceeded.
|
||||
//
|
||||
// Zero npm dependencies. Uses only node builtins.
|
||||
// Status file: <getCacheDir('ms-ai-architect')>/kb-update-status.json
|
||||
// (rewritten atomically per Status File Schema in plan.md L122-153).
|
||||
//
|
||||
// Crontab one-liner is still supported for direct cron use, but the recommended
|
||||
// install path is `node ../install-kb-cron.mjs` which generates a launchd plist
|
||||
// (macOS), systemd .timer + .service (Linux), or Windows Task Scheduler entry.
|
||||
|
||||
import { execFileSync, execSync } from 'node:child_process';
|
||||
import { readFileSync, existsSync, appendFileSync } from 'node:fs';
|
||||
import { execFileSync, spawnSync } from 'node:child_process';
|
||||
import { readFileSync, existsSync } from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import { homedir, platform as osPlatform } from 'node:os';
|
||||
|
||||
import { getCacheDir, getLogDir, getBackupDir } from './lib/cross-platform-paths.mjs';
|
||||
import { atomicWriteJson } from './lib/atomic-write.mjs';
|
||||
import { rotateLog } from './lib/log-rotate.mjs';
|
||||
import { detectAuthMode, validateAuthForCron, readClaudeJson } from './lib/auth-mode.mjs';
|
||||
import { acquireLock } from './lib/lock-file.mjs';
|
||||
import { estimateCost } from './lib/cost-estimat.mjs';
|
||||
import { backupDir, cleanupOldBackups } from './lib/backup.mjs';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const APP = 'ms-ai-architect';
|
||||
const PLUGIN_ROOT = join(__dirname, '..', '..');
|
||||
const DATA_DIR = join(__dirname, 'data');
|
||||
const CLAUDE_BIN = '/Users/ktg/.local/bin/claude';
|
||||
const LOG_FILE = '/tmp/kb-cron.log';
|
||||
const SKILLS_DIR = join(PLUGIN_ROOT, 'skills');
|
||||
|
||||
const MAX_UPDATE_FILES = Infinity;
|
||||
const DEFAULT_BUDGET_USD = 5;
|
||||
const KB_BACKUP_DAYS = 7;
|
||||
|
||||
// ---------- Arg parsing ----------
|
||||
|
||||
/**
 * Parse CLI flags for the cron wrapper.
 *
 * Recognized flags:
 *   --dry-run         print the plan, write a dry-run status record, exit 0
 *   --force           forwarded to downstream consumers of ARGS.force
 *   --no-discover     disable run-weekly-update's discovery pass
 *   --budget-usd=N    per-run USD cap (applies to api-key auth only);
 *                     ignored unless N is a finite positive number
 *
 * The KB_UPDATE_BUDGET_USD env var seeds the budget default and is now held
 * to the same finite-and-positive rule as the flag; previously
 * `Number(env) || DEFAULT` let a negative value (e.g. "-5") slip through.
 *
 * @param {string[]} argv - process.argv.slice(2)
 * @returns {{dryRun: boolean, force: boolean, discover: boolean, budgetUsd: number}}
 */
function parseArgs(argv) {
  const envBudget = Number(process.env.KB_UPDATE_BUDGET_USD);
  const args = {
    dryRun: false,
    force: false,
    discover: true, // run-weekly-update default
    budgetUsd: Number.isFinite(envBudget) && envBudget > 0 ? envBudget : DEFAULT_BUDGET_USD,
  };
  for (const a of argv) {
    if (a === '--dry-run') args.dryRun = true;
    else if (a === '--force') args.force = true;
    else if (a === '--no-discover') args.discover = false;
    else if (a.startsWith('--budget-usd=')) {
      const n = Number(a.slice('--budget-usd='.length));
      if (Number.isFinite(n) && n > 0) args.budgetUsd = n;
    }
  }
  return args;
}
|
||||
|
||||
const ARGS = parseArgs(process.argv.slice(2));
|
||||
|
||||
// ---------- Logging ----------
|
||||
|
||||
/**
 * Filesystem-safe ISO-8601 timestamp.
 *
 * Colons are illegal in filenames on macOS and Windows, so every ':' in the
 * ISO string is swapped for a dash.
 *
 * @param {Date} [date] - timestamp to format; defaults to "now"
 * @returns {string} e.g. "2024-01-02T03-04-05.000Z"
 */
function fsTimestamp(date = new Date()) {
  const iso = date.toISOString();
  return iso.split(':').join('-');
}
|
||||
|
||||
const FS_TS = fsTimestamp();
|
||||
const LOG_DIR = getLogDir(APP);
|
||||
const LOG_FILE = join(LOG_DIR, `kb-update-${FS_TS}.log`);
|
||||
|
||||
// Rotate the *active* log if it exists and exceeds the size cap, BEFORE the
|
||||
// first write of this run. Per-run log files (timestamped) won't actually
|
||||
// overflow during a single run, but rotateLog also tolerates missing files.
|
||||
rotateLog(LOG_FILE, { maxSizeBytes: 10 * 1024 * 1024, maxGenerations: 5 });
|
||||
|
||||
/**
 * Timestamped console logger. Lines land in the per-run log file via stdout
 * redirection set up by the scheduler entry.
 *
 * The diff interleave had left both the old two-step body (`const line = ...;
 * console.log(line)`) and the new direct call in place, which would have
 * logged every message twice; only the single direct call is kept.
 *
 * @param {string} msg - message to emit (prefixed with an ISO timestamp)
 */
function log(msg) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] ${msg}`);
}
|
||||
|
||||
function run(script, args = []) {
|
||||
const fullPath = join(__dirname, script);
|
||||
log(`Running ${script} ${args.join(' ')}`);
|
||||
// ---------- Status file ----------
|
||||
|
||||
const CACHE_DIR = getCacheDir(APP);
|
||||
const STATUS_FILE = join(CACHE_DIR, 'kb-update-status.json');
|
||||
|
||||
/**
 * Atomically rewrite the status file (Status File Schema v1).
 *
 * Every schema field is seeded with a neutral default and the caller's
 * partial record is overlaid on top, so consumers always read a complete
 * document regardless of which pipeline stage produced it.
 *
 * @param {object} partial - fields to override on top of the defaults
 */
function writeStatus(partial) {
  const defaults = {
    schema_version: 1,
    last_run_status: 'unknown',
    last_run_ts: new Date().toISOString(),
    duration_seconds: null,
    auth_mode: 'unauthenticated',
    log_file: LOG_FILE,
    files_planned: null,
    files_committed: null,
    session_id: null,
    total_cost_usd: null,
    tokens_input: null,
    tokens_output: null,
    max_turns_hit: false,
    diagnostic: null,
  };
  const record = Object.assign({}, defaults, partial);
  atomicWriteJson(STATUS_FILE, record);
}
|
||||
|
||||
// ---------- Dry-run early exit ----------
|
||||
|
||||
// ---------- Dry-run early exit ----------
//
// Prints the full pipeline plan without side effects (beyond the status
// file) and exits 0. A stray leftover line from the old run() helper
// (`execFileSync('node', [fullPath, ...args], {`) had been interleaved into
// the try block by the rewrite, breaking the syntax; it is removed here.
if (ARGS.dryRun) {
  log('=== DRY RUN — Weekly KB Cron ===');
  log(`Plugin root: ${PLUGIN_ROOT}`);
  log(`Log file: ${LOG_FILE}`);
  log(`Status file: ${STATUS_FILE}`);
  log(`Budget cap: $${ARGS.budgetUsd.toFixed(2)} USD (api-key auth only)`);
  log('Pipeline plan (would execute):');
  log(' 1. run-weekly-update.mjs --force' + (ARGS.discover ? ' --discover' : ''));
  log(' 2. read change-report.json → critical + high files');
  log(' 3. cost-estimate via lib/cost-estimat.mjs');
  log(' 4. backup skills/ → .kb-backup/<ts>/');
  log(' 5. spawn claude -p with --permission-mode dontAsk + scoped allowedTools');
  log(' 6. post-run verify: git log --since=@<runStart> commit count');
  log(' 7. branch on status: success / partial / failure / budget_exceeded');
  const reportPath = join(DATA_DIR, 'change-report.json');
  if (existsSync(reportPath)) {
    try {
      const rp = JSON.parse(readFileSync(reportPath, 'utf8'));
      const c = rp?.by_priority?.critical ?? 0;
      const h = rp?.by_priority?.high ?? 0;
      log(`Current change-report: ${c} critical + ${h} high (would be planned)`);
    } catch {
      log('Current change-report: (unreadable)');
    }
  } else {
    log('Current change-report: (none — would be generated by run-weekly-update.mjs)');
  }
  // Auth-mode is lazy in dry-run: detect but never validate so a dev can
  // sanity-check the plan without a cron-safe credential set up yet.
  let mode = 'unauthenticated';
  try {
    mode = detectAuthMode();
  } catch {
    // detectAuthMode shouldn't throw, but be defensive.
  }
  writeStatus({
    last_run_status: 'dry-run',
    auth_mode: mode,
    diagnostic: null,
  });
  log('=== DRY RUN COMPLETE ===');
  process.exit(0);
}
|
||||
|
||||
// ---------- Pre-flight ----------
|
||||
|
||||
/**
 * Locate an executable on PATH.
 *
 * Uses `where` on Windows and `which` elsewhere; both tools print one
 * candidate per line, so the first non-empty line wins.
 *
 * @param {string} cmd - bare command name, e.g. "claude"
 * @returns {string|null} resolved path, or null when the command is absent
 */
function which(cmd) {
  const probe = osPlatform() === 'win32' ? 'where' : 'which';
  let out;
  try {
    out = execFileSync(probe, [cmd], { encoding: 'utf8', stdio: ['ignore', 'pipe', 'ignore'] });
  } catch {
    return null;
  }
  const first = out.split(/\r?\n/)[0].trim();
  return first === '' ? null : first;
}
|
||||
|
||||
/**
 * Environment checks that must all pass before any pipeline work starts.
 *
 * Checked in order:
 *   1. git is runnable (`git --version`)             → throws ENOGIT
 *   2. claude CLI resolves (CLAUDE_BIN env or PATH)  → throws ENOCLAUDE
 *   3. auth mode is cron-safe (validateAuthForCron)  → throws (EAUTHCRON per lib)
 *   4. ~/.claude.json onboarding flag                → warn only
 *   5. plugin .mcp.json present                      → warn only
 *   6. plugin working tree is clean                  → throws EGITSTATUS / EDIRTY
 *
 * @returns {{claudeBin: string, authMode: string}} resolved CLI path + auth mode
 * @throws {Error} with a .code identifying the failed check
 */
function preflight() {
  // Small helper: throw an Error carrying a machine-readable code.
  const fail = (message, code) => {
    const err = new Error(message);
    err.code = code;
    throw err;
  };

  // 1. git must exist.
  try {
    execFileSync('git', ['--version'], { stdio: 'ignore' });
  } catch {
    fail('git not found in PATH', 'ENOGIT');
  }

  // 2. claude CLI — explicit env override wins over PATH lookup.
  const claudeBin = process.env.CLAUDE_BIN || which('claude');
  if (!claudeBin) {
    fail('claude CLI not found in PATH (set CLAUDE_BIN to override)', 'ENOCLAUDE');
  }

  // 3. auth-mode detection + cron-safety validation.
  const authMode = detectAuthMode();
  validateAuthForCron(authMode); // throws EAUTHCRON if not safe

  // 4. ~/.claude.json onboarding flag (informational only).
  const claudeJson = readClaudeJson(join(homedir(), '.claude.json'));
  if (claudeJson?.hasCompletedOnboarding !== true) {
    log('WARN: ~/.claude.json missing or onboarding incomplete — cron may prompt');
  }

  // 5. microsoft-learn MCP config — soft warning, not fatal.
  if (!existsSync(join(PLUGIN_ROOT, '.mcp.json'))) {
    log('WARN: plugin .mcp.json missing — Claude session may lack microsoft-learn');
  }

  // 6. refuse to run against a dirty working tree.
  let porcelain = '';
  try {
    porcelain = execFileSync('git', ['status', '--porcelain'], {
      cwd: PLUGIN_ROOT,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'pipe'],
    }).trim();
  } catch (err) {
    fail(`git status failed: ${err.message}`, 'EGITSTATUS');
  }
  if (porcelain) {
    fail(`Working tree not clean:\n${porcelain}`, 'EDIRTY');
  }

  return { claudeBin, authMode };
}
|
||||
|
||||
// ---------- Main ----------
|
||||
|
||||
const runStartTs = Date.now();
|
||||
let lockHandle = null;
|
||||
let backupHandle = null;
|
||||
let authMode = 'unauthenticated';
|
||||
let claudeBin = null;
|
||||
let updateFiles = [];
|
||||
|
||||
/**
 * Terminal path for every run: persist the status record, roll back on hard
 * failure, release the lock, and exit with the appropriate code.
 *
 * Exit code is 0 for success / partial, 1 for everything else (failure,
 * budget_exceeded, ...). On 'failure' the skills/ backup — if one was taken —
 * is restored; 'partial' deliberately keeps whatever commits were produced.
 *
 * @param {string} status - success | partial | failure | budget_exceeded | ...
 * @param {?string} diagnostic - human-readable reason, or null
 * @param {object} [extra] - additional status-file fields to record
 */
function bail(status, diagnostic, extra = {}) {
  const elapsedSeconds = Math.round((Date.now() - runStartTs) / 1000);
  writeStatus({
    last_run_status: status,
    auth_mode: authMode,
    duration_seconds: elapsedSeconds,
    diagnostic,
    ...extra,
  });

  // Hard failure → restore skills/ from the pre-run backup (best-effort).
  if (status === 'failure' && backupHandle) {
    try {
      log('Rolling back skills/ from backup...');
      backupHandle.restore();
      log('Rollback complete.');
    } catch (err) {
      log(`Rollback failed: ${err.message}`);
    }
  }

  // Always release the lock if we hold it.
  if (lockHandle) {
    try {
      lockHandle.release();
    } catch {
      /* best-effort */
    }
  }

  const cleanExit = status === 'success' || status === 'partial';
  process.exit(cleanExit ? 0 : 1);
}
|
||||
|
||||
try {
|
||||
log('=== Weekly KB Cron Start ===');
|
||||
log(`Plugin root: ${PLUGIN_ROOT}`);
|
||||
log(`Log file: ${LOG_FILE}`);
|
||||
|
||||
// Pre-flight
|
||||
const pf = preflight();
|
||||
claudeBin = pf.claudeBin;
|
||||
authMode = pf.authMode;
|
||||
log(`Auth mode: ${authMode}`);
|
||||
log(`Claude bin: ${claudeBin}`);
|
||||
|
||||
// Lock
|
||||
lockHandle = acquireLock(undefined, { staleThresholdMs: 2 * 60 * 60 * 1000 });
|
||||
log(`Lock acquired: ${lockHandle.lockPath}`);
|
||||
|
||||
// Pipeline step 1: poll + report (+ optional discover)
|
||||
const updateScript = join(__dirname, 'run-weekly-update.mjs');
|
||||
const updateArgs = ['--force'];
|
||||
if (ARGS.discover) updateArgs.push('--discover');
|
||||
log(`Running ${updateScript} ${updateArgs.join(' ')}`);
|
||||
try {
|
||||
execFileSync('node', [updateScript, ...updateArgs], {
|
||||
stdio: 'inherit',
|
||||
timeout: 10 * 60 * 1000,
|
||||
cwd: PLUGIN_ROOT,
|
||||
});
|
||||
} catch (err) {
|
||||
log(`ERROR: ${script} failed: ${err.message}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// --- Step 1: Sitemap polling + discovery ---
|
||||
log('=== Weekly KB Cron Start ===');
|
||||
run('run-weekly-update.mjs', ['--force', '--discover']);
|
||||
|
||||
// --- Step 2: Read change report ---
|
||||
const reportPath = join(DATA_DIR, 'change-report.json');
|
||||
if (!existsSync(reportPath)) {
|
||||
log('No change report found. Exiting.');
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const report = JSON.parse(readFileSync(reportPath, 'utf8'));
|
||||
const { critical = 0, high = 0, medium = 0, low = 0 } = report.by_priority || {};
|
||||
log(`Change report: ${critical} critical, ${high} high, ${medium} medium, ${low} low`);
|
||||
|
||||
// --- Step 3: If critical/high exist, spawn Claude for updates ---
|
||||
if (critical + high === 0) {
|
||||
log('No critical/high findings. Committing data updates only.');
|
||||
|
||||
try {
|
||||
execSync('git add scripts/kb-update/data/', { cwd: PLUGIN_ROOT, stdio: 'pipe' });
|
||||
const status = execSync('git status --porcelain scripts/kb-update/data/', { cwd: PLUGIN_ROOT, encoding: 'utf8' });
|
||||
if (status.trim()) {
|
||||
execSync(
|
||||
'git commit -m "docs(architect): weekly KB poll — no stale files"',
|
||||
{ cwd: PLUGIN_ROOT, stdio: 'pipe' }
|
||||
);
|
||||
execSync('git push origin main', { cwd: PLUGIN_ROOT, stdio: 'pipe' });
|
||||
log('Data committed and pushed.');
|
||||
} else {
|
||||
log('No data changes to commit.');
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Git error: ${err.message}`);
|
||||
bail('failure', `run-weekly-update.mjs failed: ${err.message}`);
|
||||
}
|
||||
|
||||
log('=== Weekly KB Cron Done ===');
|
||||
process.exit(0);
|
||||
}
|
||||
// Read change report
|
||||
const reportPath = join(DATA_DIR, 'change-report.json');
|
||||
if (!existsSync(reportPath)) {
|
||||
log('No change report produced. Treating as success (nothing to do).');
|
||||
bail('success', null, { files_planned: 0, files_committed: 0 });
|
||||
}
|
||||
const report = JSON.parse(readFileSync(reportPath, 'utf8'));
|
||||
const counts = report.by_priority || {};
|
||||
log(`Change report: ${counts.critical || 0} critical, ${counts.high || 0} high, ${counts.medium || 0} medium`);
|
||||
|
||||
// Build list of stale files (critical + high only, max MAX_UPDATE_FILES)
|
||||
const staleFiles = (report.files || [])
|
||||
.filter(f => f.priority === 'critical' || f.priority === 'high' || f.priority === 'medium')
|
||||
.slice(0, MAX_UPDATE_FILES);
|
||||
// Build updateFiles = critical + high (medium/low excluded per brief)
|
||||
updateFiles = (report.files || []).filter(
|
||||
(f) => f.priority === 'critical' || f.priority === 'high'
|
||||
);
|
||||
log(`Files to update: ${updateFiles.length} (critical + high)`);
|
||||
|
||||
log(`Spawning Claude to update ${staleFiles.length} stale files...`);
|
||||
if (updateFiles.length === 0) {
|
||||
log('Nothing critical/high to update. Exiting clean.');
|
||||
bail('success', null, { files_planned: 0, files_committed: 0 });
|
||||
}
|
||||
|
||||
const fileList = staleFiles.map(f => {
|
||||
const urls = (f.changed_urls || []).slice(0, 5).join('\n ');
|
||||
return `- ${f.path} [${f.priority}]\n Changed URLs:\n ${urls}`;
|
||||
}).join('\n');
|
||||
// Cost estimate + budget check
|
||||
const cost = estimateCost(counts, { authMode });
|
||||
log(`Estimated cost: ${cost.usd === null ? '(quota; subscription)' : `$${cost.usd.toFixed(2)}`} ` +
|
||||
`(${cost.tokens_input} in / ${cost.tokens_output} out)`);
|
||||
if (cost.kvote_warn) {
|
||||
log('NOTE: Subscription auth — quota-bound, no $-cap applied.');
|
||||
}
|
||||
if (authMode === 'api-key' && cost.usd !== null && cost.usd > ARGS.budgetUsd) {
|
||||
log(`Cost $${cost.usd.toFixed(2)} exceeds budget $${ARGS.budgetUsd.toFixed(2)} — aborting.`);
|
||||
bail('budget_exceeded',
|
||||
`Estimated $${cost.usd.toFixed(2)} > budget $${ARGS.budgetUsd.toFixed(2)}`,
|
||||
{
|
||||
files_planned: updateFiles.length,
|
||||
files_committed: 0,
|
||||
tokens_input: cost.tokens_input,
|
||||
tokens_output: cost.tokens_output,
|
||||
total_cost_usd: cost.usd,
|
||||
});
|
||||
}
|
||||
|
||||
const prompt = `Du er Cosmo Skyberg. Oppdater ${staleFiles.length} stale kunnskapsreferanser i ms-ai-architect pluginen.
|
||||
// Backup skills/
|
||||
const backupRoot = getBackupDir(PLUGIN_ROOT);
|
||||
log(`Backing up ${SKILLS_DIR} → ${backupRoot}/<ts>/...`);
|
||||
backupHandle = backupDir(SKILLS_DIR, backupRoot, { retentionDays: KB_BACKUP_DAYS });
|
||||
log(`Backup: ${backupHandle.backupPath}`);
|
||||
|
||||
// Build prompt
|
||||
const fileList = updateFiles.map((f) => {
|
||||
const urls = (f.changed_urls || []).slice(0, 5).join('\n ');
|
||||
return `- ${f.path} [${f.priority}]\n Changed URLs:\n ${urls}`;
|
||||
}).join('\n');
|
||||
const yyyymm = new Date().toISOString().slice(0, 7);
|
||||
const prompt = `Du er Cosmo Skyberg. Oppdater ${updateFiles.length} stale kunnskapsreferanser i ms-ai-architect pluginen.
|
||||
|
||||
Arbeidsmappe: ${PLUGIN_ROOT}
|
||||
|
||||
|
|
@ -107,42 +369,191 @@ ${fileList}
|
|||
2. Bruk microsoft_docs_fetch på de endrede kilde-URLene listet over
|
||||
3. Bruk microsoft_docs_search for supplerende info
|
||||
4. Oppdater filen med Edit:
|
||||
- Oppdater "Last updated" til ${new Date().toISOString().slice(0, 7)}
|
||||
- Oppdater "Last updated" til ${yyyymm}
|
||||
- Oppdater utdaterte fakta, priser, datoer
|
||||
- Bevar eksisterende struktur og seksjoner
|
||||
- Marker oppdatert innhold med "Verified (MCP ${new Date().toISOString().slice(0, 7)})"
|
||||
- Marker oppdatert innhold med "Verified (MCP ${yyyymm})"
|
||||
|
||||
## Etter alle oppdateringer
|
||||
|
||||
1. Kjør: node scripts/kb-update/build-registry.mjs --merge
|
||||
2. Kjør: node scripts/kb-update/report-changes.mjs
|
||||
3. git add skills/ scripts/kb-update/data/
|
||||
4. git commit -m "docs(architect): weekly KB update — ${staleFiles.length} files refreshed
|
||||
4. git commit -m "docs(architect): weekly KB update — ${updateFiles.length} files refreshed
|
||||
|
||||
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>"
|
||||
5. git push origin main
|
||||
|
||||
## Regler
|
||||
- Aldri slett filer, kun oppdater
|
||||
- Bruk Edit, ikke Write
|
||||
- Bevar all eksisterende struktur`;
|
||||
- Bevar all eksisterende struktur
|
||||
- Commit én gang ved slutt — ikke per fil`;
|
||||
|
||||
try {
|
||||
execFileSync(CLAUDE_BIN, [
|
||||
// Spawn Claude (NEW flag stack)
|
||||
const allowedTools = [
|
||||
'Read', 'Edit', 'Write',
|
||||
'Bash(git add:*)', 'Bash(git commit:*)', 'Bash(git push:*)',
|
||||
'Bash(git status:*)', 'Bash(git diff:*)', 'Bash(git log:*)',
|
||||
'mcp__microsoft-learn__microsoft_docs_search',
|
||||
'mcp__microsoft-learn__microsoft_docs_fetch',
|
||||
].join(',');
|
||||
|
||||
log(`Spawning Claude (model claude-sonnet-4-6, max-turns 200) with ${allowedTools.split(',').length} allowed tools...`);
|
||||
const claudeResult = spawnSync(claudeBin, [
|
||||
'-p', prompt,
|
||||
'--model', 'sonnet',
|
||||
'--permission-mode', 'acceptEdits',
|
||||
'--allowedTools', 'Read,Edit,Bash,Glob,Grep,mcp__microsoft-learn__microsoft_docs_search,mcp__microsoft-learn__microsoft_docs_fetch',
|
||||
'--permission-mode', 'dontAsk',
|
||||
'--allowedTools', allowedTools,
|
||||
'--max-turns', '200',
|
||||
'--output-format', 'json',
|
||||
'--model', 'claude-sonnet-4-6',
|
||||
], {
|
||||
stdio: 'inherit',
|
||||
timeout: 60 * 60 * 1000,
|
||||
cwd: PLUGIN_ROOT,
|
||||
encoding: 'utf8',
|
||||
timeout: 60 * 60 * 1000,
|
||||
maxBuffer: 32 * 1024 * 1024,
|
||||
});
|
||||
log('Claude session completed.');
|
||||
|
||||
// Parse output
|
||||
let sessionMeta = {};
|
||||
let maxTurnsHit = false;
|
||||
if (claudeResult.stdout) {
|
||||
try {
|
||||
// --output-format json yields a single JSON object on stdout.
|
||||
sessionMeta = JSON.parse(claudeResult.stdout);
|
||||
const resultStr = String(sessionMeta.result ?? sessionMeta.stop_reason ?? '');
|
||||
if (resultStr.includes('max_turns')) maxTurnsHit = true;
|
||||
} catch (err) {
|
||||
log(`WARN: could not parse Claude JSON output: ${err.message}`);
|
||||
}
|
||||
}
|
||||
if (claudeResult.stderr) {
|
||||
process.stderr.write(claudeResult.stderr);
|
||||
}
|
||||
|
||||
// Post-run verification: count git commits since runStart
|
||||
const runStartUnixSec = Math.floor(runStartTs / 1000);
|
||||
let commitCount = 0;
|
||||
try {
|
||||
const log_out = execFileSync('git', ['log', `--since=@${runStartUnixSec}`, '--oneline'], {
|
||||
cwd: PLUGIN_ROOT,
|
||||
encoding: 'utf8',
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
commitCount = log_out.split('\n').filter((l) => l.trim().length > 0).length;
|
||||
} catch (err) {
|
||||
log(`WARN: git log post-run failed: ${err.message}`);
|
||||
}
|
||||
log(`Post-run: ${commitCount} commit(s) since runStart, planned ${updateFiles.length}.`);
|
||||
|
||||
// Branching
|
||||
const claudeOk = claudeResult.status === 0;
|
||||
let status = 'success';
|
||||
let diagnostic = null;
|
||||
|
||||
if (!claudeOk) {
|
||||
status = 'failure';
|
||||
diagnostic = `claude exited ${claudeResult.status}` +
|
||||
(claudeResult.signal ? ` (signal ${claudeResult.signal})` : '');
|
||||
} else if (commitCount === 0 && updateFiles.length > 0) {
|
||||
status = 'failure';
|
||||
diagnostic = 'No commits produced despite expected files';
|
||||
} else if (commitCount > 0 && commitCount < updateFiles.length && maxTurnsHit) {
|
||||
status = 'partial';
|
||||
diagnostic = `Hit max_turns: ${commitCount}/${updateFiles.length} files committed; rest will retry next week`;
|
||||
} else if (commitCount > 0 && commitCount < updateFiles.length) {
|
||||
// Partial without max_turns hit — treat as partial (Claude completed but
|
||||
// some files weren't actionable). Conservative: don't roll back.
|
||||
status = 'partial';
|
||||
diagnostic = `Claude completed but only ${commitCount}/${updateFiles.length} files committed`;
|
||||
} else {
|
||||
status = 'success';
|
||||
}
|
||||
|
||||
const totalCostUsd = typeof sessionMeta.total_cost_usd === 'number'
|
||||
? sessionMeta.total_cost_usd
|
||||
: null;
|
||||
const sessionId = typeof sessionMeta.session_id === 'string'
|
||||
? sessionMeta.session_id
|
||||
: null;
|
||||
const tokensIn = typeof sessionMeta?.usage?.input_tokens === 'number'
|
||||
? sessionMeta.usage.input_tokens
|
||||
: null;
|
||||
const tokensOut = typeof sessionMeta?.usage?.output_tokens === 'number'
|
||||
? sessionMeta.usage.output_tokens
|
||||
: null;
|
||||
|
||||
const statusExtra = {
|
||||
files_planned: updateFiles.length,
|
||||
files_committed: commitCount,
|
||||
session_id: sessionId,
|
||||
total_cost_usd: totalCostUsd,
|
||||
tokens_input: tokensIn,
|
||||
tokens_output: tokensOut,
|
||||
max_turns_hit: maxTurnsHit,
|
||||
};
|
||||
|
||||
if (status === 'failure') {
|
||||
bail('failure', diagnostic, statusExtra);
|
||||
}
|
||||
|
||||
// success or partial: keep commits + optionally push
|
||||
if (status === 'success' && autoPushEligible()) {
|
||||
try {
|
||||
execFileSync('git', ['push', 'origin', 'main'], {
|
||||
cwd: PLUGIN_ROOT,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
log('Pushed origin/main.');
|
||||
} catch (err) {
|
||||
log(`WARN: git push failed (commits remain local): ${err.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup old backups (best-effort, post-success)
|
||||
try {
|
||||
const cleanup = cleanupOldBackups(backupRoot, KB_BACKUP_DAYS);
|
||||
if (cleanup.deleted.length > 0) {
|
||||
log(`Cleaned up ${cleanup.deleted.length} old backup(s).`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`WARN: cleanupOldBackups failed: ${err.message}`);
|
||||
}
|
||||
|
||||
log(`=== Weekly KB Cron Done (${status}) ===`);
|
||||
bail(status, diagnostic, statusExtra);
|
||||
|
||||
} catch (err) {
|
||||
log(`Claude session error: ${err.message}`);
|
||||
process.exit(1);
|
||||
// Pre-flight or unexpected error before pipeline started.
|
||||
const code = err && err.code ? err.code : 'EUNKNOWN';
|
||||
log(`Pre-flight/error: [${code}] ${err.message}`);
|
||||
if (err.stack) {
|
||||
log(err.stack.split('\n').slice(1, 4).join('\n'));
|
||||
}
|
||||
bail('failure', `[${code}] ${err.message}`);
|
||||
}
|
||||
|
||||
log('=== Weekly KB Cron Done ===');
|
||||
// ---------- Helpers ----------
|
||||
|
||||
/**
 * Gate for automatic `git push` after a fully successful run.
 *
 * Two requirements, both best-effort probes:
 *   1. a configured git user.email (commit identity exists), and
 *   2. a reachable `origin` remote (ls-remote HEAD, 10s timeout).
 * Any probe failure disables auto-push; commits then simply stay local.
 *
 * @returns {boolean} true when both gates pass
 */
function autoPushEligible() {
  let email;
  try {
    email = execFileSync('git', ['config', '--get', 'user.email'], {
      cwd: PLUGIN_ROOT,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'ignore'],
    }).trim();
  } catch {
    return false;
  }
  if (email === '') return false;

  try {
    execFileSync('git', ['ls-remote', 'origin', '--exit-code', 'HEAD'], {
      cwd: PLUGIN_ROOT,
      stdio: 'ignore',
      timeout: 10_000,
    });
  } catch {
    return false;
  }
  return true;
}
|
||||
|
|
|
|||
|
|
@ -0,0 +1,126 @@
|
|||
// tests/kb-update/test-weekly-kb-cron-flags.test.mjs
|
||||
// Subprocess-based flag-parsing tests for scripts/kb-update/weekly-kb-cron.mjs
|
||||
// (Step 9). Avoids real Claude spawn by exercising --dry-run + auth-failure
|
||||
// fast-path. Full e2e is reserved for Wave 6 live-test.
|
||||
//
|
||||
// The cron writes its status file to <getCacheDir('ms-ai-architect')>, which
|
||||
// on darwin resolves to $HOME/Library/Caches/ms-ai-architect/. Setting HOME
|
||||
// in the subprocess env therefore points all path resolution at a tmp dir,
|
||||
// keeping the test isolated from the real machine.
|
||||
|
||||
import { test } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { spawnSync } from 'node:child_process';
|
||||
import { mkdtempSync, rmSync, existsSync, readFileSync } from 'node:fs';
|
||||
import { tmpdir, platform as osPlatform } from 'node:os';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const CRON = join(__dirname, '..', '..', 'scripts', 'kb-update', 'weekly-kb-cron.mjs');
|
||||
|
||||
/**
 * Create a unique throwaway directory that stands in for $HOME so the cron
 * subprocess never touches the real machine's cache/log/state dirs.
 * @returns {string} absolute path of the sandbox directory
 */
function mkSandbox() {
  const prefix = join(tmpdir(), 'cron-test-');
  return mkdtempSync(prefix);
}
|
||||
|
||||
/**
 * Spawn the cron script as a subprocess with a minimal, controlled env.
 *
 * Only PATH is inherited from the test process; everything else (HOME, auth
 * vars) must come from `env`, so each test fully controls path resolution
 * and credentials.
 *
 * @param {string[]} extraArgs - CLI flags for the cron script
 * @param {object} [env] - extra environment entries (override-capable)
 * @returns {import('node:child_process').SpawnSyncReturns<string>}
 */
function runCron(extraArgs, env = {}) {
  const child = spawnSync('node', [CRON, ...extraArgs], {
    encoding: 'utf8',
    timeout: 30_000,
    env: { PATH: process.env.PATH, ...env },
  });
  return child;
}
|
||||
|
||||
/**
 * Mirror of lib/cross-platform-paths.mjs#getCacheDir resolution for a given
 * fake $HOME, so tests can predict where the cron wrote its status file.
 * @param {string} home - sandbox HOME directory
 * @returns {string} expected kb-update-status.json path for this OS
 */
function statusFilePath(home) {
  const file = 'kb-update-status.json';
  switch (osPlatform()) {
    case 'darwin':
      return join(home, 'Library', 'Caches', 'ms-ai-architect', file);
    case 'win32':
      return join(home, 'AppData', 'Local', 'ms-ai-architect', 'Cache', file);
    default:
      return join(home, '.cache', 'ms-ai-architect', file);
  }
}
|
||||
|
||||
test('--dry-run exits 0 with dry-run status, no Claude spawn', () => {
  const home = mkSandbox();
  const noAuthEnv = {
    HOME: home,
    ANTHROPIC_API_KEY: '',
    CLAUDE_CODE_OAUTH_TOKEN: '',
  };
  try {
    const result = runCron(['--dry-run'], noAuthEnv);
    const context = `stderr: ${result.stderr}\nstdout: ${result.stdout}`;

    // Exit code + banner prove the dry-run fast path ran.
    assert.equal(result.status, 0, context);
    assert.match(result.stdout, /DRY RUN/i);

    // The status file must exist and carry the dry-run record.
    const sf = statusFilePath(home);
    assert.equal(existsSync(sf), true, `status file missing at ${sf}`);
    const status = JSON.parse(readFileSync(sf, 'utf8'));
    assert.equal(status.schema_version, 1);
    assert.equal(status.last_run_status, 'dry-run');
    assert.equal(typeof status.last_run_ts, 'string');
    assert.match(status.last_run_ts, /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/);
    assert.equal(typeof status.auth_mode, 'string');
    assert.equal(typeof status.log_file, 'string');
  } finally {
    rmSync(home, { recursive: true, force: true });
  }
});
|
||||
|
||||
test('missing auth (no --dry-run) fails fast with auth-related error', () => {
  const home = mkSandbox();
  try {
    const result = runCron([], {
      HOME: home,
      ANTHROPIC_API_KEY: '',
      CLAUDE_CODE_OAUTH_TOKEN: '',
    });
    // No credential of either kind → pre-flight must refuse to proceed.
    assert.notEqual(result.status, 0, 'cron should exit non-zero on missing auth');
    const combined = `${result.stdout || ''}\n${result.stderr || ''}`;
    assert.match(
      combined,
      /not safe for cron|unauthenticated|EAUTHCRON|auth/i,
      `expected auth error in output. stdout: ${result.stdout}\nstderr: ${result.stderr}`
    );
  } finally {
    rmSync(home, { recursive: true, force: true });
  }
});
|
||||
|
||||
test('--budget-usd flag parsed and reflected in dry-run plan', () => {
  const home = mkSandbox();
  try {
    const result = runCron(['--dry-run', '--budget-usd=12.50'], {
      HOME: home,
      ANTHROPIC_API_KEY: '',
      CLAUDE_CODE_OAUTH_TOKEN: '',
    });
    assert.equal(result.status, 0, `stderr: ${result.stderr}`);
    // The previous pattern /(budget|Budget)[^\n]*12\.50|12\.5/ was broken by
    // alternation precedence: a bare "12.5" anywhere in stdout satisfied the
    // second alternative, so the "budget" context was never actually
    // required. Anchor the amount to the same line as "budget" instead.
    assert.match(
      result.stdout,
      /budget[^\n]*12\.50?/i,
      `expected 12.50 in dry-run output: ${result.stdout}`
    );
  } finally {
    rmSync(home, { recursive: true, force: true });
  }
});
|
||||
|
||||
test('--dry-run writes status file even with no change-report present', () => {
  const home = mkSandbox();
  try {
    const result = runCron(['--dry-run'], {
      HOME: home,
      ANTHROPIC_API_KEY: '',
      CLAUDE_CODE_OAUTH_TOKEN: '',
    });
    assert.equal(result.status, 0);
    const status = JSON.parse(readFileSync(statusFilePath(home), 'utf8'));
    // Required fields per Status File Schema (plan.md L122-153)
    const required = ['schema_version', 'last_run_status', 'last_run_ts', 'auth_mode', 'log_file', 'diagnostic'];
    for (const key of required) {
      assert.ok(Object.prototype.hasOwnProperty.call(status, key), `missing required field: ${key}`);
    }
  } finally {
    rmSync(home, { recursive: true, force: true });
  }
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue