559 lines
19 KiB
JavaScript
Executable file
559 lines
19 KiB
JavaScript
Executable file
#!/usr/bin/env node
|
|
// weekly-kb-cron.mjs — Cross-OS scheduler entrypoint for weekly KB-update.
|
|
//
|
|
// Pipeline:
|
|
// 1. Parse flags (--dry-run, --force, --discover, --budget-usd=N).
|
|
// 2. Resolve cross-platform log/cache/state/backup dirs via lib/cross-platform-paths.mjs.
|
|
// 3. Rotate the log file before first write (lib/log-rotate.mjs, 10 MB default).
|
|
// 4. If --dry-run: print plan, write status (last_run_status: dry-run), exit 0.
|
|
// 5. Pre-flight: git --version, which claude, detectAuthMode + validateAuthForCron,
|
|
// ~/.claude.json onboarding flags, soft-warn on missing microsoft-learn MCP,
|
|
// git status --porcelain clean check.
|
|
// 6. Acquire lock (lib/lock-file.mjs). Capture runStartTs (Unix ms).
|
|
// 7. Run scripts/kb-update/run-weekly-update.mjs (existing pattern).
|
|
// 8. Read change-report.json. updateFiles = critical+high only.
|
|
// 9. Pre-flight cost-estimate (lib/cost-estimat.mjs). Abort with budget_exceeded
|
|
// if api-key auth and usd > budget. Subscription auth: kvote_warn, proceed.
|
|
// 10. Backup skills/ via lib/backup.mjs#backupDir.
|
|
// 11. Spawn Claude with NEW flag stack: dontAsk + scoped allowedTools +
|
|
// --output-format json + --model claude-sonnet-4-6.
|
|
// 12. Parse stdout JSON for total_cost_usd, session_id, max_turns_hit.
|
|
// 13. Post-run verification: git log --since=@<unixSeconds> commit count vs
|
|
// updateFiles.length. Branch: success / partial / failure.
|
|
// 14. On failure: rollback via backup#restore. On partial: keep commits.
|
|
// On success: optionally git push (auto_push_eligible).
|
|
// 15. Cleanup: release lock, cleanupOldBackups.
|
|
// 16. Exit 0 on success / dry-run / partial; 1 on failure / budget_exceeded.
|
|
//
|
|
// Status file: <getCacheDir('ms-ai-architect')>/kb-update-status.json
|
|
// (rewritten atomically per Status File Schema in plan.md L122-153).
|
|
//
|
|
// Crontab one-liner is still supported for direct cron use, but the recommended
|
|
// install path is `node ../install-kb-cron.mjs` which generates a launchd plist
|
|
// (macOS), systemd .timer + .service (Linux), or Windows Task Scheduler entry.
|
|
|
|
import { execFileSync, spawnSync } from 'node:child_process';
|
|
import { readFileSync, existsSync } from 'node:fs';
|
|
import { join, dirname } from 'node:path';
|
|
import { fileURLToPath } from 'node:url';
|
|
import { homedir, platform as osPlatform } from 'node:os';
|
|
|
|
import { getCacheDir, getLogDir, getBackupDir } from './lib/cross-platform-paths.mjs';
|
|
import { atomicWriteJson } from './lib/atomic-write.mjs';
|
|
import { rotateLog } from './lib/log-rotate.mjs';
|
|
import { detectAuthMode, validateAuthForCron, readClaudeJson } from './lib/auth-mode.mjs';
|
|
import { acquireLock } from './lib/lock-file.mjs';
|
|
import { estimateCost } from './lib/cost-estimat.mjs';
|
|
import { backupDir, cleanupOldBackups } from './lib/backup.mjs';
|
|
|
|
const __dirname = dirname(fileURLToPath(import.meta.url));
const APP = 'ms-ai-architect';                   // app name fed to the cross-platform dir resolvers
const PLUGIN_ROOT = join(__dirname, '..', '..'); // plugin repo root (two levels above this script)
const DATA_DIR = join(__dirname, 'data');        // where change-report.json is read from
const SKILLS_DIR = join(PLUGIN_ROOT, 'skills');  // tree that gets backed up, updated, and committed

const DEFAULT_BUDGET_USD = 5; // $-cap; enforced only under api-key auth
const KB_BACKUP_DAYS = 7;     // retention window for skills/ backups

// ---------- Arg parsing ----------

/**
 * Parse CLI flags into a plain options object.
 *
 * Recognized: --dry-run, --force, --no-discover, --budget-usd=<n>.
 * KB_UPDATE_BUDGET_USD seeds the budget before flags are applied. Both the
 * env var and the flag must be a finite number strictly greater than 0 to
 * take effect; previously the env var only went through `Number(...) ||
 * DEFAULT`, so a negative value (truthy) slipped through and would trigger a
 * spurious budget_exceeded abort.
 *
 * @param {string[]} argv - typically process.argv.slice(2)
 * @returns {{dryRun: boolean, force: boolean, discover: boolean, budgetUsd: number}}
 */
function parseArgs(argv) {
  const envBudget = Number(process.env.KB_UPDATE_BUDGET_USD);
  const args = {
    dryRun: false,
    force: false,
    discover: true, // run-weekly-update default
    // Same validation as the --budget-usd= flag below: finite and > 0.
    budgetUsd: Number.isFinite(envBudget) && envBudget > 0 ? envBudget : DEFAULT_BUDGET_USD,
  };
  for (const a of argv) {
    if (a === '--dry-run') args.dryRun = true;
    else if (a === '--force') args.force = true;
    else if (a === '--no-discover') args.discover = false;
    else if (a.startsWith('--budget-usd=')) {
      const n = Number(a.slice('--budget-usd='.length));
      // Silently ignore non-numeric / non-positive values, keeping the seed.
      if (Number.isFinite(n) && n > 0) args.budgetUsd = n;
    }
  }
  return args;
}
|
|
|
|
// Parsed once at module load; treated as read-only for the rest of the run.
const ARGS = parseArgs(process.argv.slice(2));
|
|
|
|
// ---------- Logging ----------
|
|
|
|
/**
 * Render a Date as a filesystem-safe ISO-8601 stamp.
 * Every ':' becomes '-' because both macOS and Windows reject ':' in
 * file names; defaults to "now" when no date is supplied.
 *
 * @param {Date} [date] - moment to format (default: current time)
 * @returns {string} e.g. "2024-01-02T03-04-05.678Z"
 */
function fsTimestamp(date = new Date()) {
  const iso = date.toISOString();
  return iso.replaceAll(':', '-');
}
|
|
|
|
// Per-run, timestamped log file name (one file per invocation).
const FS_TS = fsTimestamp();
const LOG_DIR = getLogDir(APP);
const LOG_FILE = join(LOG_DIR, `kb-update-${FS_TS}.log`);

// Rotate the *active* log if it exists and exceeds the size cap, BEFORE the
// first write of this run. Per-run log files (timestamped) won't actually
// overflow during a single run, but rotateLog also tolerates missing files.
rotateLog(LOG_FILE, { maxSizeBytes: 10 * 1024 * 1024, maxGenerations: 5 });
|
|
|
|
/**
 * Emit one uniform, ISO-timestamped line on stdout.
 * @param {string} msg - message text (appended verbatim after the timestamp)
 */
function log(msg) {
  console.log(`[${new Date().toISOString()}] ${msg}`);
}
|
|
|
|
// ---------- Status file ----------
|
|
|
|
// Machine-readable status record, rewritten atomically after every run
// (including dry-run) so external monitors always see a consistent file.
const CACHE_DIR = getCacheDir(APP);
const STATUS_FILE = join(CACHE_DIR, 'kb-update-status.json');
|
|
|
|
/**
 * Atomically rewrite the status file.
 *
 * Starts from a record containing every field of the Status File Schema
 * (schema_version 1) with neutral defaults, then lets the caller's fields
 * override. The field list (and its order) is kept stable so the serialized
 * JSON keeps a predictable shape for external consumers.
 *
 * @param {object} partial - fields to override on top of the defaults
 */
function writeStatus(partial) {
  const defaults = {
    schema_version: 1,
    last_run_status: 'unknown',
    last_run_ts: new Date().toISOString(),
    duration_seconds: null,
    auth_mode: 'unauthenticated',
    log_file: LOG_FILE,
    files_planned: null,
    files_committed: null,
    session_id: null,
    total_cost_usd: null,
    tokens_input: null,
    tokens_output: null,
    max_turns_hit: false,
    diagnostic: null,
  };
  atomicWriteJson(STATUS_FILE, Object.assign(defaults, partial));
}
|
|
|
|
// ---------- Dry-run early exit ----------
|
|
|
|
if (ARGS.dryRun) {
  // Dry-run: print the execution plan plus a preview of any existing
  // change-report, record a 'dry-run' status, and exit 0 — without taking the
  // lock, running the poller, or spawning Claude.
  log('=== DRY RUN — Weekly KB Cron ===');
  log(`Plugin root: ${PLUGIN_ROOT}`);
  log(`Log file: ${LOG_FILE}`);
  log(`Status file: ${STATUS_FILE}`);
  log(`Budget cap: $${ARGS.budgetUsd.toFixed(2)} USD (api-key auth only)`);
  log('Pipeline plan (would execute):');
  log('  1. run-weekly-update.mjs --force' + (ARGS.discover ? ' --discover' : ''));
  log('  2. read change-report.json → critical + high files');
  log('  3. cost-estimate via lib/cost-estimat.mjs');
  log('  4. backup skills/ → .kb-backup/<ts>/');
  log('  5. spawn claude -p with --permission-mode dontAsk + scoped allowedTools');
  log('  6. post-run verify: git log --since=@<runStart> commit count');
  log('  7. branch on status: success / partial / failure / budget_exceeded');
  // Preview whatever change-report is already on disk. It may be stale — the
  // real run regenerates it via run-weekly-update.mjs before reading it.
  if (existsSync(join(DATA_DIR, 'change-report.json'))) {
    try {
      const rp = JSON.parse(readFileSync(join(DATA_DIR, 'change-report.json'), 'utf8'));
      const c = rp?.by_priority?.critical ?? 0;
      const h = rp?.by_priority?.high ?? 0;
      log(`Current change-report: ${c} critical + ${h} high (would be planned)`);
    } catch {
      // Malformed JSON (or unreadable file) is non-fatal in a dry run.
      log('Current change-report: (unreadable)');
    }
  } else {
    log('Current change-report: (none — would be generated by run-weekly-update.mjs)');
  }
  // Auth-mode is lazy in dry-run: detect but never validate so a dev can
  // sanity-check the plan without having a cron-safe credential set up yet.
  let mode = 'unauthenticated';
  try {
    mode = detectAuthMode();
  } catch {
    // detectAuthMode shouldn't throw, but be defensive.
  }
  writeStatus({
    last_run_status: 'dry-run',
    auth_mode: mode,
    diagnostic: null,
  });
  log('=== DRY RUN COMPLETE ===');
  process.exit(0);
}
|
|
|
|
// ---------- Pre-flight ----------
|
|
|
|
/**
 * Locate an executable on PATH.
 * Windows ships `where`; POSIX systems ship `which`. Returns the first
 * matching path, or null when the lookup fails for any reason (command not
 * found, finder binary missing, etc.).
 *
 * @param {string} cmd - bare command name to look up
 * @returns {string|null}
 */
function which(cmd) {
  const finder = osPlatform() === 'win32' ? 'where' : 'which';
  try {
    const stdout = execFileSync(finder, [cmd], {
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'ignore'],
    });
    const [firstLine = ''] = stdout.split(/\r?\n/);
    const hit = firstLine.trim();
    return hit === '' ? null : hit;
  } catch {
    return null;
  }
}
|
|
|
|
/**
 * Run all pre-flight checks, in order, before the pipeline starts.
 *
 * Hard failures throw an Error carrying a machine-readable `.code`
 * (ENOGIT, ENOCLAUDE, EAUTHCRON via validateAuthForCron, EGITSTATUS, EDIRTY);
 * soft issues only log a WARN line.
 *
 * @returns {{claudeBin: string, authMode: string}} resolved CLI path + auth mode
 * @throws {Error} with `.code` set, on any hard pre-flight failure
 */
function preflight() {
  // Small helper: an Error tagged with a machine-readable code for bail().
  const codedError = (message, code) => Object.assign(new Error(message), { code });

  // 1. git must be on PATH.
  try {
    execFileSync('git', ['--version'], { stdio: 'ignore' });
  } catch {
    throw codedError('git not found in PATH', 'ENOGIT');
  }

  // 2. claude CLI — CLAUDE_BIN env var overrides the PATH lookup.
  const claudeBin = process.env.CLAUDE_BIN || which('claude');
  if (!claudeBin) {
    throw codedError('claude CLI not found in PATH (set CLAUDE_BIN to override)', 'ENOCLAUDE');
  }

  // 3. Auth mode must be safe for unattended cron use.
  const authMode = detectAuthMode();
  validateAuthForCron(authMode); // throws EAUTHCRON if not safe

  // 4. ~/.claude.json onboarding flag (informational only).
  const claudeJson = readClaudeJson(join(homedir(), '.claude.json'));
  if (claudeJson?.hasCompletedOnboarding !== true) {
    log('WARN: ~/.claude.json missing or onboarding incomplete — cron may prompt');
  }

  // 5. Plugin-level MCP config (soft-warn only).
  if (!existsSync(join(PLUGIN_ROOT, '.mcp.json'))) {
    log('WARN: plugin .mcp.json missing — Claude session may lack microsoft-learn');
  }

  // 6. Refuse to run on a dirty working tree — the post-run commit-count
  //    verification and rollback both assume a clean starting point.
  let porcelain;
  try {
    porcelain = execFileSync('git', ['status', '--porcelain'], {
      cwd: PLUGIN_ROOT,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'pipe'],
    }).trim();
  } catch (err) {
    throw codedError(`git status failed: ${err.message}`, 'EGITSTATUS');
  }
  if (porcelain) {
    throw codedError(`Working tree not clean:\n${porcelain}`, 'EDIRTY');
  }

  return { claudeBin, authMode };
}
|
|
|
|
// ---------- Main ----------
|
|
|
|
// Mutable pipeline state, shared with bail() below. Captured/assigned as the
// run progresses so the terminal exit path can report and clean up correctly.
const runStartTs = Date.now();   // Unix ms; also the --since anchor for post-run git log
let lockHandle = null;           // from acquireLock(); released in bail()
let backupHandle = null;         // from backupDir(); .restore() on failure
let authMode = 'unauthenticated';
let claudeBin = null;
let updateFiles = [];            // critical+high entries from change-report.json
|
|
|
|
/**
 * Terminal exit path: persist the status record, roll back skills/ on hard
 * failure, release the lock, then exit the process.
 *
 * Every step is best-effort and independent: previously, a throwing
 * writeStatus (e.g. unwritable cache dir) skipped both the rollback and the
 * lock release, leaving a stale lock and possibly a half-updated skills/
 * tree. Now a failed status write is logged and the cleanup still runs.
 *
 * @param {string} status - success | partial | failure | budget_exceeded | ...
 * @param {?string} diagnostic - human-readable failure detail (null if none)
 * @param {object} [extra] - additional status-file fields to merge in
 * @returns {never} always calls process.exit
 */
function bail(status, diagnostic, extra = {}) {
  const duration = Math.round((Date.now() - runStartTs) / 1000);
  try {
    writeStatus({
      last_run_status: status,
      auth_mode: authMode,
      duration_seconds: duration,
      diagnostic,
      ...extra,
    });
  } catch (err) {
    // Status file is observability, not correctness — never let it block cleanup.
    log(`WARN: could not write status file: ${err.message}`);
  }
  // Roll back only on hard failure; partial runs keep their commits.
  if (backupHandle && status === 'failure') {
    try {
      log('Rolling back skills/ from backup...');
      backupHandle.restore();
      log('Rollback complete.');
    } catch (err) {
      log(`Rollback failed: ${err.message}`);
    }
  }
  if (lockHandle) {
    try { lockHandle.release(); } catch { /* best-effort */ }
  }
  // Exit 0 for recoverable outcomes; dry-run exits earlier and never gets here.
  process.exit(status === 'success' || status === 'partial' ? 0 : 1);
}
|
|
|
|
try {
  log('=== Weekly KB Cron Start ===');
  log(`Plugin root: ${PLUGIN_ROOT}`);
  log(`Log file: ${LOG_FILE}`);

  // Pre-flight (throws coded errors; handled by the catch at the bottom).
  const pf = preflight();
  claudeBin = pf.claudeBin;
  authMode = pf.authMode;
  log(`Auth mode: ${authMode}`);
  log(`Claude bin: ${claudeBin}`);

  // Lock — a holder older than 2h is considered stale and may be broken.
  lockHandle = acquireLock(undefined, { staleThresholdMs: 2 * 60 * 60 * 1000 });
  log(`Lock acquired: ${lockHandle.lockPath}`);

  // Pipeline step 1: poll + report (+ optional discover). bail() exits the
  // process, so the catch below does not fall through on failure.
  const updateScript = join(__dirname, 'run-weekly-update.mjs');
  const updateArgs = ['--force'];
  if (ARGS.discover) updateArgs.push('--discover');
  log(`Running ${updateScript} ${updateArgs.join(' ')}`);
  try {
    execFileSync('node', [updateScript, ...updateArgs], {
      stdio: 'inherit',
      timeout: 10 * 60 * 1000,
      cwd: PLUGIN_ROOT,
    });
  } catch (err) {
    bail('failure', `run-weekly-update.mjs failed: ${err.message}`);
  }

  // Read change report. A missing report means nothing changed upstream.
  const reportPath = join(DATA_DIR, 'change-report.json');
  if (!existsSync(reportPath)) {
    log('No change report produced. Treating as success (nothing to do).');
    bail('success', null, { files_planned: 0, files_committed: 0 });
  }
  const report = JSON.parse(readFileSync(reportPath, 'utf8'));
  const counts = report.by_priority || {};
  log(`Change report: ${counts.critical || 0} critical, ${counts.high || 0} high, ${counts.medium || 0} medium`);

  // Build updateFiles = critical + high (medium/low excluded per brief)
  updateFiles = (report.files || []).filter(
    (f) => f.priority === 'critical' || f.priority === 'high'
  );
  log(`Files to update: ${updateFiles.length} (critical + high)`);

  if (updateFiles.length === 0) {
    log('Nothing critical/high to update. Exiting clean.');
    bail('success', null, { files_planned: 0, files_committed: 0 });
  }

  // Cost estimate + budget check. `kvote_warn` ("kvote" = Norwegian for
  // quota) is set by estimateCost under subscription auth, where cost is
  // quota-bound and usd is null — the $-cap applies only to api-key auth.
  const cost = estimateCost(counts, { authMode });
  log(`Estimated cost: ${cost.usd === null ? '(quota; subscription)' : `$${cost.usd.toFixed(2)}`} ` +
    `(${cost.tokens_input} in / ${cost.tokens_output} out)`);
  if (cost.kvote_warn) {
    log('NOTE: Subscription auth — quota-bound, no $-cap applied.');
  }
  if (authMode === 'api-key' && cost.usd !== null && cost.usd > ARGS.budgetUsd) {
    log(`Cost $${cost.usd.toFixed(2)} exceeds budget $${ARGS.budgetUsd.toFixed(2)} — aborting.`);
    bail('budget_exceeded',
      `Estimated $${cost.usd.toFixed(2)} > budget $${ARGS.budgetUsd.toFixed(2)}`,
      {
        files_planned: updateFiles.length,
        files_committed: 0,
        tokens_input: cost.tokens_input,
        tokens_output: cost.tokens_output,
        total_cost_usd: cost.usd,
      });
  }

  // Backup skills/ so bail('failure', ...) can restore it verbatim.
  const backupRoot = getBackupDir(PLUGIN_ROOT);
  log(`Backing up ${SKILLS_DIR} → ${backupRoot}/<ts>/...`);
  backupHandle = backupDir(SKILLS_DIR, backupRoot, { retentionDays: KB_BACKUP_DAYS });
  log(`Backup: ${backupHandle.backupPath}`);

  // Build the Norwegian-language prompt: one entry per file, capped at 5
  // changed URLs each to keep the prompt bounded.
  const fileList = updateFiles.map((f) => {
    const urls = (f.changed_urls || []).slice(0, 5).join('\n    ');
    return `- ${f.path} [${f.priority}]\n  Changed URLs:\n    ${urls}`;
  }).join('\n');
  const yyyymm = new Date().toISOString().slice(0, 7);
  const prompt = `Du er Cosmo Skyberg. Oppdater ${updateFiles.length} stale kunnskapsreferanser i ms-ai-architect pluginen.

Arbeidsmappe: ${PLUGIN_ROOT}

## Filer å oppdatere

${fileList}

## For HVER fil

1. Les filen med Read
2. Bruk microsoft_docs_fetch på de endrede kilde-URLene listet over
3. Bruk microsoft_docs_search for supplerende info
4. Oppdater filen med Edit:
   - Oppdater "Last updated" til ${yyyymm}
   - Oppdater utdaterte fakta, priser, datoer
   - Bevar eksisterende struktur og seksjoner
   - Marker oppdatert innhold med "Verified (MCP ${yyyymm})"

## Etter alle oppdateringer

1. Kjør: node scripts/kb-update/build-registry.mjs --merge
2. Kjør: node scripts/kb-update/report-changes.mjs
3. git add skills/ scripts/kb-update/data/
4. git commit -m "docs(architect): weekly KB update — ${updateFiles.length} files refreshed

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>"

## Regler
- Aldri slett filer, kun oppdater
- Bruk Edit, ikke Write
- Bevar all eksisterende struktur
- Commit én gang ved slutt — ikke per fil`;

  // Spawn Claude (NEW flag stack). Tools are scoped: file read/edit, a
  // whitelist of git subcommands, and the two microsoft-learn MCP tools.
  const allowedTools = [
    'Read', 'Edit', 'Write',
    'Bash(git add:*)', 'Bash(git commit:*)', 'Bash(git push:*)',
    'Bash(git status:*)', 'Bash(git diff:*)', 'Bash(git log:*)',
    'mcp__microsoft-learn__microsoft_docs_search',
    'mcp__microsoft-learn__microsoft_docs_fetch',
  ].join(',');

  log(`Spawning Claude (model claude-sonnet-4-6, max-turns 200) with ${allowedTools.split(',').length} allowed tools...`);
  // NOTE(review): confirm 'dontAsk' is an accepted --permission-mode value for
  // the installed Claude CLI version; an unknown mode may fail or fall back.
  const claudeResult = spawnSync(claudeBin, [
    '-p', prompt,
    '--permission-mode', 'dontAsk',
    '--allowedTools', allowedTools,
    '--max-turns', '200',
    '--output-format', 'json',
    '--model', 'claude-sonnet-4-6',
  ], {
    cwd: PLUGIN_ROOT,
    encoding: 'utf8',
    timeout: 60 * 60 * 1000,      // hard 1h wall-clock cap on the session
    maxBuffer: 32 * 1024 * 1024,  // JSON result can be large with long transcripts
  });

  // Parse output: best-effort extraction of session metadata. A parse
  // failure only loses telemetry; the git-based verification below is the
  // actual source of truth for success.
  let sessionMeta = {};
  let maxTurnsHit = false;
  if (claudeResult.stdout) {
    try {
      // --output-format json yields a single JSON object on stdout.
      sessionMeta = JSON.parse(claudeResult.stdout);
      const resultStr = String(sessionMeta.result ?? sessionMeta.stop_reason ?? '');
      if (resultStr.includes('max_turns')) maxTurnsHit = true;
    } catch (err) {
      log(`WARN: could not parse Claude JSON output: ${err.message}`);
    }
  }
  if (claudeResult.stderr) {
    process.stderr.write(claudeResult.stderr);
  }

  // Post-run verification: count git commits since runStart
  const runStartUnixSec = Math.floor(runStartTs / 1000);
  let commitCount = 0;
  try {
    const log_out = execFileSync('git', ['log', `--since=@${runStartUnixSec}`, '--oneline'], {
      cwd: PLUGIN_ROOT,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'pipe'],
    });
    commitCount = log_out.split('\n').filter((l) => l.trim().length > 0).length;
  } catch (err) {
    // Non-fatal: commitCount stays 0, which the branching treats as failure.
    log(`WARN: git log post-run failed: ${err.message}`);
  }
  log(`Post-run: ${commitCount} commit(s) since runStart, planned ${updateFiles.length}.`);

  // Branching.
  // NOTE(review): the prompt above instructs Claude to commit exactly ONCE at
  // the end ("Commit én gang ved slutt — ikke per fil"), yet this check
  // compares commit COUNT to updateFiles.length — a fully successful
  // single-commit run over N>1 files would be classified 'partial'. Confirm
  // the intended commit granularity and align one side or the other.
  const claudeOk = claudeResult.status === 0;
  let status = 'success';
  let diagnostic = null;

  if (!claudeOk) {
    status = 'failure';
    diagnostic = `claude exited ${claudeResult.status}` +
      (claudeResult.signal ? ` (signal ${claudeResult.signal})` : '');
  } else if (commitCount === 0 && updateFiles.length > 0) {
    status = 'failure';
    diagnostic = 'No commits produced despite expected files';
  } else if (commitCount > 0 && commitCount < updateFiles.length && maxTurnsHit) {
    status = 'partial';
    diagnostic = `Hit max_turns: ${commitCount}/${updateFiles.length} files committed; rest will retry next week`;
  } else if (commitCount > 0 && commitCount < updateFiles.length) {
    // Partial without max_turns hit — treat as partial (Claude completed but
    // some files weren't actionable). Conservative: don't roll back.
    status = 'partial';
    diagnostic = `Claude completed but only ${commitCount}/${updateFiles.length} files committed`;
  } else {
    status = 'success';
  }

  // Telemetry fields from the parsed session JSON; each defensively typed
  // since sessionMeta may be {} when parsing failed above.
  const totalCostUsd = typeof sessionMeta.total_cost_usd === 'number'
    ? sessionMeta.total_cost_usd
    : null;
  const sessionId = typeof sessionMeta.session_id === 'string'
    ? sessionMeta.session_id
    : null;
  const tokensIn = typeof sessionMeta?.usage?.input_tokens === 'number'
    ? sessionMeta.usage.input_tokens
    : null;
  const tokensOut = typeof sessionMeta?.usage?.output_tokens === 'number'
    ? sessionMeta.usage.output_tokens
    : null;

  const statusExtra = {
    files_planned: updateFiles.length,
    files_committed: commitCount,
    session_id: sessionId,
    total_cost_usd: totalCostUsd,
    tokens_input: tokensIn,
    tokens_output: tokensOut,
    max_turns_hit: maxTurnsHit,
  };

  if (status === 'failure') {
    // bail() restores skills/ from backupHandle and exits 1.
    bail('failure', diagnostic, statusExtra);
  }

  // success or partial: keep commits + optionally push
  if (status === 'success' && autoPushEligible()) {
    try {
      execFileSync('git', ['push', 'origin', 'main'], {
        cwd: PLUGIN_ROOT,
        stdio: 'inherit',
      });
      log('Pushed origin/main.');
    } catch (err) {
      // Push failure is not a run failure — commits stay local for manual push.
      log(`WARN: git push failed (commits remain local): ${err.message}`);
    }
  }

  // Cleanup old backups (best-effort, post-success)
  try {
    const cleanup = cleanupOldBackups(backupRoot, KB_BACKUP_DAYS);
    if (cleanup.deleted.length > 0) {
      log(`Cleaned up ${cleanup.deleted.length} old backup(s).`);
    }
  } catch (err) {
    log(`WARN: cleanupOldBackups failed: ${err.message}`);
  }

  log(`=== Weekly KB Cron Done (${status}) ===`);
  bail(status, diagnostic, statusExtra);

} catch (err) {
  // Pre-flight or unexpected error before pipeline started.
  const code = err && err.code ? err.code : 'EUNKNOWN';
  log(`Pre-flight/error: [${code}] ${err.message}`);
  if (err.stack) {
    // First three stack frames are usually enough to localize the throw site.
    log(err.stack.split('\n').slice(1, 4).join('\n'));
  }
  bail('failure', `[${code}] ${err.message}`);
}
|
|
|
|
// ---------- Helpers ----------
|
|
|
|
/**
 * Decide whether a successful run may auto-push to origin/main.
 *
 * Two gates, both required: git has a non-empty user.email configured, and
 * the 'origin' remote answers a ls-remote probe within 10 seconds. Any
 * failure along the way disables push (commits then stay local).
 *
 * @returns {boolean}
 */
function autoPushEligible() {
  let email;
  try {
    email = execFileSync('git', ['config', '--get', 'user.email'], {
      cwd: PLUGIN_ROOT,
      encoding: 'utf8',
      stdio: ['ignore', 'pipe', 'ignore'],
    }).trim();
  } catch {
    return false; // no config value (git exits non-zero) or git itself failed
  }
  if (email === '') return false;

  try {
    execFileSync('git', ['ls-remote', 'origin', '--exit-code', 'HEAD'], {
      cwd: PLUGIN_ROOT,
      stdio: 'ignore',
      timeout: 10_000,
    });
  } catch {
    return false; // origin missing, unreachable, or probe timed out
  }
  return true;
}
|