Harden git clone attack surface for remote scans with defense-in-depth: Layer 1 (all platforms): 8 git config flags disable hooks, symlinks, filter/smudge drivers, fsmonitor, local file protocol. 4 env vars isolate from system/user git config and block interactive prompts. Layer 2 (OS sandbox): macOS sandbox-exec and Linux bubblewrap (bwrap) restrict file writes to only the specific temp directory. bwrap probe-tests availability before use. Graceful fallback on Windows and Ubuntu 24.04+ (git config hardening only). Additional: post-clone 100MB size check, UUID-unique evidence filenames, evidence file cleanup, cleanup guarantee in scan/plugin-audit commands. 32 new tests (1147 total). Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
406 lines
12 KiB
JavaScript
406 lines
12 KiB
JavaScript
#!/usr/bin/env node
|
|
// dashboard-aggregator.mjs — Cross-project security dashboard
|
|
// Discovers Claude Code projects, runs posture-scanner on each, aggregates results.
|
|
// Machine grade = weakest link (lowest grade across all projects).
|
|
//
|
|
// Standalone CLI: node scanners/dashboard-aggregator.mjs [--no-cache] [--max-depth N]
|
|
// Library: import { aggregate, discoverProjects } from './dashboard-aggregator.mjs'
|
|
//
|
|
// Cache: ~/.cache/llm-security/dashboard-latest.json (24h staleness by default)
|
|
// Zero external dependencies — Node.js builtins only.
|
|
|
|
import { readFile, writeFile, readdir, stat, mkdir, access } from 'node:fs/promises';
import { join, resolve, basename, relative, dirname, sep } from 'node:path';
import { homedir } from 'node:os';
import { fileURLToPath } from 'node:url';

import { scan } from './posture-scanner.mjs';
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Constants
|
|
// ---------------------------------------------------------------------------
|
|
|
|
const VERSION = '5.1.0';
|
|
|
|
/** Cache location */
|
|
const CACHE_DIR = join(homedir(), '.cache', 'llm-security');
|
|
const CACHE_FILE = join(CACHE_DIR, 'dashboard-latest.json');
|
|
|
|
/** Default staleness threshold (24 hours in ms) */
|
|
const STALENESS_MS = 24 * 60 * 60 * 1000;
|
|
|
|
/** Default max directory traversal depth from home */
|
|
const DEFAULT_MAX_DEPTH = 3;
|
|
|
|
/** Directories to skip during discovery */
|
|
const SKIP_DIRS = new Set([
|
|
'node_modules', '.git', '.hg', '.svn',
|
|
'__pycache__', '.pytest_cache', '.mypy_cache',
|
|
'dist', 'build', '.next', '.nuxt',
|
|
'.venv', 'venv', 'env',
|
|
'coverage', '.nyc_output',
|
|
'.angular', '.cache', '.Trash',
|
|
'Library', 'Applications', 'Pictures', 'Music', 'Movies', 'Downloads',
|
|
'Documents', 'Desktop', 'Public',
|
|
]);
|
|
|
|
/** Markers that indicate a Claude Code project */
|
|
const PROJECT_MARKERS = ['.claude', 'CLAUDE.md', '.claude-plugin'];
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Helpers
|
|
// ---------------------------------------------------------------------------
|
|
|
|
async function fileExists(filePath) {
|
|
try { await access(filePath); return true; }
|
|
catch { return false; }
|
|
}
|
|
|
|
async function readJson(filePath) {
|
|
try {
|
|
const raw = await readFile(filePath, 'utf-8');
|
|
return JSON.parse(raw);
|
|
} catch { return null; }
|
|
}
|
|
|
|
async function writeJson(filePath, data) {
|
|
await mkdir(join(filePath, '..'), { recursive: true });
|
|
await writeFile(filePath, JSON.stringify(data, null, 2) + '\n');
|
|
}
|
|
|
|
async function isDirectory(dirPath) {
|
|
try {
|
|
const s = await stat(dirPath);
|
|
return s.isDirectory();
|
|
} catch { return false; }
|
|
}
|
|
|
|
/**
|
|
* Derive a short display name for a project path.
|
|
* @param {string} absPath
|
|
* @returns {string}
|
|
*/
|
|
function projectDisplayName(absPath) {
|
|
const home = homedir();
|
|
if (absPath.startsWith(home)) {
|
|
return '~/' + relative(home, absPath);
|
|
}
|
|
return absPath;
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Project Discovery
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/**
|
|
* Check if a directory is a Claude Code project (has any marker).
|
|
* @param {string} dirPath - Absolute path
|
|
* @returns {Promise<boolean>}
|
|
*/
|
|
async function isClaudeProject(dirPath) {
|
|
for (const marker of PROJECT_MARKERS) {
|
|
if (await fileExists(join(dirPath, marker))) return true;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
/**
|
|
* Recursively discover Claude Code projects under a root directory.
|
|
* @param {string} root - Absolute path to start searching
|
|
* @param {number} maxDepth - Max directory depth to traverse
|
|
* @param {number} [currentDepth=0]
|
|
* @returns {Promise<string[]>} - Array of absolute paths to project roots
|
|
*/
|
|
async function walkForProjects(root, maxDepth, currentDepth = 0) {
|
|
if (currentDepth > maxDepth) return [];
|
|
|
|
const projects = [];
|
|
|
|
// Check if this directory itself is a project
|
|
if (await isClaudeProject(root)) {
|
|
projects.push(root);
|
|
// Don't recurse into sub-dirs of a found project (avoid duplicates)
|
|
return projects;
|
|
}
|
|
|
|
// Recurse into children
|
|
let entries;
|
|
try {
|
|
entries = await readdir(root, { withFileTypes: true });
|
|
} catch {
|
|
return projects;
|
|
}
|
|
|
|
for (const entry of entries) {
|
|
if (!entry.isDirectory()) continue;
|
|
if (SKIP_DIRS.has(entry.name)) continue;
|
|
if (entry.name.startsWith('.') && entry.name !== '.claude') continue;
|
|
|
|
const childPath = join(root, entry.name);
|
|
const childProjects = await walkForProjects(childPath, maxDepth, currentDepth + 1);
|
|
projects.push(...childProjects);
|
|
}
|
|
|
|
return projects;
|
|
}
|
|
|
|
/**
|
|
* Discover plugins installed via ~/.claude/plugins/.
|
|
* Each marketplace/plugin-name/ directory is a potential project root,
|
|
* but also check individual plugins/ sub-dirs within marketplaces.
|
|
* @returns {Promise<string[]>}
|
|
*/
|
|
async function discoverPlugins() {
|
|
const pluginsRoot = join(homedir(), '.claude', 'plugins');
|
|
const projects = [];
|
|
|
|
if (!await isDirectory(pluginsRoot)) return projects;
|
|
|
|
// Check marketplaces
|
|
const marketplaces = await readdir(pluginsRoot, { withFileTypes: true }).catch(() => []);
|
|
for (const mp of marketplaces) {
|
|
if (!mp.isDirectory()) continue;
|
|
const mpPath = join(pluginsRoot, mp.name);
|
|
|
|
// Check if marketplace itself is a project
|
|
if (await isClaudeProject(mpPath)) {
|
|
projects.push(mpPath);
|
|
}
|
|
|
|
// Check plugins within marketplace (e.g., plugins/llm-security/)
|
|
const pluginsDirPath = join(mpPath, 'plugins');
|
|
if (await isDirectory(pluginsDirPath)) {
|
|
const plugins = await readdir(pluginsDirPath, { withFileTypes: true }).catch(() => []);
|
|
for (const plugin of plugins) {
|
|
if (!plugin.isDirectory()) continue;
|
|
const pluginPath = join(pluginsDirPath, plugin.name);
|
|
if (await isClaudeProject(pluginPath)) {
|
|
projects.push(pluginPath);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Check direct plugin dirs (non-marketplace structure)
|
|
const directPlugins = await readdir(mpPath, { withFileTypes: true }).catch(() => []);
|
|
for (const dp of directPlugins) {
|
|
if (!dp.isDirectory() || dp.name === 'plugins') continue;
|
|
const dpPath = join(mpPath, dp.name);
|
|
if (await isClaudeProject(dpPath) && !projects.includes(dpPath)) {
|
|
projects.push(dpPath);
|
|
}
|
|
}
|
|
}
|
|
|
|
return projects;
|
|
}
|
|
|
|
/**
|
|
* Discover all Claude Code projects.
|
|
* Searches ~/ (depth-limited) and ~/.claude/plugins/.
|
|
* @param {object} [opts]
|
|
* @param {number} [opts.maxDepth=3] - Max depth for home directory traversal
|
|
* @param {string[]} [opts.extraPaths] - Additional paths to check
|
|
* @returns {Promise<string[]>} - Deduplicated array of absolute project paths
|
|
*/
|
|
export async function discoverProjects(opts = {}) {
|
|
const maxDepth = opts.maxDepth ?? DEFAULT_MAX_DEPTH;
|
|
const extraPaths = opts.extraPaths || [];
|
|
|
|
const [homeProjects, pluginProjects] = await Promise.all([
|
|
walkForProjects(homedir(), maxDepth),
|
|
discoverPlugins(),
|
|
]);
|
|
|
|
// Check extra paths
|
|
const extraProjects = [];
|
|
for (const p of extraPaths) {
|
|
const abs = resolve(p);
|
|
if (await isClaudeProject(abs)) {
|
|
extraProjects.push(abs);
|
|
}
|
|
}
|
|
|
|
// Deduplicate by absolute path
|
|
const seen = new Set();
|
|
const all = [...homeProjects, ...pluginProjects, ...extraProjects];
|
|
const unique = [];
|
|
for (const p of all) {
|
|
const resolved = resolve(p);
|
|
if (!seen.has(resolved)) {
|
|
seen.add(resolved);
|
|
unique.push(resolved);
|
|
}
|
|
}
|
|
|
|
return unique.sort();
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Aggregation
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/** Grade ordering for comparison (lower index = better) */
|
|
const GRADE_ORDER = ['A', 'B', 'C', 'D', 'F'];
|
|
|
|
/**
|
|
* Get the worse of two grades.
|
|
* @param {string} a
|
|
* @param {string} b
|
|
* @returns {string}
|
|
*/
|
|
function worseGrade(a, b) {
|
|
const ia = GRADE_ORDER.indexOf(a);
|
|
const ib = GRADE_ORDER.indexOf(b);
|
|
return ia >= ib ? a : b;
|
|
}
|
|
|
|
/**
|
|
* Find the worst category (lowest status) in a posture result.
|
|
* @param {object} postureResult - Result from posture-scanner scan()
|
|
* @returns {{ name: string, status: string } | null}
|
|
*/
|
|
function worstCategory(postureResult) {
|
|
const statusOrder = ['FAIL', 'PARTIAL', 'N_A', 'PASS'];
|
|
let worst = null;
|
|
let worstIdx = statusOrder.length;
|
|
|
|
for (const cat of postureResult.categories || []) {
|
|
const idx = statusOrder.indexOf(cat.status);
|
|
if (idx < worstIdx) {
|
|
worstIdx = idx;
|
|
worst = { name: cat.name, status: cat.status };
|
|
}
|
|
}
|
|
return worst;
|
|
}
|
|
|
|
/**
|
|
* Run posture-scanner on all discovered projects and aggregate results.
|
|
* @param {object} [opts]
|
|
* @param {number} [opts.maxDepth=3] - Max depth for home directory traversal
|
|
* @param {string[]} [opts.extraPaths] - Additional paths to check
|
|
* @param {boolean} [opts.useCache=true] - Use cached results if fresh
|
|
* @param {number} [opts.stalenessMs=86400000] - Cache staleness threshold
|
|
* @returns {Promise<object>} - Aggregated dashboard result
|
|
*/
|
|
export async function aggregate(opts = {}) {
|
|
const useCache = opts.useCache !== false;
|
|
const stalenessMs = opts.stalenessMs ?? STALENESS_MS;
|
|
|
|
// Check cache first
|
|
if (useCache) {
|
|
const cached = await readJson(CACHE_FILE);
|
|
if (cached && cached.meta?.timestamp) {
|
|
const age = Date.now() - new Date(cached.meta.timestamp).getTime();
|
|
if (age < stalenessMs) {
|
|
return { ...cached, meta: { ...cached.meta, from_cache: true } };
|
|
}
|
|
}
|
|
}
|
|
|
|
const startMs = Date.now();
|
|
|
|
// Discover projects
|
|
const projectPaths = await discoverProjects({
|
|
maxDepth: opts.maxDepth,
|
|
extraPaths: opts.extraPaths,
|
|
});
|
|
|
|
// Scan each project
|
|
const projectResults = [];
|
|
let machineGrade = 'A';
|
|
const errors = [];
|
|
|
|
for (const projectPath of projectPaths) {
|
|
try {
|
|
const result = await scan(projectPath);
|
|
const worst = worstCategory(result);
|
|
|
|
const entry = {
|
|
path: projectPath,
|
|
display_name: projectDisplayName(projectPath),
|
|
grade: result.scoring.grade,
|
|
pass_rate: result.scoring.pass_rate,
|
|
risk_score: result.risk.score,
|
|
risk_band: result.risk.band,
|
|
verdict: result.risk.verdict,
|
|
worst_category: worst ? worst.name : null,
|
|
worst_status: worst ? worst.status : null,
|
|
findings_count: result.findings.length,
|
|
counts: result.counts,
|
|
duration_ms: result.duration_ms,
|
|
};
|
|
projectResults.push(entry);
|
|
machineGrade = worseGrade(machineGrade, result.scoring.grade);
|
|
} catch (err) {
|
|
errors.push({
|
|
path: projectPath,
|
|
display_name: projectDisplayName(projectPath),
|
|
error: err.message,
|
|
});
|
|
}
|
|
}
|
|
|
|
// Aggregate counts
|
|
const aggCounts = { critical: 0, high: 0, medium: 0, low: 0, info: 0 };
|
|
for (const p of projectResults) {
|
|
for (const sev of Object.keys(aggCounts)) {
|
|
aggCounts[sev] += p.counts[sev] || 0;
|
|
}
|
|
}
|
|
|
|
const totalFindings = projectResults.reduce((sum, p) => sum + p.findings_count, 0);
|
|
const durationMs = Date.now() - startMs;
|
|
|
|
const result = {
|
|
meta: {
|
|
scanner: 'dashboard-aggregator',
|
|
version: VERSION,
|
|
timestamp: new Date().toISOString(),
|
|
duration_ms: durationMs,
|
|
from_cache: false,
|
|
},
|
|
machine: {
|
|
grade: machineGrade,
|
|
projects_scanned: projectResults.length,
|
|
projects_errored: errors.length,
|
|
total_findings: totalFindings,
|
|
counts: aggCounts,
|
|
},
|
|
projects: projectResults,
|
|
errors,
|
|
};
|
|
|
|
// Write cache
|
|
try {
|
|
await writeJson(CACHE_FILE, result);
|
|
} catch {
|
|
// Cache write failure is non-fatal
|
|
}
|
|
|
|
return result;
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// CLI entry point
|
|
// ---------------------------------------------------------------------------
|
|
|
|
const isMain = process.argv[1] && resolve(process.argv[1]) === resolve(fileURLToPath(import.meta.url));
|
|
|
|
if (isMain) {
|
|
const args = process.argv.slice(2);
|
|
const noCache = args.includes('--no-cache');
|
|
const maxDepthIdx = args.indexOf('--max-depth');
|
|
const maxDepth = maxDepthIdx >= 0 ? parseInt(args[maxDepthIdx + 1], 10) : DEFAULT_MAX_DEPTH;
|
|
|
|
try {
|
|
const result = await aggregate({
|
|
useCache: !noCache,
|
|
maxDepth,
|
|
});
|
|
process.stdout.write(JSON.stringify(result, null, 2) + '\n');
|
|
process.exit(result.machine.grade === 'F' ? 1 : 0);
|
|
} catch (err) {
|
|
process.stderr.write(`Error: ${err.message}\n`);
|
|
process.exit(2);
|
|
}
|
|
}
|