readActiveMcpServers now resolves tool count via:
1. In-config tools array
2. Cached tools/list at $HOME/.claude/config-audit/mcp-cache/<name>.json
3. node_modules/<pkg>/package.json (resolved from npx <pkg>)
4. Fallback: { toolCount: null, toolCountUnknown: true }
estimateTokens uses detected toolCount (heavy server > light server).
New fixture: mcp-tool-heavy/ with mocked node_modules/mcp-heavy/package.json (20 tools).
576 → 580 tests, all green.
915 lines
34 KiB
JavaScript
915 lines
34 KiB
JavaScript
/**
|
|
* Active Config Reader — enumerates everything Claude Code actually loads for a repo.
|
|
* Read-only helper used by `scanners/whats-active.mjs` and the `whats-active` command.
|
|
*
|
|
* All functions are async and side-effect-free (no writes).
|
|
* Zero external dependencies.
|
|
*/
|
|
|
|
import { readFile, readdir, stat, realpath } from 'node:fs/promises';
|
|
import { join, resolve, dirname, basename, isAbsolute, sep } from 'node:path';
|
|
import { parseFrontmatter, parseJson, findImports } from './yaml-parser.mjs';
|
|
import { lineCount, normalizePath } from './string-utils.mjs';
|
|
import { discoverPlugins } from '../plugin-health-scanner.mjs';
|
|
|
|
const SCHEMA_VERSION = '1.0.0';
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Token estimation
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Estimate tokens for a given byte count and content kind.
|
|
* Deterministic heuristic — see feature plan §4 for rationale.
|
|
*
|
|
* MCP (v5 F2): an active MCP server consumes a base overhead of ~500 tokens
|
|
* for protocol metadata + tool schemas, even before any tool is described.
|
|
* When tool count is known (Step 14 wires this up), we estimate ~200 tokens
|
|
* per tool description.
|
|
*
|
|
* @param {number} bytes - Byte count (or item count for kind='item')
|
|
* @param {'markdown'|'frontmatter'|'json'|'item'|'mcp'} kind
|
|
* @param {{toolCount?: number}} [opts] - kind-specific options (mcp: toolCount)
|
|
* @returns {number} Integer token count (rounded up)
|
|
*/
|
|
export function estimateTokens(bytes, kind = 'markdown', opts = {}) {
  // Enumerated items (hooks etc.) carry a flat per-item cost.
  if (kind === 'item') return 15;

  if (kind === 'mcp') {
    const BASE = 500;     // protocol metadata overhead per active server
    const PER_TOOL = 200; // per tool description
    const hasToolCount = typeof opts.toolCount === 'number' && opts.toolCount > 0;
    const toolCount = hasToolCount ? opts.toolCount : 0;
    const bytesValid = typeof bytes === 'number' && bytes > 0 && Number.isFinite(bytes);
    const byteEstimate = Math.ceil((bytesValid ? bytes : 0) / 3.5);
    const toolEstimate = BASE + toolCount * PER_TOOL;
    // Never report below the base overhead, and take whichever signal is larger.
    return Math.max(BASE, byteEstimate, toolEstimate);
  }

  // Everything below needs a sane non-negative finite byte count.
  if (typeof bytes !== 'number' || bytes < 0 || !Number.isFinite(bytes)) return 0;

  switch (kind) {
    case 'frontmatter':
      // Only a small frontmatter slice loads at startup — cap at 600 bytes.
      return Math.ceil(Math.min(bytes, 600) / 4);
    case 'json':
      return Math.ceil(bytes / 3.5);
    default:
      // markdown
      return Math.ceil(bytes / 4);
  }
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Git root detection
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Walk up from startPath looking for a .git directory (or .git file for worktrees).
|
|
* @param {string} startPath
|
|
* @returns {Promise<string | null>} absolute path to git root, or null if none
|
|
*/
|
|
export async function detectGitRoot(startPath) {
  let current = resolve(startPath);
  const fsRoot = resolve('/');
  // Walk upward, statting <dir>/.git at every level INCLUDING the filesystem
  // root. (The previous `while (current !== root)` loop exited before ever
  // checking the root directory itself, so a repo rooted at `/` was missed.)
  for (;;) {
    try {
      // .git may be a directory or a file (worktrees) — stat matches both.
      await stat(join(current, '.git'));
      return current;
    } catch { /* not here — keep climbing */ }
    if (current === fsRoot) break;
    const parent = dirname(current);
    if (parent === current) break; // defensive: dirname reached a fixed point
    current = parent;
  }
  return null;
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// CLAUDE.md cascade
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Enumerate all CLAUDE.md files that load for a given repo path, in load order:
|
|
* managed → user (~/.claude/CLAUDE.md) → ancestor CLAUDE.md (walking up to $HOME) →
|
|
* repo CLAUDE.md → @imports (recursive, deduped).
|
|
*
|
|
* Each file in the result includes absolute path, scope, bytes, lines, and parent.
|
|
* Imports are marked with scope='import' and `parent` is the absolute path of the
|
|
* file that imported them.
|
|
*
|
|
* @param {string} repoPath
|
|
* @returns {Promise<{ files: Array<{path:string, scope:string, bytes:number, lines:number, parent:string|null}>, totalBytes:number, totalLines:number, estimatedTokens:number }>}
|
|
*/
|
|
export async function walkClaudeMdCascade(repoPath) {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  const absRepoPath = resolve(repoPath);
  const files = [];
  const seen = new Set();

  // Managed locations (platform-dependent, best effort)
  const managedCandidates = [
    '/Library/Application Support/ClaudeCode/CLAUDE.md',
    '/etc/claude-code/CLAUDE.md',
  ];
  for (const p of managedCandidates) {
    await tryAddClaudeMd(p, 'managed', null, files, seen);
  }

  // User: ~/.claude/CLAUDE.md
  if (home) {
    await tryAddClaudeMd(join(home, '.claude', 'CLAUDE.md'), 'user', null, files, seen);
  }

  // Ancestors between $HOME and repoPath (exclusive of $HOME, inclusive of
  // repoPath). All ancestors share scope 'project' — the previous
  // `ancestor === absRepoPath ? 'project' : 'project'` ternary was dead code
  // with identical branches.
  const ancestorChain = buildAncestorChain(absRepoPath, home);
  for (const ancestor of ancestorChain) {
    await tryAddClaudeMd(join(ancestor, 'CLAUDE.md'), 'project', null, files, seen);
    // The repo dir itself may also carry a local (uncommitted) variant.
    if (ancestor === absRepoPath) {
      await tryAddClaudeMd(join(ancestor, 'CLAUDE.local.md'), 'local', null, files, seen);
    }
  }

  // BFS over @imports from every file found so far; `seen` dedupes cycles.
  const queue = files.slice();
  while (queue.length > 0) {
    const parent = queue.shift();
    let content;
    try {
      content = await readFile(parent.path, 'utf-8');
    } catch { continue; }
    for (const imp of findImports(content)) {
      const resolved = resolveImportPath(imp.path, parent.path, home);
      if (!resolved || seen.has(resolved)) continue;
      const added = await tryAddClaudeMd(resolved, 'import', parent.path, files, seen);
      if (added) queue.push(added); // newly added imports may import further
    }
  }

  const totalBytes = files.reduce((sum, f) => sum + f.bytes, 0);
  const totalLines = files.reduce((sum, f) => sum + f.lines, 0);
  const estimatedTokens = estimateTokens(totalBytes, 'markdown');

  return { files, totalBytes, totalLines, estimatedTokens };
}
|
|
|
|
/**
 * Stat + read one CLAUDE.md candidate; append a file record on success.
 * Returns the record so callers can chase its @imports, or null when the
 * path was already collected, is not a regular file, or cannot be read.
 */
async function tryAddClaudeMd(absPath, scope, parent, files, seen) {
  // Already collected (dedupes import cycles and repeated candidates).
  if (seen.has(absPath)) return null;
  try {
    const info = await stat(absPath);
    if (!info.isFile()) return null;
    const body = await readFile(absPath, 'utf-8');
    const record = {
      path: absPath,
      scope,
      bytes: info.size,
      lines: lineCount(body),
      parent,
    };
    files.push(record);
    seen.add(absPath);
    return record;
  } catch {
    // Missing or unreadable — the cascade is strictly best-effort.
    return null;
  }
}
|
|
|
|
/**
 * Directories from the filesystem root (or $HOME, exclusive) down to
 * absRepoPath (inclusive), in load order: outermost first.
 */
function buildAncestorChain(absRepoPath, home) {
  const stopAt = home ? resolve(home) : null;
  const fsRoot = resolve('/');
  const walkedUp = [];
  for (let dir = absRepoPath; dir !== fsRoot; ) {
    if (stopAt !== null && dir === stopAt) break; // never include $HOME itself
    walkedUp.push(dir);
    const up = dirname(dir);
    if (up === dir) break; // defensive: dirname hit a fixed point
    dir = up;
  }
  // We walked inner → outer; callers want outer → inner.
  return walkedUp.reverse();
}
|
|
|
|
/**
 * Resolve an @import target to an absolute path: `~/…` expands against $HOME,
 * relative paths resolve against the importing file's directory.
 * Returns null for blank paths.
 */
function resolveImportPath(importPath, fromFile, home) {
  const trimmed = importPath.trim();
  if (!trimmed) return null;
  let candidate = trimmed;
  if (candidate.startsWith('~/')) {
    candidate = join(home, candidate.slice(2));
  } else if (candidate.startsWith('~')) {
    // Bare `~foo` is treated best-effort as $HOME/foo.
    candidate = join(home, candidate.slice(1));
  }
  return isAbsolute(candidate) ? candidate : resolve(dirname(fromFile), candidate);
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// .claude.json project slice
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Read ~/.claude.json and return the best-matching projects slice for repoPath.
|
|
* Uses longest-prefix matching — if two keys match, the deeper one wins.
|
|
* Paths are normalized (trailing slashes stripped) before comparison.
|
|
*
|
|
* @param {string} repoPath
|
|
* @returns {Promise<{ projectKey: string|null, mcpServers: object, enabledMcpjsonServers: string[], disabledMcpjsonServers: string[], enabledPlugins: object, raw: object|null }>}
|
|
*/
|
|
export async function readClaudeJsonProjectSlice(repoPath) {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  const claudeJsonPath = join(home, '.claude.json');
  const empty = {
    projectKey: null,
    mcpServers: {},
    enabledMcpjsonServers: [],
    disabledMcpjsonServers: [],
    enabledPlugins: {},
    raw: null,
  };

  let content;
  try {
    const s = await stat(claudeJsonPath);
    // Safety: skip pathologically large files (>10MB)
    if (s.size > 10 * 1024 * 1024) return empty;
    content = await readFile(claudeJsonPath, 'utf-8');
  } catch {
    return empty;
  }

  const parsed = parseJson(content);
  if (!parsed) return empty;

  const target = normalizePath(resolve(repoPath));
  const projects = parsed.projects || {};

  // Exact match wins immediately; otherwise the longest ancestor prefix wins.
  // (The old loop also re-tested `target === normKey` inside the prefix
  // branch, which was unreachable after the exact-match break — removed.)
  let best = null;
  let bestLen = -1;
  for (const key of Object.keys(projects)) {
    const normKey = normalizePath(key);
    if (normKey === target) { best = key; bestLen = normKey.length; break; }
    // Ancestor prefix: target must start with key followed by a separator.
    // NOTE(review): `sep` is the OS separator — if normalizePath() rewrites
    // Windows paths to forward slashes this boundary check would never match
    // there; confirm against string-utils.mjs.
    if (target.startsWith(normKey + sep) && normKey.length > bestLen) {
      best = key;
      bestLen = normKey.length;
    }
  }

  if (!best) return { ...empty, raw: parsed };

  const slice = projects[best] || {};
  return {
    projectKey: best,
    mcpServers: slice.mcpServers || {},
    enabledMcpjsonServers: Array.isArray(slice.enabledMcpjsonServers) ? slice.enabledMcpjsonServers : [],
    disabledMcpjsonServers: Array.isArray(slice.disabledMcpjsonServers) ? slice.disabledMcpjsonServers : [],
    enabledPlugins: slice.enabledPlugins || {},
    raw: parsed,
  };
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Plugin enumeration
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Enumerate all plugins installed under ~/.claude/plugins/marketplaces.
|
|
* For each plugin: counts commands, agents, skills, hooks, rules; reads version from plugin.json.
|
|
*
|
|
* @returns {Promise<Array<{name:string, path:string, version:string|null, commands:number, agents:number, skills:number, hooks:number, rules:number, totalBytes:number, estimatedTokens:number}>>}
|
|
*/
|
|
export async function enumeratePlugins() {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  if (!home) return [];

  const marketplacesRoot = join(home, '.claude', 'plugins', 'marketplaces');
  const roots = await discoverAllPluginsUnder(marketplacesRoot);

  // Canonicalize via realpath so symlinked plugin roots are counted once.
  const visited = new Set();
  const plugins = [];
  for (const pluginRoot of roots) {
    let canonicalKey = pluginRoot;
    try { canonicalKey = await realpath(pluginRoot); } catch { /* keep raw path */ }
    if (visited.has(canonicalKey)) continue;
    visited.add(canonicalKey);

    const counts = await countPluginItems(pluginRoot);

    // Prefer name/version from .claude-plugin/plugin.json when present;
    // fall back to the directory name.
    let name = basename(pluginRoot);
    let version = null;
    try {
      const manifestRaw = await readFile(join(pluginRoot, '.claude-plugin', 'plugin.json'), 'utf-8');
      const manifest = parseJson(manifestRaw);
      if (manifest) {
        version = manifest.version || null;
        if (manifest.name) name = manifest.name;
      }
    } catch { /* manifest is optional */ }

    plugins.push({
      name,
      path: pluginRoot,
      version,
      commands: counts.commands,
      agents: counts.agents,
      skills: counts.skills,
      hooks: counts.hooks,
      rules: counts.rules,
      totalBytes: counts.totalBytes,
      estimatedTokens: counts.estimatedTokens,
    });
  }

  return plugins;
}
|
|
|
|
/**
 * Collect plugin roots from every marketplace directory. A marketplace either
 * has a `plugins/` subdirectory or holds plugins directly.
 */
async function discoverAllPluginsUnder(marketplacesRoot) {
  const found = [];
  let dirents;
  try {
    dirents = await readdir(marketplacesRoot, { withFileTypes: true });
  } catch {
    return found; // no marketplaces dir at all
  }
  for (const dirent of dirents) {
    if (!dirent.isDirectory()) continue;
    const marketplaceDir = join(marketplacesRoot, dirent.name);
    // Prefer <marketplace>/plugins/*; fall back to scanning the marketplace
    // directory itself when that yields nothing.
    const primary = await discoverPlugins(join(marketplaceDir, 'plugins')).catch(() => []);
    if (primary.length > 0) {
      found.push(...primary);
    } else {
      const fallback = await discoverPlugins(marketplaceDir).catch(() => []);
      found.push(...fallback);
    }
  }
  return found;
}
|
|
|
|
/**
 * Count loadable items under one plugin root and accumulate byte/token totals.
 *
 * Token accounting mirrors what loads at startup: command and agent files
 * contribute only their (capped) frontmatter, skill and rule files contribute
 * full markdown bodies, and hooks.json contributes as JSON.
 *
 * @param {string} pluginRoot - absolute path to the plugin directory
 * @returns {Promise<{commands:number, agents:number, skills:number, hooks:number, rules:number, totalBytes:number, estimatedTokens:number}>}
 */
async function countPluginItems(pluginRoot) {
  const counts = { commands: 0, agents: 0, skills: 0, hooks: 0, rules: 0, totalBytes: 0, estimatedTokens: 0 };

  // Commands (frontmatter — only small portion loaded at startup)
  const commandsDir = join(pluginRoot, 'commands');
  const commandFiles = await listMarkdownFiles(commandsDir);
  counts.commands = commandFiles.length;
  for (const f of commandFiles) {
    counts.totalBytes += f.size;
    counts.estimatedTokens += estimateTokens(f.size, 'frontmatter');
  }

  // Agents (frontmatter similarly)
  const agentsDir = join(pluginRoot, 'agents');
  const agentFiles = await listMarkdownFiles(agentsDir);
  counts.agents = agentFiles.length;
  for (const f of agentFiles) {
    counts.totalBytes += f.size;
    counts.estimatedTokens += estimateTokens(f.size, 'frontmatter');
  }

  // Skills (SKILL.md bodies — full markdown counts)
  const skillsDir = join(pluginRoot, 'skills');
  const skillFiles = await findSkillMdFiles(skillsDir);
  counts.skills = skillFiles.length;
  for (const f of skillFiles) {
    counts.totalBytes += f.size;
    counts.estimatedTokens += estimateTokens(f.size, 'markdown');
  }

  // Hooks (hooks.json — count entries; missing file is simply skipped)
  const hooksJsonPath = join(pluginRoot, 'hooks', 'hooks.json');
  try {
    const s = await stat(hooksJsonPath);
    const content = await readFile(hooksJsonPath, 'utf-8');
    const parsed = parseJson(content);
    if (parsed && parsed.hooks && typeof parsed.hooks === 'object') {
      for (const event of Object.keys(parsed.hooks)) {
        const arr = parsed.hooks[event];
        if (Array.isArray(arr)) {
          for (const entry of arr) {
            // Nested form {matcher, hooks: [...]} counts each inner hook;
            // a flat entry counts as one hook.
            if (entry && Array.isArray(entry.hooks)) {
              counts.hooks += entry.hooks.length;
            } else {
              counts.hooks += 1;
            }
          }
        }
      }
    }
    // Bytes/tokens are charged even when the JSON parses but has no hooks key.
    counts.totalBytes += s.size;
    counts.estimatedTokens += estimateTokens(s.size, 'json');
  } catch { /* no hooks */ }

  // Rules — both the plugin-root rules/ layout and the .claude/rules/ layout
  const rulesDir = join(pluginRoot, 'rules');
  const altRulesDir = join(pluginRoot, '.claude', 'rules');
  for (const d of [rulesDir, altRulesDir]) {
    const rules = await listMarkdownFiles(d);
    counts.rules += rules.length;
    for (const f of rules) {
      counts.totalBytes += f.size;
      counts.estimatedTokens += estimateTokens(f.size, 'markdown');
    }
  }

  return counts;
}
|
|
|
|
/**
 * Shallow-list regular `.md` files in a directory with their sizes.
 * Missing/unreadable directories yield an empty list.
 */
async function listMarkdownFiles(dir) {
  let dirents;
  try {
    dirents = await readdir(dir, { withFileTypes: true });
  } catch {
    return []; // directory absent — treated as empty
  }
  const found = [];
  for (const dirent of dirents) {
    if (!dirent.isFile() || !dirent.name.endsWith('.md')) continue;
    const fullPath = join(dir, dirent.name);
    try {
      const info = await stat(fullPath);
      found.push({ path: fullPath, size: info.size });
    } catch { /* raced deletion — skip */ }
  }
  return found;
}
|
|
|
|
/**
 * Recursively find SKILL.md files (case-insensitive) under `dir`,
 * descending at most 3 directory levels.
 */
async function findSkillMdFiles(dir) {
  const results = [];
  const MAX_DEPTH = 3;

  async function descend(current, depth) {
    if (depth > MAX_DEPTH) return;
    let dirents;
    try {
      dirents = await readdir(current, { withFileTypes: true });
    } catch {
      return; // unreadable subtree — skip silently
    }
    for (const dirent of dirents) {
      const fullPath = join(current, dirent.name);
      if (dirent.isDirectory()) {
        await descend(fullPath, depth + 1);
        continue;
      }
      if (!dirent.isFile() || !/^SKILL\.md$/i.test(dirent.name)) continue;
      try {
        const info = await stat(fullPath);
        results.push({ path: fullPath, size: info.size });
      } catch { /* skip unreadable file */ }
    }
  }

  await descend(dir, 0);
  return results;
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Skills (user + plugin)
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Enumerate SKILL.md files available to Claude Code: user skills under ~/.claude/skills
|
|
* plus all skills discovered via enumeratePlugins results.
|
|
*
|
|
* @param {Array<{name:string, path:string}>} pluginList
|
|
* @returns {Promise<Array<{name:string, source:'user'|'plugin', pluginName:string|null, path:string, bytes:number, estimatedTokens:number}>>}
|
|
*/
|
|
export async function enumerateSkills(pluginList = []) {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  const skills = [];

  // Shared record builder: the skill's name is its containing directory name.
  const pushSkill = (file, source, pluginName) => {
    skills.push({
      name: basename(dirname(file.path)),
      source,
      pluginName,
      path: file.path,
      bytes: file.size,
      estimatedTokens: estimateTokens(file.size, 'markdown'),
    });
  };

  // User skills under ~/.claude/skills
  if (home) {
    const userSkills = await findSkillMdFiles(join(home, '.claude', 'skills'));
    for (const file of userSkills) {
      pushSkill(file, 'user', null);
    }
  }

  // Plugin-provided skills
  for (const plugin of pluginList) {
    const pluginSkills = await findSkillMdFiles(join(plugin.path, 'skills'));
    for (const file of pluginSkills) {
      pushSkill(file, 'plugin', plugin.name);
    }
  }

  return skills;
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Hooks (user + project + plugin)
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Read active hooks from user settings, project settings, and plugin hooks.json files.
|
|
* Does NOT dedupe — a hook loaded from two scopes is reported twice (different source).
|
|
*
|
|
* @param {string} repoPath
|
|
* @param {Array<{name:string, path:string}>} [pluginList]
|
|
* @returns {Promise<Array<{event:string, matcher:string|null, command:string, source:string, sourcePath:string, estimatedTokens:number}>>}
|
|
*/
|
|
export async function readActiveHooks(repoPath, pluginList = []) {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  const hooks = [];

  // User-scope settings first (reported in load order).
  if (home) {
    await collectHooksFromSettings(join(home, '.claude', 'settings.json'), 'user', hooks);
  }

  // Project settings, then the local (uncommitted) overlay.
  await collectHooksFromSettings(join(repoPath, '.claude', 'settings.json'), 'project', hooks);
  await collectHooksFromSettings(join(repoPath, '.claude', 'settings.local.json'), 'local', hooks);

  // Plugin-bundled hooks.json files.
  for (const plugin of pluginList) {
    const hooksJsonPath = join(plugin.path, 'hooks', 'hooks.json');
    await collectHooksFromHooksJson(hooksJsonPath, `plugin:${plugin.name}`, hooks);
  }

  return hooks;
}
|
|
|
|
/**
 * Read a settings.json-shaped file and append its hook entries to `out`.
 * Missing files and non-object `hooks` keys are silently ignored.
 */
async function collectHooksFromSettings(settingsPath, source, out) {
  let raw;
  try {
    raw = await readFile(settingsPath, 'utf-8');
  } catch {
    return; // file absent — nothing to collect
  }
  const settings = parseJson(raw);
  const hooksObj = settings?.hooks;
  if (!hooksObj || typeof hooksObj !== 'object') return;
  collectHookEntries(hooksObj, source, settingsPath, out);
}
|
|
|
|
/**
 * Collect hooks from a plugin hooks.json. The file shape (a top-level `hooks`
 * object) is identical to the `hooks` key of a settings.json, so this
 * delegates to collectHooksFromSettings — the previous implementation was a
 * byte-for-byte duplicate of that function's body. Kept as a separate entry
 * point so call sites stay self-describing.
 *
 * @param {string} hooksPath - path to the plugin's hooks/hooks.json
 * @param {string} source - source label (e.g. "plugin:<name>")
 * @param {Array<object>} out - accumulator mutated in place
 */
async function collectHooksFromHooksJson(hooksPath, source, out) {
  await collectHooksFromSettings(hooksPath, source, out);
}
|
|
|
|
/**
 * Flatten a parsed `hooks` object ({event: [entry, ...]}) into `out`.
 * Supports both the nested form {matcher, hooks: [...]} and flat entries.
 */
function collectHookEntries(hooksObj, source, sourcePath, out) {
  for (const [event, entryList] of Object.entries(hooksObj)) {
    if (!Array.isArray(entryList)) continue;
    for (const entry of entryList) {
      if (!entry) continue;
      const matcher = entry.matcher || null;
      // Nested form carries the actual hooks in entry.hooks; otherwise the
      // entry itself is the hook definition.
      const hookDefs = Array.isArray(entry.hooks) ? entry.hooks : [entry];
      for (const def of hookDefs) {
        if (!def) continue;
        out.push({
          event,
          matcher,
          command: def.command || def.script || '',
          source,
          sourcePath,
          estimatedTokens: estimateTokens(0, 'item'),
        });
      }
    }
  }
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// MCP servers (project .mcp.json + ~/.claude.json + plugin)
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Enumerate active MCP servers from project .mcp.json, ~/.claude.json project slice, and plugin .mcp.json.
|
|
* Honors disabledMcpjsonServers / disabledMcpServers lists.
|
|
*
|
|
* @param {string} repoPath
|
|
* @param {object} [claudeJsonSlice] - result of readClaudeJsonProjectSlice
|
|
* @param {Array<{name:string, path:string}>} [pluginList]
|
|
* @returns {Promise<Array<{name:string, source:string, command:string, enabled:boolean, disabledBy:string|null, estimatedTokens:number}>>}
|
|
*/
|
|
export async function readActiveMcpServers(repoPath, claudeJsonSlice = null, pluginList = []) {
  const servers = [];
  const slice = claudeJsonSlice || await readClaudeJsonProjectSlice(repoPath);
  const disabledSet = new Set(slice.disabledMcpjsonServers || []);

  // 1. Project-level .mcp.json
  await collectMcpFromFile(join(repoPath, '.mcp.json'), '.mcp.json', disabledSet, servers, repoPath);

  // 2. Per-project servers defined in ~/.claude.json
  for (const [serverName, definition] of Object.entries(slice.mcpServers || {})) {
    const { toolCount, toolCountUnknown } = await detectMcpToolCount(serverName, definition, repoPath);
    const isDisabled = disabledSet.has(serverName);
    servers.push({
      name: serverName,
      source: '~/.claude.json:projects',
      command: describeMcpCommand(definition),
      enabled: !isDisabled,
      disabledBy: isDisabled ? 'disabledMcpjsonServers' : null,
      toolCount,
      toolCountUnknown,
      estimatedTokens: estimateTokens(0, 'mcp', { toolCount: toolCount ?? 0 }),
    });
  }

  // 3. Plugin-bundled .mcp.json files
  for (const plugin of pluginList) {
    await collectMcpFromFile(join(plugin.path, '.mcp.json'), `plugin:${plugin.name}`, disabledSet, servers, repoPath);
  }

  return servers;
}
|
|
|
|
/**
 * Parse one .mcp.json-shaped file and append its server records to `out`.
 * Missing files and files without an `mcpServers` object are ignored.
 */
async function collectMcpFromFile(path, source, disabled, out, repoPath) {
  let raw;
  try {
    raw = await readFile(path, 'utf-8');
  } catch {
    return; // file absent — nothing to collect
  }
  const config = parseJson(raw);
  if (!config?.mcpServers || typeof config.mcpServers !== 'object') return;

  for (const [serverName, definition] of Object.entries(config.mcpServers)) {
    const { toolCount, toolCountUnknown } = await detectMcpToolCount(serverName, definition, repoPath);
    const isDisabled = disabled.has(serverName);
    out.push({
      name: serverName,
      source,
      command: describeMcpCommand(definition),
      enabled: !isDisabled,
      disabledBy: isDisabled ? 'disabledMcpjsonServers' : null,
      toolCount,
      toolCountUnknown,
      estimatedTokens: estimateTokens(0, 'mcp', { toolCount: toolCount ?? 0 }),
    });
  }
}
|
|
|
|
/**
|
|
* Detect tool count for an MCP server in this priority order (v5 M1):
|
|
* 1. Explicit `tools` array on the server definition (legacy in-config form)
|
|
* 2. Cached `tools/list` response at $HOME/.claude/config-audit/mcp-cache/<name>.json
|
|
* 3. `tools` array in the npm package's package.json (resolved from
|
|
* <repoPath>/node_modules/<pkg>/package.json when the command is `npx <pkg>`)
|
|
* 4. Fallback: { toolCount: null, toolCountUnknown: true }
|
|
*
|
|
* @param {string} name
|
|
* @param {object} def
|
|
* @param {string} repoPath
|
|
* @returns {Promise<{toolCount: number|null, toolCountUnknown: boolean}>}
|
|
*/
|
|
async function detectMcpToolCount(name, def, repoPath) {
  // 1. Legacy in-config `tools` array on the server definition.
  if (Array.isArray(def?.tools)) {
    return { toolCount: def.tools.length, toolCountUnknown: false };
  }

  // 2. Cached tools/list response under ~/.claude/config-audit/mcp-cache/.
  const home = process.env.HOME || process.env.USERPROFILE || '';
  if (home) {
    const cachePath = join(home, '.claude', 'config-audit', 'mcp-cache', `${name}.json`);
    try {
      const cached = parseJson(await readFile(cachePath, 'utf-8'));
      if (Array.isArray(cached?.tools)) {
        return { toolCount: cached.tools.length, toolCountUnknown: false };
      }
    } catch { /* cache miss */ }
  }

  // 3. `tools` array in the npm package's package.json (npx-launched servers).
  const pkgName = extractNpmPackageName(def);
  if (pkgName) {
    const pkgJsonPath = join(repoPath, 'node_modules', pkgName, 'package.json');
    try {
      const pkg = parseJson(await readFile(pkgJsonPath, 'utf-8'));
      if (Array.isArray(pkg?.tools)) {
        return { toolCount: pkg.tools.length, toolCountUnknown: false };
      }
    } catch { /* package not installed locally */ }
  }

  // 4. No signal — report unknown rather than guessing.
  return { toolCount: null, toolCountUnknown: true };
}
|
|
|
|
/**
|
|
* Extract npm package name from an MCP server definition launched via npx.
|
|
* Skips npx flags (`-y`, `--yes`, `--package=...`); returns the first arg
|
|
* that looks like a package name.
|
|
*/
|
|
/**
 * Extract the npm package name from an MCP server definition launched via npx.
 * Skips npx flags (`-y`, `--yes`, `--package=...`) and strips any version/tag
 * suffix ("pkg@1.2.3", "@scope/pkg@latest") — previously the raw versioned
 * spec was returned, so the node_modules/<name>/package.json lookup in
 * detectMcpToolCount could never resolve versioned specs.
 *
 * @param {object} def - MCP server definition ({command, args, ...})
 * @returns {string|null} bare package name, or null when not npx-launched
 */
function extractNpmPackageName(def) {
  if (!def || typeof def !== 'object') return null;
  if (def.command !== 'npx' || !Array.isArray(def.args)) return null;
  for (const arg of def.args) {
    if (typeof arg !== 'string') continue;
    if (arg.startsWith('-')) continue; // npx flag, not a package spec
    // An '@' after position 0 separates name from version; a leading '@'
    // marks a scope and is not a version separator.
    const versionAt = arg.indexOf('@', arg.startsWith('@') ? 1 : 0);
    return versionAt === -1 ? arg : arg.slice(0, versionAt);
  }
  return null;
}
|
|
|
|
/**
 * Human-readable one-line description of how an MCP server is launched:
 * the URL for http/sse transports, otherwise "command arg1 arg2 ...".
 */
function describeMcpCommand(def) {
  if (!def || typeof def !== 'object') return '';
  // Remote transports are identified by URL rather than a launch command.
  if (def.type === 'http' || def.type === 'sse') return def.url || '';
  if (!def.command) return '';
  const argString = Array.isArray(def.args) ? def.args.join(' ') : '';
  return argString ? `${def.command} ${argString}` : def.command;
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Settings cascade
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
 * Report the user → project → local settings.json cascade: for each file,
 * whether it exists and how many top-level keys it defines.
 */
async function readSettingsCascade(repoPath) {
  const home = process.env.HOME || process.env.USERPROFILE || '';
  const sources = [
    { scope: 'user', path: home ? join(home, '.claude', 'settings.json') : null },
    { scope: 'project', path: join(repoPath, '.claude', 'settings.json') },
    { scope: 'local', path: join(repoPath, '.claude', 'settings.local.json') },
  ];

  const cascade = [];
  for (const source of sources) {
    if (!source.path) continue; // user scope without $HOME
    let exists = false;
    let keyCount = 0;
    try {
      const raw = await readFile(source.path, 'utf-8');
      exists = true; // the read succeeded even if the JSON turns out invalid
      const parsed = parseJson(raw);
      if (parsed && typeof parsed === 'object') {
        keyCount = Object.keys(parsed).length;
      }
    } catch { /* missing file — reported with exists:false */ }
    cascade.push({ scope: source.scope, path: source.path, exists, keyCount });
  }
  return cascade;
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// Suggest disables (deterministic signals)
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
 * Deterministic disable suggestions from already-gathered data; no I/O.
 * Signals: already-disabled MCP servers, empty plugins, plugins never
 * mentioned in the CLAUDE.md corpus, and skills from missing plugins.
 */
function buildSuggestDisables({ plugins, skills, mcpServers, claudeMdBodies }) {
  const candidates = [];
  const addCandidate = (kind, name, reason, confidence) => {
    candidates.push({ kind, name, reason, confidence });
  };
  const itemCount = (p) => p.commands + p.agents + p.skills + p.hooks;

  // 1. MCP servers the user already disabled — safe to confirm.
  for (const server of mcpServers) {
    if (server.enabled) continue;
    addCandidate('mcp', server.name, `already disabled via ${server.disabledBy || 'config'}`, 'high');
  }

  // 2. Plugins that ship nothing loadable.
  for (const plugin of plugins) {
    if (itemCount(plugin) === 0) {
      addCandidate('plugin', plugin.name, 'plugin contains no commands, agents, skills, or hooks', 'high');
    }
  }

  // 3. Non-empty plugins never mentioned anywhere in the CLAUDE.md cascade.
  const corpus = claudeMdBodies.join('\n').toLowerCase();
  for (const plugin of plugins) {
    if (itemCount(plugin) === 0) continue; // already flagged above
    if (corpus.includes(plugin.name.toLowerCase())) continue;
    addCandidate('plugin', plugin.name, 'plugin name not mentioned in any CLAUDE.md in the cascade', 'medium');
  }

  // 4. Plugin-sourced skills whose providing plugin is gone.
  const installed = new Set(plugins.map((p) => p.name));
  for (const skill of skills) {
    if (skill.source !== 'plugin' || !skill.pluginName) continue;
    if (installed.has(skill.pluginName)) continue;
    addCandidate('skill', skill.name, `skill references plugin "${skill.pluginName}" which is not installed`, 'high');
  }

  return { candidates };
}
|
|
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
// One-shot readActiveConfig
|
|
// ─────────────────────────────────────────────────────────────────────────
|
|
|
|
/**
|
|
* Produce a full ActiveConfig snapshot for repoPath.
|
|
* Runs component enumerators in parallel where possible. Targets <2s wall-clock.
|
|
*
|
|
* @param {string} repoPath
|
|
* @param {object} [opts]
|
|
* @param {boolean} [opts.verbose=false]
|
|
* @param {boolean} [opts.suggestDisables=false]
|
|
* @returns {Promise<object>} see feature plan §3 for shape
|
|
*/
|
|
export async function readActiveConfig(repoPath, opts = {}) {
  const start = Date.now();
  const absRepoPath = resolve(repoPath);

  // Independent enumerators run in parallel.
  const [
    gitRoot,
    claudeMd,
    claudeJsonSlice,
    plugins,
    settingsCascade,
  ] = await Promise.all([
    detectGitRoot(absRepoPath),
    walkClaudeMdCascade(absRepoPath),
    readClaudeJsonProjectSlice(absRepoPath),
    enumeratePlugins(),
    readSettingsCascade(absRepoPath),
  ]);

  // Skills/hooks/MCP servers depend on the plugin list (and the ~/.claude.json
  // slice), so they run in a second parallel wave.
  const [skills, hooks, mcpServers] = await Promise.all([
    enumerateSkills(plugins),
    readActiveHooks(absRepoPath, plugins),
    readActiveMcpServers(absRepoPath, claudeJsonSlice, plugins),
  ]);

  // Aggregate counts and per-component token estimates.
  const totals = {
    plugins: plugins.length,
    skills: skills.length,
    mcpServers: mcpServers.length,
    hooks: hooks.length,
    claudeMdFiles: claudeMd.files.length,
    estimatedTokens: {
      claudeMd: claudeMd.estimatedTokens,
      plugins: plugins.reduce((s, p) => s + p.estimatedTokens, 0),
      skills: skills.reduce((s, k) => s + k.estimatedTokens, 0),
      mcpServers: mcpServers.reduce((s, m) => s + m.estimatedTokens, 0),
      hooks: hooks.reduce((s, h) => s + h.estimatedTokens, 0),
      grandTotal: 0, // filled in below
    },
  };
  totals.estimatedTokens.grandTotal =
    totals.estimatedTokens.claudeMd +
    totals.estimatedTokens.plugins +
    totals.estimatedTokens.skills +
    totals.estimatedTokens.mcpServers +
    totals.estimatedTokens.hooks;

  const warnings = [];

  // Optional heuristic suggestions — needs the raw CLAUDE.md bodies for the
  // "is this plugin mentioned anywhere?" corpus check.
  let suggestDisables = null;
  if (opts.suggestDisables) {
    const claudeMdBodies = await Promise.all(
      claudeMd.files.map(async f => {
        try { return await readFile(f.path, 'utf-8'); } catch { return ''; }
      }),
    );
    suggestDisables = buildSuggestDisables({ plugins, skills, mcpServers, claudeMdBodies });
  }

  // NOTE: opts.verbose currently has no effect — per-file detail is always
  // included. (A dead empty `if (!opts.verbose) {}` block was removed here.)
  return {
    meta: {
      tool: 'config-audit:whats-active',
      version: SCHEMA_VERSION,
      generatedAt: new Date().toISOString(),
      repoPath: absRepoPath,
      gitRoot,
      projectKey: claudeJsonSlice.projectKey,
      durationMs: Date.now() - start,
    },
    claudeMd,
    plugins,
    skills,
    mcpServers,
    hooks,
    settings: { cascade: settingsCascade },
    totals,
    suggestDisables,
    warnings,
  };
}
|