/*
 * Differentiate MCP servers from generic 'item' (flat 15) — they actually
 * cost 500+ tokens per turn for protocol metadata and tool schemas.
 * estimateTokens(bytes, 'mcp', {toolCount}) returns max of:
 *   - 500 token floor (base overhead)
 *   - ceil(bytes / 3.5) (json-rate when bytes known)
 *   - 500 + toolCount * 200 (when tool count is detected; Step 14 wires this)
 * Caller-side migration in next commit (Step 5).
 * Tests: +4 cases for mcp kind.
 */
import { describe, it, before, after, beforeEach, afterEach } from 'node:test';
import assert from 'node:assert/strict';
import { join, dirname, resolve } from 'node:path';
import { mkdir, writeFile, rm, readFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import {
  estimateTokens,
  detectGitRoot,
  walkClaudeMdCascade,
  readClaudeJsonProjectSlice,
  enumeratePlugins,
  enumerateSkills,
  readActiveHooks,
  readActiveMcpServers,
  readActiveConfig,
} from '../../scanners/lib/active-config-reader.mjs';
/**
 * Build a unique throwaway directory path under the OS tmpdir.
 * Combines a caller-supplied suffix with a timestamp and a random
 * base36 tag so parallel test runs never collide.
 *
 * @param {string} suffix - short tag naming the test group using the dir
 * @returns {string} absolute path (not created on disk)
 */
function uniqueDir(suffix) {
  return join(tmpdir(), `config-audit-acr-${suffix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`);
}
/**
 * Build a rich-repo fixture under `root`.
 * Layout mirrors feature plan §8 — git-repo, CLAUDE.md cascade, settings layers,
 * .mcp.json, fake-home with plugins + .claude.json.
 *
 * @param {string} root - directory to populate (created as needed)
 * @returns {Promise<{root: string, fakeHome: string, pluginRoot: string}>}
 *   paths to the repo root, the fake HOME dir, and the demo plugin dir
 */
async function buildRichRepo(root) {
  const fakeHome = join(root, 'fake-home');

  // Repo marker
  await mkdir(join(root, '.git'), { recursive: true });
  await writeFile(join(root, '.git', 'HEAD'), 'ref: refs/heads/main\n');

  // Project CLAUDE.md with @import
  await mkdir(join(root, 'docs'), { recursive: true });
  await writeFile(
    join(root, 'CLAUDE.md'),
    '# Project Instructions\n\n@docs/conv.md\n\nBuild with care.\n',
  );
  await writeFile(join(root, 'docs', 'conv.md'), '# Conventions\n\nUse conventional commits.\n');

  // Settings cascade: project settings.json + settings.local.json + a rules file
  await mkdir(join(root, '.claude', 'rules'), { recursive: true });
  await writeFile(
    join(root, '.claude', 'settings.json'),
    JSON.stringify({
      permissions: { allow: ['Read', 'Write'] },
      hooks: {
        PreToolUse: [
          { matcher: 'Bash', hooks: [{ type: 'command', command: 'check.sh' }] },
        ],
      },
    }, null, 2),
  );
  await writeFile(
    join(root, '.claude', 'settings.local.json'),
    JSON.stringify({ env: { DEBUG: 'true' } }, null, 2),
  );
  await writeFile(join(root, '.claude', 'rules', 'team.md'), '# Team Rule\n');

  // Project .mcp.json — two servers; 'beta' gets disabled via .claude.json below
  await writeFile(
    join(root, '.mcp.json'),
    JSON.stringify({
      mcpServers: {
        alpha: { command: 'npx', args: ['alpha-server'] },
        beta: { command: 'npx', args: ['beta-server'] },
      },
    }, null, 2),
  );

  // Fake HOME — user CLAUDE.md, settings, plugins, .claude.json
  await mkdir(join(fakeHome, '.claude'), { recursive: true });
  await writeFile(
    join(fakeHome, '.claude', 'CLAUDE.md'),
    '# User Instructions\n\nBe terse.\n',
  );
  await writeFile(
    join(fakeHome, '.claude', 'settings.json'),
    JSON.stringify({
      hooks: {
        Stop: [{ hooks: [{ type: 'command', command: 'reminder.sh' }] }],
      },
    }, null, 2),
  );

  // Plugin: demo plugin with 1 command, 1 skill, 1 hook
  const pluginRoot = join(
    fakeHome, '.claude', 'plugins', 'marketplaces', 'mp', 'plugins', 'demo',
  );
  await mkdir(join(pluginRoot, '.claude-plugin'), { recursive: true });
  await writeFile(
    join(pluginRoot, '.claude-plugin', 'plugin.json'),
    JSON.stringify({ name: 'demo', description: 'test plugin', version: '0.1.0' }, null, 2),
  );
  await mkdir(join(pluginRoot, 'commands'), { recursive: true });
  await writeFile(
    join(pluginRoot, 'commands', 'foo.md'),
    '---\nname: demo:foo\ndescription: foo\nmodel: sonnet\n---\n\nFoo command.\n',
  );
  await mkdir(join(pluginRoot, 'skills', 'bar'), { recursive: true });
  await writeFile(
    join(pluginRoot, 'skills', 'bar', 'SKILL.md'),
    '---\nname: bar\ndescription: bar skill\n---\n\nBar skill body.\n',
  );
  await mkdir(join(pluginRoot, 'hooks'), { recursive: true });
  await writeFile(
    join(pluginRoot, 'hooks', 'hooks.json'),
    JSON.stringify({
      hooks: {
        PostToolUse: [{ hooks: [{ type: 'command', command: 'demo-hook.sh' }] }],
      },
    }, null, 2),
  );

  // ~/.claude.json with projects slice keyed by the repo root path
  await writeFile(
    join(fakeHome, '.claude.json'),
    JSON.stringify({
      projects: {
        [root]: {
          mcpServers: {
            gamma: { command: 'gamma-server' },
          },
          disabledMcpjsonServers: ['beta'],
        },
      },
    }, null, 2),
  );

  return { root, fakeHome, pluginRoot };
}
// ─────────────────────────────────────────────────────────────────────────
// estimateTokens
// ─────────────────────────────────────────────────────────────────────────

describe('estimateTokens', () => {
  it('markdown: 4 chars per token, rounded up', () => {
    assert.equal(estimateTokens(400, 'markdown'), 100);
    assert.equal(estimateTokens(401, 'markdown'), 101);
    assert.equal(estimateTokens(0, 'markdown'), 0);
  });

  it('json: 3.5 chars per token, rounded up', () => {
    assert.equal(estimateTokens(350, 'json'), 100);
    assert.equal(estimateTokens(100, 'json'), 29);
  });

  it('frontmatter: caps at 600 bytes / 150 tokens', () => {
    assert.equal(estimateTokens(100, 'frontmatter'), 25);
    assert.equal(estimateTokens(600, 'frontmatter'), 150);
    assert.equal(estimateTokens(10_000, 'frontmatter'), 150);
  });

  it('item: flat 15 regardless of bytes', () => {
    assert.equal(estimateTokens(0, 'item'), 15);
    assert.equal(estimateTokens(9999, 'item'), 15);
  });

  it('defaults to markdown when kind omitted', () => {
    assert.equal(estimateTokens(400), 100);
  });

  it('handles invalid bytes gracefully', () => {
    assert.equal(estimateTokens(-1, 'markdown'), 0);
    assert.equal(estimateTokens(NaN, 'markdown'), 0);
  });

  // v5 F2: differentiated MCP estimate
  it('mcp: 0 bytes → at least 500 (base overhead floor)', () => {
    assert.ok(estimateTokens(0, 'mcp') >= 500,
      `expected >= 500, got ${estimateTokens(0, 'mcp')}`);
  });

  it('mcp: with toolCount: 10 → at least 2000', () => {
    assert.ok(estimateTokens(0, 'mcp', { toolCount: 10 }) >= 2000,
      `expected >= 2000, got ${estimateTokens(0, 'mcp', { toolCount: 10 })}`);
  });

  it('mcp: ratio mcp/item ≥ 30 for 10-tool server', () => {
    const mcp = estimateTokens(0, 'mcp', { toolCount: 10 });
    const item = estimateTokens(0, 'item');
    assert.ok(mcp / item >= 30,
      `expected ratio >= 30, got mcp=${mcp} item=${item} ratio=${mcp / item}`);
  });

  it('mcp: with bytes uses json-rate floor', () => {
    // 700 bytes JSON ≈ 200 tokens, but mcp keeps 500 floor
    assert.equal(estimateTokens(700, 'mcp'), 500);
    // 3500 bytes JSON = 1000 tokens, exceeds floor
    assert.equal(estimateTokens(3500, 'mcp'), 1000);
  });
});
// ─────────────────────────────────────────────────────────────────────────
// detectGitRoot
// ─────────────────────────────────────────────────────────────────────────

describe('detectGitRoot', () => {
  let root;
  before(async () => {
    // Minimal repo: .git marker at root, nested src/deep for walk-up test
    root = uniqueDir('git');
    await mkdir(join(root, '.git'), { recursive: true });
    await mkdir(join(root, 'src', 'deep'), { recursive: true });
    await writeFile(join(root, '.git', 'HEAD'), '\n');
  });
  after(async () => { await rm(root, { recursive: true, force: true }); });

  it('finds .git in start dir', async () => {
    const result = await detectGitRoot(root);
    assert.equal(result, resolve(root));
  });

  it('walks up to find .git', async () => {
    const result = await detectGitRoot(join(root, 'src', 'deep'));
    assert.equal(result, resolve(root));
  });

  it('returns null when no .git in chain', async () => {
    const noGit = uniqueDir('nogit');
    await mkdir(noGit, { recursive: true });
    try {
      const result = await detectGitRoot(noGit);
      // Could resolve to outer repo (the plugin repo) if tmpdir happens to be nested.
      // Accept null OR a path that is NOT noGit itself.
      if (result !== null) {
        assert.notEqual(result, resolve(noGit));
      }
    } finally {
      await rm(noGit, { recursive: true, force: true });
    }
  });
});
// ─────────────────────────────────────────────────────────────────────────
// walkClaudeMdCascade
// ─────────────────────────────────────────────────────────────────────────

describe('walkClaudeMdCascade', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    // Point HOME at the fixture's fake-home so user-scope files resolve there
    fixture = await buildRichRepo(uniqueDir('cascade'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });

  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('returns files in load order (user first, then project, then imports)', async () => {
    const result = await walkClaudeMdCascade(fixture.root);
    const scopes = result.files.map(f => f.scope);
    assert.ok(scopes.includes('user'), 'expected user scope');
    assert.ok(scopes.includes('project'), 'expected project scope');
    assert.ok(scopes.includes('import'), 'expected import scope');

    // user CLAUDE.md should come before project CLAUDE.md
    const userIdx = result.files.findIndex(f => f.scope === 'user');
    const projIdx = result.files.findIndex(f => f.scope === 'project');
    assert.ok(userIdx < projIdx, 'user scope must come before project');
  });

  it('resolves @imports and marks them with parent', async () => {
    const result = await walkClaudeMdCascade(fixture.root);
    const imp = result.files.find(f => f.path.endsWith('docs/conv.md'));
    assert.ok(imp, 'import should be discovered');
    assert.equal(imp.scope, 'import');
    assert.ok(imp.parent && imp.parent.endsWith('CLAUDE.md'));
  });

  it('counts bytes and lines', async () => {
    const result = await walkClaudeMdCascade(fixture.root);
    assert.ok(result.totalBytes > 0);
    assert.ok(result.totalLines > 0);
    for (const f of result.files) {
      assert.ok(f.bytes > 0);
      assert.ok(f.lines > 0);
    }
  });

  it('computes estimatedTokens via markdown heuristic', async () => {
    const result = await walkClaudeMdCascade(fixture.root);
    assert.equal(result.estimatedTokens, Math.ceil(result.totalBytes / 4));
  });

  it('handles missing user CLAUDE.md gracefully', async () => {
    // Remove user CLAUDE.md
    await rm(join(fixture.fakeHome, '.claude', 'CLAUDE.md'));
    const result = await walkClaudeMdCascade(fixture.root);
    const userFiles = result.files.filter(f => f.scope === 'user');
    assert.equal(userFiles.length, 0);
  });
});
// ─────────────────────────────────────────────────────────────────────────
// readClaudeJsonProjectSlice
// ─────────────────────────────────────────────────────────────────────────

describe('readClaudeJsonProjectSlice', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('slice'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('finds exact-match project key', async () => {
    const slice = await readClaudeJsonProjectSlice(fixture.root);
    assert.equal(slice.projectKey, fixture.root);
    assert.deepEqual(slice.disabledMcpjsonServers, ['beta']);
    assert.ok('gamma' in slice.mcpServers);
  });

  it('returns empty slice when no .claude.json exists', async () => {
    await rm(join(fixture.fakeHome, '.claude.json'));
    const slice = await readClaudeJsonProjectSlice(fixture.root);
    assert.equal(slice.projectKey, null);
    assert.deepEqual(slice.mcpServers, {});
  });

  it('longest-prefix match: deeper key wins over shallower', async () => {
    // Rewrite .claude.json with two keys — ancestor and the repo
    const parent = dirname(fixture.root);
    const content = JSON.stringify({
      projects: {
        [parent]: { mcpServers: { shallow: { command: 'shallow' } } },
        [fixture.root]: { mcpServers: { deep: { command: 'deep' } } },
      },
    }, null, 2);
    await writeFile(join(fixture.fakeHome, '.claude.json'), content);

    const slice = await readClaudeJsonProjectSlice(fixture.root);
    assert.equal(slice.projectKey, fixture.root);
    assert.ok('deep' in slice.mcpServers);
    assert.ok(!('shallow' in slice.mcpServers));
  });

  it('ancestor prefix matches when target is a subdir of a key', async () => {
    const parent = dirname(fixture.root);
    await writeFile(
      join(fixture.fakeHome, '.claude.json'),
      JSON.stringify({ projects: { [parent]: { mcpServers: { anc: {} } } } }, null, 2),
    );
    const slice = await readClaudeJsonProjectSlice(fixture.root);
    assert.equal(slice.projectKey, parent);
  });

  it('returns null projectKey when no key matches', async () => {
    await writeFile(
      join(fixture.fakeHome, '.claude.json'),
      JSON.stringify({ projects: { '/some/other/path': {} } }, null, 2),
    );
    const slice = await readClaudeJsonProjectSlice(fixture.root);
    assert.equal(slice.projectKey, null);
  });
});
// ─────────────────────────────────────────────────────────────────────────
// enumeratePlugins
// ─────────────────────────────────────────────────────────────────────────

describe('enumeratePlugins', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('plugins'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('discovers plugin and reads plugin.json version', async () => {
    const plugins = await enumeratePlugins();
    assert.ok(plugins.length >= 1);
    const demo = plugins.find(p => p.name === 'demo');
    assert.ok(demo, 'demo plugin should be discovered');
    assert.equal(demo.version, '0.1.0');
  });

  it('counts commands, skills, hooks', async () => {
    const plugins = await enumeratePlugins();
    const demo = plugins.find(p => p.name === 'demo');
    assert.equal(demo.commands, 1);
    assert.equal(demo.skills, 1);
    assert.equal(demo.hooks, 1);
  });

  it('returns empty array when HOME has no plugins', async () => {
    // Hold the temp path in a local so cleanup doesn't depend on the env
    // var surviving; afterEach restores originalHome regardless.
    const emptyHome = uniqueDir('empty');
    process.env.HOME = emptyHome;
    await mkdir(emptyHome, { recursive: true });
    try {
      const plugins = await enumeratePlugins();
      assert.deepEqual(plugins, []);
    } finally {
      await rm(emptyHome, { recursive: true, force: true });
    }
  });
});
// ─────────────────────────────────────────────────────────────────────────
// enumerateSkills
// ─────────────────────────────────────────────────────────────────────────

describe('enumerateSkills', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('skills'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('finds plugin skills', async () => {
    const plugins = await enumeratePlugins();
    const skills = await enumerateSkills(plugins);
    const bar = skills.find(s => s.name === 'bar');
    assert.ok(bar, 'plugin skill should be discovered');
    assert.equal(bar.source, 'plugin');
    assert.equal(bar.pluginName, 'demo');
  });

  it('finds user skills', async () => {
    // Add a user skill
    await mkdir(join(fixture.fakeHome, '.claude', 'skills', 'userskill'), { recursive: true });
    await writeFile(
      join(fixture.fakeHome, '.claude', 'skills', 'userskill', 'SKILL.md'),
      '# user skill\n',
    );
    const skills = await enumerateSkills([]);
    const userSkill = skills.find(s => s.name === 'userskill');
    assert.ok(userSkill, 'user skill should be discovered');
    assert.equal(userSkill.source, 'user');
  });
});
// ─────────────────────────────────────────────────────────────────────────
// readActiveHooks
// ─────────────────────────────────────────────────────────────────────────

describe('readActiveHooks', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('hooks'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('merges hooks from user + project + plugin', async () => {
    const plugins = await enumeratePlugins();
    const hooks = await readActiveHooks(fixture.root, plugins);
    const sources = new Set(hooks.map(h => h.source));
    assert.ok(sources.has('user'), 'user hook present');
    assert.ok(sources.has('project'), 'project hook present');
    assert.ok([...sources].some(s => s.startsWith('plugin:')), 'plugin hook present');
  });

  it('does not dedupe across scopes', async () => {
    // Add duplicate hook in user and project settings
    const dupeHook = {
      hooks: { PreToolUse: [{ matcher: 'Bash', hooks: [{ type: 'command', command: 'same.sh' }] }] },
    };
    await writeFile(join(fixture.fakeHome, '.claude', 'settings.json'), JSON.stringify(dupeHook));
    await writeFile(join(fixture.root, '.claude', 'settings.json'), JSON.stringify(dupeHook));
    const hooks = await readActiveHooks(fixture.root, []);
    const sameCmd = hooks.filter(h => h.command === 'same.sh');
    assert.equal(sameCmd.length, 2, 'should report both occurrences');
  });
});
// ─────────────────────────────────────────────────────────────────────────
// readActiveMcpServers
// ─────────────────────────────────────────────────────────────────────────

describe('readActiveMcpServers', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('mcp'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('merges project .mcp.json + .claude.json slice', async () => {
    const servers = await readActiveMcpServers(fixture.root);
    const names = servers.map(s => s.name);
    assert.ok(names.includes('alpha'), 'alpha from project');
    assert.ok(names.includes('beta'), 'beta from project');
    assert.ok(names.includes('gamma'), 'gamma from .claude.json');
  });

  it('honors disabledMcpjsonServers', async () => {
    const servers = await readActiveMcpServers(fixture.root);
    const beta = servers.find(s => s.name === 'beta');
    assert.equal(beta.enabled, false);
    assert.equal(beta.disabledBy, 'disabledMcpjsonServers');

    const alpha = servers.find(s => s.name === 'alpha');
    assert.equal(alpha.enabled, true);
    assert.equal(alpha.disabledBy, null);
  });
});
// ─────────────────────────────────────────────────────────────────────────
// readActiveConfig (integration)
// ─────────────────────────────────────────────────────────────────────────

describe('readActiveConfig (integration)', () => {
  let fixture;
  let originalHome;

  beforeEach(async () => {
    fixture = await buildRichRepo(uniqueDir('full'));
    originalHome = process.env.HOME;
    process.env.HOME = fixture.fakeHome;
  });
  afterEach(async () => {
    process.env.HOME = originalHome;
    await rm(fixture.root, { recursive: true, force: true });
  });

  it('produces expected top-level shape', async () => {
    const result = await readActiveConfig(fixture.root);
    const keys = Object.keys(result).sort();
    assert.deepEqual(keys, [
      'claudeMd', 'hooks', 'mcpServers', 'meta', 'plugins',
      'settings', 'skills', 'suggestDisables', 'totals', 'warnings',
    ]);
  });

  it('meta contains required fields', async () => {
    const result = await readActiveConfig(fixture.root);
    assert.equal(result.meta.tool, 'config-audit:whats-active');
    assert.equal(result.meta.version, '1.0.0');
    assert.ok(typeof result.meta.generatedAt === 'string');
    assert.equal(result.meta.repoPath, resolve(fixture.root));
    assert.equal(result.meta.gitRoot, resolve(fixture.root));
    assert.equal(result.meta.projectKey, fixture.root);
    assert.ok(typeof result.meta.durationMs === 'number');
  });

  it('settings cascade reflects all three layers', async () => {
    const result = await readActiveConfig(fixture.root);
    const scopes = result.settings.cascade.map(c => c.scope);
    assert.deepEqual(scopes, ['user', 'project', 'local']);
    const user = result.settings.cascade.find(c => c.scope === 'user');
    const project = result.settings.cascade.find(c => c.scope === 'project');
    assert.equal(user.exists, true);
    assert.equal(project.exists, true);
  });

  it('totals.grandTotal equals sum of category subtotals', async () => {
    const result = await readActiveConfig(fixture.root);
    const t = result.totals.estimatedTokens;
    assert.equal(t.grandTotal, t.claudeMd + t.plugins + t.skills + t.mcpServers + t.hooks);
  });

  it('performance budget: durationMs < 2000', async () => {
    const result = await readActiveConfig(fixture.root);
    assert.ok(result.meta.durationMs < 2000,
      `expected < 2000ms, got ${result.meta.durationMs}ms`);
  });

  it('token estimate within ±20% of hand-computed value', async () => {
    const result = await readActiveConfig(fixture.root);
    const expectedClaudeMd = Math.ceil(result.claudeMd.totalBytes / 4);
    const low = Math.floor(expectedClaudeMd * 0.8);
    const high = Math.ceil(expectedClaudeMd * 1.2);
    assert.ok(
      result.totals.estimatedTokens.claudeMd >= low &&
      result.totals.estimatedTokens.claudeMd <= high,
      `claudeMd tokens ${result.totals.estimatedTokens.claudeMd} outside [${low}, ${high}]`,
    );
  });

  it('suggestDisables is null by default, object when flag set', async () => {
    const noFlag = await readActiveConfig(fixture.root);
    assert.equal(noFlag.suggestDisables, null);

    const withFlag = await readActiveConfig(fixture.root, { suggestDisables: true });
    assert.ok(withFlag.suggestDisables && Array.isArray(withFlag.suggestDisables.candidates));
  });

  it('suggestDisables flags disabled MCP servers', async () => {
    const result = await readActiveConfig(fixture.root, { suggestDisables: true });
    const betaCandidate = result.suggestDisables.candidates.find(
      c => c.kind === 'mcp' && c.name === 'beta',
    );
    assert.ok(betaCandidate, 'beta should be flagged as already disabled');
    assert.equal(betaCandidate.confidence, 'high');
  });
});