409 lines
15 KiB
JavaScript
409 lines
15 KiB
JavaScript
// supply-chain-recheck.test.mjs — Tests for the supply chain re-check scanner
|
|
// Tests use fixture lockfiles with known compromised + clean packages.
|
|
// OSV.dev is NOT mocked — blocklist and typosquat tests are deterministic.
|
|
// OSV tests are conditional (skip gracefully if network unavailable).
|
|
|
|
import { describe, it, beforeEach } from 'node:test';
|
|
import assert from 'node:assert/strict';
|
|
import { resolve, join } from 'node:path';
|
|
import { fileURLToPath } from 'node:url';
|
|
import { mkdirSync, writeFileSync, rmSync, existsSync, copyFileSync } from 'node:fs';
|
|
import { resetCounter } from '../../scanners/lib/output.mjs';
|
|
import { scan } from '../../scanners/supply-chain-recheck.mjs';
|
|
|
|
// Directory containing this test file (ESM modules have no __dirname builtin).
const __dirname = fileURLToPath(new URL('.', import.meta.url));
// Read-only fixture lockfiles (clean and compromised variants) copied from here.
const FIXTURES = resolve(__dirname, '../fixtures/supply-chain');
// Scratch directory rebuilt by setupTemp() and removed by cleanupTemp().
const TEMP = resolve(__dirname, '../fixtures/supply-chain-tmp');
|
|
|
|
/**
 * Recreate the scratch directory and populate it with fixture copies.
 *
 * @param {Record<string, string>} files - Map of destination filename
 *   (created inside TEMP) to source fixture filename (read from FIXTURES).
 */
function setupTemp(files) {
  // force:true makes removal a no-op when TEMP is absent, replacing the
  // racy existsSync-then-rmSync check-then-act pair.
  rmSync(TEMP, { recursive: true, force: true });
  mkdirSync(TEMP, { recursive: true });
  for (const [name, source] of Object.entries(files)) {
    copyFileSync(join(FIXTURES, source), join(TEMP, name));
  }
}
|
|
|
|
/**
 * Remove the scratch directory. Safe to call repeatedly or when TEMP
 * was never created.
 */
function cleanupTemp() {
  // force:true suppresses ENOENT, so no existsSync pre-check is needed.
  rmSync(TEMP, { recursive: true, force: true });
}
|
|
|
|
// ============================================================================
|
|
// Scanner interface
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: scanner interface', () => {
  beforeEach(() => resetCounter());

  // Run the scanner against a directory with an empty changed-files list.
  const runScan = (dir = TEMP) => scan(dir, { files: [] });

  it('returns scannerResult envelope with required fields', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-clean.json' });
    try {
      const res = await runScan();
      assert.ok(res.scanner, 'has scanner field');
      assert.ok(res.status, 'has status field');
      // `in` checks presence without tripping over falsy values (0, []).
      for (const field of ['findings', 'counts', 'duration_ms', 'files_scanned']) {
        assert.ok(field in res, `has ${field} field`);
      }
    } finally {
      cleanupTemp();
    }
  });

  it('returns status skipped when no lockfiles present', async () => {
    const emptyDir = join(TEMP, 'empty');
    mkdirSync(emptyDir, { recursive: true });
    try {
      const res = await runScan(emptyDir);
      assert.equal(res.status, 'skipped');
      assert.equal(res.findings.length, 0);
    } finally {
      cleanupTemp();
    }
  });

  it('returns status ok when lockfiles are present', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-clean.json' });
    try {
      assert.equal((await runScan()).status, 'ok');
    } finally {
      cleanupTemp();
    }
  });

  it('scanner name is supply-chain-recheck', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-clean.json' });
    try {
      assert.equal((await runScan()).scanner, 'supply-chain-recheck');
    } finally {
      cleanupTemp();
    }
  });

  it('counts files scanned correctly', async () => {
    setupTemp({
      'package-lock.json': 'package-lock-clean.json',
      'requirements.txt': 'requirements-clean.txt',
    });
    try {
      assert.equal((await runScan()).files_scanned, 2);
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Blocklist detection (npm)
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: npm blocklist', () => {
  beforeEach(() => resetCounter());

  // Curried predicate: does this finding flag `pkg` as compromised?
  const flagsCompromised = (pkg) => (f) =>
    f.title.includes('Compromised') && f.title.includes(pkg);

  it('detects compromised event-stream@3.3.6 in package-lock.json', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-compromised.json' });
    try {
      const res = await scan(TEMP, { files: [] });
      const compromised = res.findings.filter(flagsCompromised('event-stream'));
      const titles = res.findings.map((f) => f.title).join('; ');
      assert.ok(compromised.length >= 1, `Expected compromised finding for event-stream, got ${titles}`);
      assert.equal(compromised[0].severity, 'critical');
      assert.equal(compromised[0].scanner, 'SCR');
    } finally {
      cleanupTemp();
    }
  });

  it('does not flag clean packages in package-lock.json', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-clean.json' });
    try {
      const res = await scan(TEMP, { files: [] });
      const compromised = res.findings.filter((f) => f.title.includes('Compromised'));
      assert.equal(compromised.length, 0, `Unexpected compromised findings: ${compromised.map((f) => f.title).join('; ')}`);
    } finally {
      cleanupTemp();
    }
  });

  it('detects compromised colors@1.4.1 in yarn.lock', async () => {
    setupTemp({ 'yarn.lock': 'yarn-compromised.lock' });
    try {
      const res = await scan(TEMP, { files: [] });
      const compromised = res.findings.filter(flagsCompromised('colors'));
      assert.ok(compromised.length >= 1, `Expected compromised finding for colors`);
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Blocklist detection (pip)
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: pip blocklist', () => {
  beforeEach(() => resetCounter());

  // Curried predicate: does this finding flag `pkg` as compromised?
  const flagsCompromised = (pkg) => (f) =>
    f.title.includes('Compromised') && f.title.includes(pkg);

  it('detects compromised colourama in requirements.txt', async () => {
    setupTemp({ 'requirements.txt': 'requirements-compromised.txt' });
    try {
      const res = await scan(TEMP, { files: [] });
      const hits = res.findings.filter(flagsCompromised('colourama'));
      assert.ok(hits.length >= 1, `Expected compromised finding for colourama`);
      assert.equal(hits[0].severity, 'critical');
    } finally {
      cleanupTemp();
    }
  });

  it('detects compromised djanga in requirements.txt', async () => {
    setupTemp({ 'requirements.txt': 'requirements-compromised.txt' });
    try {
      const res = await scan(TEMP, { files: [] });
      const hits = res.findings.filter(flagsCompromised('djanga'));
      assert.ok(hits.length >= 1, `Expected compromised finding for djanga`);
    } finally {
      cleanupTemp();
    }
  });

  it('detects compromised colourama in Pipfile.lock', async () => {
    setupTemp({ 'Pipfile.lock': 'Pipfile.lock' });
    try {
      const res = await scan(TEMP, { files: [] });
      const hits = res.findings.filter(flagsCompromised('colourama'));
      assert.ok(hits.length >= 1, `Expected compromised finding for colourama in Pipfile.lock`);
    } finally {
      cleanupTemp();
    }
  });

  it('does not flag clean requirements.txt', async () => {
    setupTemp({ 'requirements.txt': 'requirements-clean.txt' });
    try {
      const res = await scan(TEMP, { files: [] });
      const hits = res.findings.filter((f) => f.title.includes('Compromised'));
      assert.equal(hits.length, 0);
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Typosquat detection
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: typosquat detection', () => {
  beforeEach(() => resetCounter());

  it('detects npm typosquats from lockfile deps', async () => {
    // Hand-build a lockfile with one typosquat dep ("expresss") next to a
    // legitimate one ("lodash") — setupTemp() only copies fixtures.
    if (existsSync(TEMP)) rmSync(TEMP, { recursive: true });
    mkdirSync(TEMP, { recursive: true });
    const lockfile = {
      name: 'test',
      version: '1.0.0',
      lockfileVersion: 3,
      packages: {
        '': { name: 'test', version: '1.0.0' },
        'node_modules/expresss': { version: '4.18.0' },
        'node_modules/lodash': { version: '4.17.21' },
      },
    };
    writeFileSync(join(TEMP, 'package-lock.json'), JSON.stringify(lockfile));
    try {
      const res = await scan(TEMP, { files: [] });
      const typo = res.findings.filter((f) => f.title.toLowerCase().includes('typosquat'));
      assert.ok(typo.length >= 1, `Expected typosquat finding for "expresss", got: ${res.findings.map(f => f.title).join('; ')}`);
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Finding format
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: finding format', () => {
  beforeEach(() => resetCounter());

  it('all findings have SCR scanner prefix', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-compromised.json' });
    try {
      const res = await scan(TEMP, { files: [] });
      res.findings.forEach((f) => {
        assert.equal(f.scanner, 'SCR', `Finding "${f.title}" has wrong scanner: ${f.scanner}`);
      });
    } finally {
      cleanupTemp();
    }
  });

  it('all findings have OWASP reference', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-compromised.json' });
    try {
      const res = await scan(TEMP, { files: [] });
      res.findings.forEach((f) => {
        assert.ok(f.owasp, `Finding "${f.title}" missing OWASP reference`);
      });
    } finally {
      cleanupTemp();
    }
  });

  it('finding IDs follow DS-SCR-NNN pattern', async () => {
    setupTemp({ 'package-lock.json': 'package-lock-compromised.json' });
    try {
      const res = await scan(TEMP, { files: [] });
      res.findings.forEach((f) => {
        assert.match(f.id, /^DS-SCR-\d{3}$/, `Finding ID "${f.id}" doesn't match pattern`);
      });
    } finally {
      cleanupTemp();
    }
  });

  it('severity counts match finding counts', async () => {
    setupTemp({
      'package-lock.json': 'package-lock-compromised.json',
      'requirements.txt': 'requirements-compromised.txt',
    });
    try {
      const res = await scan(TEMP, { files: [] });
      // Recount severities independently and compare against the scanner's tally.
      const counted = res.findings.reduce(
        (acc, f) => {
          acc[f.severity]++;
          return acc;
        },
        { critical: 0, high: 0, medium: 0, low: 0, info: 0 },
      );
      assert.deepEqual(res.counts, counted, 'Counts should match findings');
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Multiple lockfiles
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-recheck: multiple lockfiles', () => {
  beforeEach(() => resetCounter());

  it('scans both npm and pip lockfiles in same directory', async () => {
    setupTemp({
      'package-lock.json': 'package-lock-compromised.json',
      'requirements.txt': 'requirements-compromised.txt',
    });
    try {
      const res = await scan(TEMP, { files: [] });
      // Group findings by the lockfile they came from.
      const byFile = (name) => res.findings.filter((f) => f.file === name);
      assert.ok(byFile('package-lock.json').length > 0, 'Should have npm findings');
      assert.ok(byFile('requirements.txt').length > 0, 'Should have pip findings');
    } finally {
      cleanupTemp();
    }
  });

  it('reports total files scanned across all lockfile types', async () => {
    setupTemp({
      'package-lock.json': 'package-lock-compromised.json',
      'requirements.txt': 'requirements-compromised.txt',
      'Pipfile.lock': 'Pipfile.lock',
    });
    try {
      const res = await scan(TEMP, { files: [] });
      assert.ok(res.files_scanned >= 3, `Expected >= 3 files scanned, got ${res.files_scanned}`);
    } finally {
      cleanupTemp();
    }
  });
});
|
|
|
|
// ============================================================================
|
|
// Shared module (supply-chain-data.mjs)
|
|
// ============================================================================
|
|
|
|
describe('supply-chain-data: shared module', () => {
  // Dynamic import is cached by the module loader, so each test can load
  // lazily without re-reading the module from disk.
  const loadData = () => import('../../scanners/lib/supply-chain-data.mjs');

  it('isCompromised returns true for wildcard blocklist entries', async () => {
    const { isCompromised, PIP_COMPROMISED } = await loadData();
    assert.ok(isCompromised(PIP_COMPROMISED, 'colourama', '0.4.6'));
    assert.ok(isCompromised(PIP_COMPROMISED, 'colourama', null));
  });

  it('isCompromised returns true for specific version matches', async () => {
    const { isCompromised, NPM_COMPROMISED } = await loadData();
    assert.ok(isCompromised(NPM_COMPROMISED, 'event-stream', '3.3.6'));
    assert.ok(!isCompromised(NPM_COMPROMISED, 'event-stream', '3.3.5'));
  });

  it('isCompromised returns false for unknown packages', async () => {
    const { isCompromised, NPM_COMPROMISED } = await loadData();
    assert.ok(!isCompromised(NPM_COMPROMISED, 'express', '4.18.2'));
  });

  it('parseSpec handles scoped npm packages', async () => {
    const { parseSpec } = await loadData();
    const spec = parseSpec('@scope/pkg@1.0.0');
    assert.equal(spec.name, '@scope/pkg');
    assert.equal(spec.version, '1.0.0');
  });

  it('parseSpec handles unversioned packages', async () => {
    const { parseSpec } = await loadData();
    const spec = parseSpec('lodash');
    assert.equal(spec.name, 'lodash');
    assert.equal(spec.version, null);
  });

  it('parsePipSpec handles == pinned versions', async () => {
    const { parsePipSpec } = await loadData();
    const spec = parsePipSpec('flask==2.3.0');
    assert.equal(spec.name, 'flask');
    assert.equal(spec.version, '2.3.0');
  });

  it('parsePipSpec handles unpinned packages', async () => {
    const { parsePipSpec } = await loadData();
    const spec = parsePipSpec('requests>=2.0');
    assert.equal(spec.name, 'requests');
    assert.equal(spec.version, null);
  });

  it('extractOSVSeverity handles database_specific.severity', async () => {
    const { extractOSVSeverity } = await loadData();
    assert.equal(extractOSVSeverity({ database_specific: { severity: 'critical' } }), 'CRITICAL');
    assert.equal(extractOSVSeverity({ database_specific: { severity: 'high' } }), 'HIGH');
  });

  it('extractOSVSeverity falls back to CVSS score', async () => {
    const { extractOSVSeverity } = await loadData();
    assert.equal(extractOSVSeverity({ severity: [{ score: 9.5 }] }), 'CRITICAL');
    assert.equal(extractOSVSeverity({ severity: [{ score: 7.5 }] }), 'HIGH');
    assert.equal(extractOSVSeverity({ severity: [{ score: 5.0 }] }), 'MEDIUM');
  });

  it('extractOSVSeverity defaults to HIGH for GHSA/CVE IDs', async () => {
    const { extractOSVSeverity } = await loadData();
    assert.equal(extractOSVSeverity({ id: 'GHSA-xxxx-xxxx' }), 'HIGH');
    assert.equal(extractOSVSeverity({ id: 'CVE-2024-1234' }), 'HIGH');
  });

  it('OSV_ECOSYSTEM_MAP covers expected ecosystems', async () => {
    const { OSV_ECOSYSTEM_MAP } = await loadData();
    const expected = { npm: 'npm', pip: 'PyPI', cargo: 'crates.io', gem: 'RubyGems', go: 'Go' };
    for (const [key, eco] of Object.entries(expected)) {
      assert.equal(OSV_ECOSYSTEM_MAP[key], eco);
    }
  });
});
|