Pre-installation verification of VS Code extensions via URL — fetch a remote VSIX, extract it in a hardened sandbox, and run the existing IDE scanner pipeline against it. No npm dependencies. Sources: - VS Code Marketplace (publisher.gallery.vsassets.io direct download) - OpenVSX (open-vsx.org official API) - Direct .vsix HTTPS URLs Defenses: - HTTPS-only, TLS verified, manual redirect with per-source host whitelist - 30s total timeout via AbortController - 50MB compressed cap, 500MB uncompressed, 100x expansion ratio - Zero-dep ZIP extractor: zip-slip, absolute paths, drive letters, NUL bytes, symlinks (Unix mode 0xA000), depth limits, ZIP64 rejected, encrypted rejected - SHA-256 streamed during fetch, surfaced in meta.source - Temp dir cleanup in all paths (try/finally) Files: - scanners/lib/vsix-fetch.mjs (HTTPS fetcher, host whitelist, streaming SHA-256) - scanners/lib/zip-extract.mjs (zero-dep parser with hardening caps) - knowledge/marketplace-api-notes.md (endpoint reference) - 3 test files (48 tests added: vsix-fetch, zip-extract, ide-extension-url) Tests: 1296 → 1344 (all green). Co-Authored-By: Claude Opus 4.7 <noreply@anthropic.com>
267 lines · 10 KiB · JavaScript
// zip-extract.test.mjs — Unit tests for the zero-dep ZIP extractor.
|
|
|
|
import { describe, it } from 'node:test';
|
|
import assert from 'node:assert/strict';
|
|
import { mkdtemp, rm, readFile, readdir } from 'node:fs/promises';
|
|
import { tmpdir } from 'node:os';
|
|
import { join } from 'node:path';
|
|
import { deflateRawSync } from 'node:zlib';
|
|
import { extractToDir, listEntries, ZipError, __testing } from '../../scanners/lib/zip-extract.mjs';
|
|
import { buildZip, unixModeAttr, MODE_SYMLINK } from '../lib/build-zip.mjs';
|
|
|
|
const { validateEntryName, isSymlink, DEFAULT_CAPS } = __testing;
|
|
|
|
/**
 * Runs `fn` with a freshly created temporary directory and removes that
 * directory (recursively, forced) once `fn` settles — whether it resolved
 * or rejected. Returns whatever `fn` resolves to; rejections propagate.
 *
 * @param {(dir: string) => Promise<any>} fn - async body receiving the dir path
 * @returns {Promise<any>} the value `fn` resolved with
 */
async function withTempDir(fn) {
  const scratch = await mkdtemp(join(tmpdir(), 'zip-test-'));
  let result;
  try {
    result = await fn(scratch);
  } finally {
    // Cleanup runs on both success and failure paths.
    await rm(scratch, { recursive: true, force: true });
  }
  return result;
}
|
|
|
|
describe('validateEntryName', () => {
  it('accepts a normal nested path', () => {
    const result = validateEntryName('extension/package.json', DEFAULT_CAPS);
    assert.ok(result);
    assert.ok(result.includes('package.json'));
  });

  it('returns null for directory entries', () => {
    const result = validateEntryName('extension/', DEFAULT_CAPS);
    assert.equal(result, null);
  });

  // Hostile entry names that must throw, paired with the expected
  // error-message pattern. Each row registers its own test case.
  const rejected = [
    ['rejects parent traversal', '../etc/passwd', /traversal/],
    ['rejects deep parent traversal', 'extension/../../escape', /traversal/],
    ['rejects POSIX absolute paths', '/etc/passwd', /absolute/],
    ['rejects Windows drive letters', 'C:\\Windows\\sys', /drive-letter|absolute/],
    ['rejects backslash absolute paths', '\\foo', /absolute/],
    ['rejects NUL bytes', 'foo\u0000bar', /NUL/],
    ['rejects empty entry names', '', /empty/],
  ];
  for (const [title, entryName, pattern] of rejected) {
    it(title, () => {
      assert.throws(() => validateEntryName(entryName, DEFAULT_CAPS), pattern);
    });
  }

  it('rejects very deep paths beyond depth cap', () => {
    // 25 segments against a maxDepth of 20.
    const deepName = new Array(25).fill('a').join('/');
    assert.throws(
      () => validateEntryName(deepName, { ...DEFAULT_CAPS, maxDepth: 20 }),
      /depth/,
    );
  });

  it('normalizes backslashes in path', () => {
    const result = validateEntryName('extension\\sub\\file.txt', DEFAULT_CAPS);
    assert.ok(result);
    assert.ok(result.includes('sub') || result.includes('file.txt'));
  });
});
|
|
|
|
describe('isSymlink', () => {
  // versionMadeBy: high byte is the originating OS (3 = Unix, 0 = MS-DOS),
  // low byte is the PKZIP spec version. Unix mode bits live in externalAttr.
  const MADE_ON_UNIX = (3 << 8) | 20;
  const MADE_ON_DOS = (0 << 8) | 20;

  it('detects unix-made symlink mode bits', () => {
    const entry = { versionMadeBy: MADE_ON_UNIX, externalAttr: unixModeAttr(MODE_SYMLINK) };
    assert.equal(isSymlink(entry), true);
  });

  it('ignores mode bits when versionMadeBy os != Unix', () => {
    // Symlink mode bits are only meaningful when the archive was made on Unix.
    const entry = { versionMadeBy: MADE_ON_DOS, externalAttr: unixModeAttr(MODE_SYMLINK) };
    assert.equal(isSymlink(entry), false);
  });

  it('returns false for regular file', () => {
    // 0x81A4 = regular file, mode 0644.
    const entry = { versionMadeBy: MADE_ON_UNIX, externalAttr: unixModeAttr(0x81A4) };
    assert.equal(isSymlink(entry), false);
  });
});
|
|
|
|
describe('extractToDir — happy path', () => {
  it('extracts a small ZIP with a nested file', async () => {
    const buf = buildZip([
      { name: 'extension/package.json', data: '{"hello":"world"}' },
      { name: 'extension/extension.js', data: 'console.log(1)' },
    ]);
    await withTempDir(async (dir) => {
      const r = await extractToDir(buf, dir);
      assert.equal(r.entries, 2);
      const pkg = await readFile(join(dir, 'extension/package.json'), 'utf8');
      assert.match(pkg, /hello/);
    });
  });

  it('extracts deflate-compressed entries', async () => {
    // buildZip only supports STORE, so hand-roll a minimal single-entry
    // archive with method=8 (DEFLATE). Payload is pseudo-random bytes so the
    // compression ratio stays well under the expansion cap.
    const original = Buffer.alloc(2000);
    for (let i = 0; i < original.length; i++) original[i] = (i * 73 + 11) & 0xFF;
    const compressed = deflateRawSync(original);
    const nameBuf = Buffer.from('extension/big.txt', 'utf8');
    // Local file header (30 bytes + name).
    const lfh = Buffer.alloc(30);
    lfh.writeUInt32LE(0x04034b50, 0);         // LFH signature
    lfh.writeUInt16LE(20, 4);                 // version needed to extract
    lfh.writeUInt16LE(0, 6);                  // general-purpose flags
    lfh.writeUInt16LE(8, 8);                  // method: DEFLATE
    lfh.writeUInt32LE(0, 14);                 // CRC unused (we don't validate)
    lfh.writeUInt32LE(compressed.length, 18); // compressed size
    lfh.writeUInt32LE(original.length, 22);   // uncompressed size
    lfh.writeUInt16LE(nameBuf.length, 26);    // name length
    lfh.writeUInt16LE(0, 28);                 // extra-field length
    // Central directory record (46 bytes + name).
    const cd = Buffer.alloc(46);
    cd.writeUInt32LE(0x02014b50, 0);          // CD signature
    cd.writeUInt16LE(20, 4);                  // version made by
    cd.writeUInt16LE(20, 6);                  // version needed
    cd.writeUInt16LE(0, 8);                   // flags
    cd.writeUInt16LE(8, 10);                  // method: DEFLATE
    cd.writeUInt32LE(0, 16);                  // CRC
    cd.writeUInt32LE(compressed.length, 20);  // compressed size
    cd.writeUInt32LE(original.length, 24);    // uncompressed size
    cd.writeUInt16LE(nameBuf.length, 28);     // name length
    cd.writeUInt32LE(0, 38);                  // external attributes
    cd.writeUInt32LE(0, 42);                  // LFH at offset 0
    // End-of-central-directory record (22 bytes).
    const eocd = Buffer.alloc(22);
    eocd.writeUInt32LE(0x06054b50, 0);        // EOCD signature
    eocd.writeUInt16LE(1, 8);                 // entries on this disk
    eocd.writeUInt16LE(1, 10);                // total entries
    eocd.writeUInt32LE(46 + nameBuf.length, 12);                     // CD size
    eocd.writeUInt32LE(30 + nameBuf.length + compressed.length, 16); // CD offset
    const buf = Buffer.concat([lfh, nameBuf, compressed, cd, nameBuf, eocd]);

    await withTempDir(async (dir) => {
      const r = await extractToDir(buf, dir);
      assert.equal(r.entries, 1);
      const out = await readFile(join(dir, 'extension/big.txt'));
      assert.equal(out.length, original.length);
      // Byte-exact comparison. A utf8 round-trip (toString) is lossy here:
      // every invalid UTF-8 sequence decodes to U+FFFD, so two different
      // binary buffers could compare equal.
      assert.ok(out.equals(original), 'extracted bytes must match original');
    });
  });

  it('lists entries without extracting', () => {
    const buf = buildZip([{ name: 'a.txt', data: 'x' }, { name: 'b.txt', data: 'yy' }]);
    const out = listEntries(buf);
    assert.equal(out.length, 2);
    assert.equal(out[0].name, 'a.txt');
    assert.equal(out[1].uncompSize, 2);
  });
});
|
|
|
|
describe('extractToDir — adversarial', () => {
  /**
   * Hand-rolls a minimal single-entry ZIP (LFH + payload + central directory
   * + EOCD) so tests can declare arbitrary compression methods and size
   * fields that buildZip's STORE-only writer cannot produce.
   *
   * @param {string} entryName - entry name stored in both headers
   * @param {Buffer} payload - raw (possibly compressed) entry bytes
   * @param {{ method: number, uncompSize: number }} opts - compression method
   *   id and the *declared* uncompressed size (may lie, for bomb tests)
   * @returns {Buffer} complete archive bytes
   */
  function rawSingleEntryZip(entryName, payload, { method, uncompSize }) {
    const nameBuf = Buffer.from(entryName, 'utf8');
    const lfh = Buffer.alloc(30);
    lfh.writeUInt32LE(0x04034b50, 0);        // LFH signature
    lfh.writeUInt16LE(20, 4);                // version needed to extract
    lfh.writeUInt16LE(method, 8);            // compression method
    lfh.writeUInt32LE(payload.length, 18);   // compressed size
    lfh.writeUInt32LE(uncompSize, 22);       // declared uncompressed size
    lfh.writeUInt16LE(nameBuf.length, 26);   // name length
    const cd = Buffer.alloc(46);
    cd.writeUInt32LE(0x02014b50, 0);         // CD signature
    cd.writeUInt16LE(20, 4);                 // version made by
    cd.writeUInt16LE(20, 6);                 // version needed
    cd.writeUInt16LE(method, 10);            // compression method
    cd.writeUInt32LE(payload.length, 20);    // compressed size
    cd.writeUInt32LE(uncompSize, 24);        // declared uncompressed size
    cd.writeUInt16LE(nameBuf.length, 28);    // name length
    cd.writeUInt32LE(0, 42);                 // LFH at offset 0
    const eocd = Buffer.alloc(22);
    eocd.writeUInt32LE(0x06054b50, 0);       // EOCD signature
    eocd.writeUInt16LE(1, 8);                // entries on this disk
    eocd.writeUInt16LE(1, 10);               // total entries
    eocd.writeUInt32LE(46 + nameBuf.length, 12);                  // CD size
    eocd.writeUInt32LE(30 + nameBuf.length + payload.length, 16); // CD offset
    return Buffer.concat([lfh, nameBuf, payload, cd, nameBuf, eocd]);
  }

  it('rejects zip-slip via parent traversal', async () => {
    const buf = buildZip([{ name: '../escape.txt', data: 'pwned' }]);
    await withTempDir(async (dir) => {
      await assert.rejects(() => extractToDir(buf, dir), /traversal/);
      const items = await readdir(dir);
      assert.equal(items.length, 0, 'no files should have been written');
    });
  });

  it('rejects zip-slip via absolute POSIX path', async () => {
    const buf = buildZip([{ name: '/tmp/leak.txt', data: 'pwned' }]);
    await withTempDir(async (dir) => {
      await assert.rejects(() => extractToDir(buf, dir), /absolute|traversal/);
    });
  });

  it('rejects symlink entries', async () => {
    const buf = buildZip([{
      name: 'evil-link',
      data: '../../etc/passwd', // symlink target — would escape the sandbox
      versionMadeBy: (3 << 8) | 20,
      externalAttr: unixModeAttr(MODE_SYMLINK),
    }]);
    await withTempDir(async (dir) => {
      await assert.rejects(() => extractToDir(buf, dir), /symlink/);
    });
  });

  it('rejects entries beyond maxEntries cap', async () => {
    const entries = Array.from({ length: 5 }, (_, i) => ({ name: `f${i}.txt`, data: 'x' }));
    const buf = buildZip(entries);
    await withTempDir(async (dir) => {
      await assert.rejects(
        () => extractToDir(buf, dir, { caps: { ...DEFAULT_CAPS, maxEntries: 3 } }),
        /too many/,
      );
    });
  });

  it('rejects zip-bomb: STORED entry exceeding maxUncompressedBytes', async () => {
    const buf = buildZip([{
      name: 'bomb.txt',
      data: Buffer.alloc(2000),
      declaredUncompSize: 2000,
    }]);
    await withTempDir(async (dir) => {
      await assert.rejects(
        () => extractToDir(buf, dir, { caps: { ...DEFAULT_CAPS, maxUncompressedBytes: 1000 } }),
        /maxUncompressedBytes/,
      );
    });
  });

  it('rejects zip-bomb: deflate expansion ratio exceeds cap', async () => {
    // 20KB of zeros compresses to a handful of bytes, so the declared
    // uncompressed/compressed ratio far exceeds the 5x cap below.
    const original = Buffer.alloc(20_000);
    const compressed = deflateRawSync(original);
    const buf = rawSingleEntryZip('bomb.bin', compressed, {
      method: 8, // DEFLATE
      uncompSize: original.length,
    });
    await withTempDir(async (dir) => {
      await assert.rejects(
        () => extractToDir(buf, dir, { caps: { ...DEFAULT_CAPS, maxExpansionRatio: 5 } }),
        /expansion ratio|exceeds/,
      );
    });
  });

  it('rejects unknown compression methods', async () => {
    // method=6 is Implode — legacy and unsupported by the extractor.
    const buf = rawSingleEntryZip('weird.bin', Buffer.from('x'), {
      method: 6,
      uncompSize: 1,
    });
    await withTempDir(async (dir) => {
      await assert.rejects(() => extractToDir(buf, dir), /unsupported compression/);
    });
  });

  it('throws ZipError when EOCD is missing', async () => {
    const garbage = Buffer.from('not a zip file at all');
    await withTempDir(async (dir) => {
      // Assert both the error type (as the test title promises) and the
      // message — previously only the message regex was checked.
      await assert.rejects(
        () => extractToDir(garbage, dir),
        (err) => err instanceof ZipError && /EOCD/.test(String(err.message)),
      );
    });
  });
});
|