Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
64 changes: 58 additions & 6 deletions packages/zip/lib/extract.js
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
const { Transform } = require('stream');
const errors = require('@tryghost/errors');

const defaultOptions = {};
Expand Down Expand Up @@ -70,12 +71,8 @@ function getEntryUncompressedSize(entry, limits) {
return entry.uncompressedSize;
}

function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
if (observedBytes <= limitBytes) {
return;
}

throw new errors.UnsupportedMediaTypeError({
function entryTooLargeError(entry, observedBytes, limitBytes) {
return new errors.UnsupportedMediaTypeError({
message: 'Zip entry exceeds maximum uncompressed size.',
context: 'The zip contains an entry that exceeds the maximum uncompressed size.',
code: 'ENTRY_TOO_LARGE',
Expand All @@ -87,6 +84,12 @@ function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
});
}

// Pre-flight guard: reject an entry whose observed uncompressed size is
// already known to exceed the configured per-entry limit. No-op when the
// size is within bounds.
function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
    if (observedBytes <= limitBytes) {
        return;
    }

    throw entryTooLargeError(entry, observedBytes, limitBytes);
}

function throwOnTotalTooLarge(entry, observedBytes, limitBytes, entriesProcessed) {
if (observedBytes <= limitBytes) {
return;
Expand Down Expand Up @@ -185,6 +188,9 @@ module.exports = async (zipToExtract, destination, options) => {

throwOnSymlinks(entry);
throwOnLargeFilenames(entry);

installStreamingCounter(zipfile, limits);

if (originalOnEntry) {
originalOnEntry(entry, zipfile);
}
Expand All @@ -194,3 +200,49 @@ module.exports = async (zipToExtract, destination, options) => {

return { path: destination };
};

// Enforce `limits.perEntryUncompressedBytes` against the actual decompressed
// bytes streamed for each entry, not just the central-directory metadata —
// otherwise an archive whose header lies about its uncompressed size slips
// past the pre-flight check above and (worse) hangs `extract()` indefinitely:
// yauzl's built-in `AssertByteCountStream` overrides its own `destroy` so the
// mid-stream error never surfaces to extract-zip's `pipeline()`.
//
// We turn yauzl's broken counter off (`validateEntrySizes = false`) and wrap
// the read stream with our own counter, whose errors propagate cleanly.
function installStreamingCounter(zipfile, limits) {
    // Idempotent: the entry handler calls this once per entry, but wrapping
    // `openReadStream` more than once would stack counters on counters.
    if (zipfile.__streamingCounterInstalled || typeof zipfile.openReadStream !== 'function') {
        return;
    }
    zipfile.__streamingCounterInstalled = true;
    zipfile.validateEntrySizes = false;

    const originalOpenReadStream = zipfile.openReadStream.bind(zipfile);
    // Same signature as yauzl's openReadStream: (entry, [options], callback).
    zipfile.openReadStream = function (entry, optsOrCb, maybeCb) {
        const cb = typeof optsOrCb === 'function' ? optsOrCb : maybeCb;
        const readOpts = typeof optsOrCb === 'function' ? {} : optsOrCb;
        originalOpenReadStream(entry, readOpts, (err, readStream) => {
            if (err) {
                return cb(err);
            }
            let observedBytes = 0;
            const counter = new Transform({
                transform(chunk, _encoding, transformCb) {
                    observedBytes += chunk.length;
                    if (observedBytes > limits.perEntryUncompressedBytes) {
                        return transformCb(
                            entryTooLargeError(
                                entry,
                                observedBytes,
                                limits.perEntryUncompressedBytes,
                            ),
                        );
                    }
                    transformCb(null, chunk);
                },
            });
            // Propagate failure in BOTH directions: a source error must reach
            // the consumer, and an over-limit abort must also tear down the
            // source — `.pipe()` never destroys its source on destination
            // error, which would leave the zip entry read in flight after we
            // reject.
            readStream.on('error', (readErr) => counter.destroy(readErr));
            counter.on('error', () => readStream.destroy());
            cb(null, readStream.pipe(counter));
        });
    };
}
35 changes: 35 additions & 0 deletions packages/zip/test/zip.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,18 @@ function createZipWithEntries(zipPath, entries) {
});
}

// Rewrite every central-directory header's uncompressedSize field so the
// archive's declared metadata understates its real payload — lets tests
// exercise the streaming defence that the metadata-only check cannot catch.
function forgeCentralUncompressedSize(zipPath, fake) {
    // Central directory file header signature ("PK\x01\x02", little-endian).
    const CENTRAL_DIR_SIGNATURE = 0x02014b50;
    // uncompressedSize sits 24 bytes into each central directory header.
    const UNCOMPRESSED_SIZE_OFFSET = 24;

    const buf = fs.readFileSync(zipPath);
    for (let offset = 0; offset + 4 < buf.length; offset += 1) {
        if (buf.readUInt32LE(offset) !== CENTRAL_DIR_SIGNATURE) {
            continue;
        }
        buf.writeUInt32LE(fake, offset + UNCOMPRESSED_SIZE_OFFSET);
    }
    fs.writeFileSync(zipPath, buf);
}

describe('Compress and Extract should be opposite functions', function () {
let symlinkPath, themeFolder, zipDestination, unzipDestination;

Expand Down Expand Up @@ -330,6 +342,29 @@ describe('Extract zip', function () {
}
});

it('rejects a zip whose central directory understates an entry size (streaming defence)', async function () {
// The metadata pre-flight check sees a tiny declared size and lets it
// through; the streaming counter sees the real 1 MB payload and rejects.
await createZipWithEntries(zipDestination, [
{ name: 'lying.txt', content: Buffer.alloc(1024 * 1024, 'a') },
]);
forgeCentralUncompressedSize(zipDestination, 5);

await assert.rejects(
() =>
extract(zipDestination, unzipDestination, {
limits: { perEntryUncompressedBytes: 100 },
}),
(err) => {
assert.equal(err.code, 'ENTRY_TOO_LARGE');
assert.equal(err.errorDetails.entryName, 'lying.txt');
assert.equal(err.errorDetails.limitBytes, 100);
assert.equal(err.errorDetails.observedBytes > 100, true);
return true;
},
);
});

it('throws when an entry exceeds the per-entry uncompressed size limit', async function () {
await createZipWithEntries(zipDestination, [{ name: 'big-file.txt', content: '12345' }]);

Expand Down