diff --git a/packages/zip/lib/extract.js b/packages/zip/lib/extract.js
index 4cc6d8b80..209ec11e0 100644
--- a/packages/zip/lib/extract.js
+++ b/packages/zip/lib/extract.js
@@ -1,3 +1,4 @@
+const { Transform } = require('stream');
 const errors = require('@tryghost/errors');
 
 const defaultOptions = {};
@@ -70,12 +71,8 @@ function getEntryUncompressedSize(entry, limits) {
     return entry.uncompressedSize;
 }
 
-function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
-    if (observedBytes <= limitBytes) {
-        return;
-    }
-
-    throw new errors.UnsupportedMediaTypeError({
+function entryTooLargeError(entry, observedBytes, limitBytes) {
+    return new errors.UnsupportedMediaTypeError({
         message: 'Zip entry exceeds maximum uncompressed size.',
         context: 'The zip contains an entry that exceeds the maximum uncompressed size.',
         code: 'ENTRY_TOO_LARGE',
@@ -87,6 +84,12 @@ function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
     });
 }
 
+function throwOnEntryTooLarge(entry, observedBytes, limitBytes) {
+    if (observedBytes > limitBytes) {
+        throw entryTooLargeError(entry, observedBytes, limitBytes);
+    }
+}
+
 function throwOnTotalTooLarge(entry, observedBytes, limitBytes, entriesProcessed) {
     if (observedBytes <= limitBytes) {
         return;
@@ -185,6 +188,9 @@ module.exports = async (zipToExtract, destination, options) => {
             throwOnSymlinks(entry);
             throwOnLargeFilenames(entry);
+
+            installStreamingCounter(zipfile, limits);
+
             if (originalOnEntry) {
                 originalOnEntry(entry, zipfile);
             }
 
@@ -194,3 +200,49 @@ module.exports = async (zipToExtract, destination, options) => {
 
     return { path: destination };
 };
+
+// Enforce `limits.perEntryUncompressedBytes` against the actual decompressed
+// bytes streamed for each entry, not just the central-directory metadata;
+// otherwise an archive whose header lies about its uncompressed size slips
+// past the pre-flight check above and (worse) hangs `extract()` indefinitely:
+// yauzl's built-in `AssertByteCountStream` overrides its own `destroy`, so the
+// mid-stream error never surfaces to extract-zip's `pipeline()`.
+//
+// We turn yauzl's broken counter off (`validateEntrySizes = false`) and wrap
+// the read stream with our own counter, whose errors propagate cleanly.
+function installStreamingCounter(zipfile, limits) {
+    if (zipfile.__streamingCounterInstalled || typeof zipfile.openReadStream !== 'function') {
+        return;
+    }
+    zipfile.__streamingCounterInstalled = true;
+    zipfile.validateEntrySizes = false;
+
+    const originalOpenReadStream = zipfile.openReadStream.bind(zipfile);
+    zipfile.openReadStream = function (entry, optsOrCb, maybeCb) {
+        // yauzl allows openReadStream(entry, cb) and openReadStream(entry, options, cb).
+        const cb = typeof optsOrCb === 'function' ? optsOrCb : maybeCb;
+        const readOpts = typeof optsOrCb === 'function' ? {} : optsOrCb;
+        originalOpenReadStream(entry, readOpts, (err, readStream) => {
+            if (err) {
+                return cb(err);
+            }
+            let observedBytes = 0;
+            const counter = new Transform({
+                transform(chunk, _encoding, transformCb) {
+                    observedBytes += chunk.length;
+                    if (observedBytes > limits.perEntryUncompressedBytes) {
+                        return transformCb(
+                            entryTooLargeError(
+                                entry,
+                                observedBytes,
+                                limits.perEntryUncompressedBytes,
+                            ),
+                        );
+                    }
+                    transformCb(null, chunk);
+                },
+            });
+            readStream.on('error', (readErr) => counter.destroy(readErr));
+            cb(null, readStream.pipe(counter));
+        });
+    };
+}
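Reviewer note (not part of the patch): a minimal sketch of how a caller would
exercise the new limit. The relative require path, the function name and the
50 MB figure are illustrative assumptions; the `limits` option shape, the
return value and the ENTRY_TOO_LARGE error fields are taken from the diff.

    // Hypothetical caller. lib/extract.js exports the async function directly,
    // so a plain relative require is used here for illustration only.
    const extract = require('./packages/zip/lib/extract');

    async function importArchive(zipPath, destination) {
        try {
            // Cap each entry at an assumed 50 MB of *uncompressed* output.
            const { path } = await extract(zipPath, destination, {
                limits: { perEntryUncompressedBytes: 50 * 1024 * 1024 }
            });
            return path;
        } catch (err) {
            if (err.code === 'ENTRY_TOO_LARGE') {
                // errorDetails is populated whether the limit trips in the
                // metadata pre-flight check or in the new streaming counter.
                console.warn(`Rejected ${err.errorDetails.entryName}: ` +
                    `${err.errorDetails.observedBytes} bytes > limit of ${err.errorDetails.limitBytes}`);
            }
            throw err;
        }
    }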
diff --git a/packages/zip/test/zip.test.js b/packages/zip/test/zip.test.js
index 563e1974b..cd861a764 100644
--- a/packages/zip/test/zip.test.js
+++ b/packages/zip/test/zip.test.js
@@ -62,6 +62,18 @@ function createZipWithEntries(zipPath, entries) {
     });
 }
 
+// Rewrite every central-directory header's uncompressedSize field to simulate
+// a zip whose declared metadata understates its real payload.
+function forgeCentralUncompressedSize(zipPath, fake) {
+    const buf = fs.readFileSync(zipPath);
+    for (let i = 0; i < buf.length - 4; i++) {
+        if (buf.readUInt32LE(i) === 0x02014b50) {
+            buf.writeUInt32LE(fake, i + 24);
+        }
+    }
+    fs.writeFileSync(zipPath, buf);
+}
+
 describe('Compress and Extract should be opposite functions', function () {
     let symlinkPath, themeFolder, zipDestination, unzipDestination;
 
@@ -330,6 +342,29 @@ describe('Extract zip', function () {
         }
     });
 
+    it('rejects a zip whose central directory understates an entry size (streaming defence)', async function () {
+        // The metadata pre-flight check sees a tiny declared size and lets it
+        // through; the streaming counter sees the real 1 MB payload and rejects.
+        await createZipWithEntries(zipDestination, [
+            { name: 'lying.txt', content: Buffer.alloc(1024 * 1024, 'a') },
+        ]);
+        forgeCentralUncompressedSize(zipDestination, 5);
+
+        await assert.rejects(
+            () =>
+                extract(zipDestination, unzipDestination, {
+                    limits: { perEntryUncompressedBytes: 100 },
+                }),
+            (err) => {
+                assert.equal(err.code, 'ENTRY_TOO_LARGE');
+                assert.equal(err.errorDetails.entryName, 'lying.txt');
+                assert.equal(err.errorDetails.limitBytes, 100);
+                assert.ok(err.errorDetails.observedBytes > 100);
+                return true;
+            },
+        );
+    });
+
     it('throws when an entry exceeds the per-entry uncompressed size limit', async function () {
         await createZipWithEntries(zipDestination, [{ name: 'big-file.txt', content: '12345' }]);