From 4fab19741973b8216f204a489d26817fc274bfa5 Mon Sep 17 00:00:00 2001
From: Ayush Aher <ayushaher118@gmail.com>
Date: Fri, 7 Jun 2024 09:44:37 +0530
Subject: [PATCH] #261 Moved from fstream to the fs-extra package, as fstream
 is no longer supported and its dependencies contain vulnerabilities

---
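fstream is deprecated and its dependency chain carries known
vulnerabilities, so both write paths (lib/extract.js and
lib/Open/directory.js) now go through fs-extra. fs-extra re-exports the
core fs API (via graceful-fs, already a dependency of this package) and
adds promise-returning helpers such as ensureFile(), which creates the
target file and any missing parent directories; creating parent
directories is the work fstream's Writer previously did implicitly. A
minimal sketch of the new default write path follows (writeEntry is a
hypothetical helper used only for illustration, not part of this patch;
entry is an entry stream from unzipper's parser):

    const fs = require("fs-extra");

    async function writeEntry(entry, extractPath) {
      // Create any missing parent directories and an empty file,
      // mirroring what fstream's Writer used to handle on the fly.
      await fs.ensureFile(extractPath);
      return new Promise((resolve, reject) => {
        entry
          .pipe(fs.createWriteStream(extractPath)) // core fs stream, re-exported by fs-extra
          .on("close", resolve)
          .on("error", reject);
      });
    }

Callers that pass opts.getWriter are unaffected; only the default writer
changes. Most of the remaining churn in the diff is prettier-style
reformatting (double quotes, reflowed promise chains) with no behavior
change.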
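Both extraction paths keep the existing zip-slip guard seen in the diff
below: the entry path is joined to the destination and the entry is
skipped unless the resolved target stays inside the destination. A small
standalone sketch of that check, under the same logic the patch keeps
(isContained is a hypothetical name for illustration):

    const path = require("path");

    // Reject entries like "../../etc/passwd" that would escape destDir.
    function isContained(destDir, entryPath) {
      const dest = path.resolve(path.normalize(destDir));
      // Normalize to forward slashes so Windows-style entry paths are
      // handled the same way on UNIX, as the patch notes.
      const target = path.join(dest, entryPath.replace(/\\/g, "/"));
      return target.indexOf(dest) === 0; // same prefix check as the diff
    }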
 lib/Open/directory.js | 320 ++++++++++++++++++++++++------------------
 lib/extract.js        |  53 +++----
 package.json          |   6 +-
 3 files changed, 217 insertions(+), 162 deletions(-)

diff --git a/lib/Open/directory.js b/lib/Open/directory.js
index 88ea27d..1530aef 100644
--- a/lib/Open/directory.js
+++ b/lib/Open/directory.js
@@ -1,12 +1,12 @@
-const PullStream = require('../PullStream');
-const unzip = require('./unzip');
-const BufferStream = require('../BufferStream');
-const parseExtraField = require('../parseExtraField');
-const path = require('path');
-const Writer = require('fstream').Writer;
-const parseDateTime = require('../parseDateTime');
-const parseBuffer = require('../parseBuffer');
-const Bluebird = require('bluebird');
+const PullStream = require("../PullStream");
+const unzip = require("./unzip");
+const BufferStream = require("../BufferStream");
+const parseExtraField = require("../parseExtraField");
+const path = require("path");
+const fs = require("fs-extra");
+const parseDateTime = require("../parseDateTime");
+const parseBuffer = require("../parseBuffer");
+const Bluebird = require("bluebird");
 
 const signature = Buffer.alloc(4);
 signature.writeUInt32LE(0x06054b50, 0);
@@ -14,24 +14,31 @@ signature.writeUInt32LE(0x06054b50, 0);
 
 function getCrxHeader(source) {
   const sourceStream = source.stream(0).pipe(PullStream());
 
-  return sourceStream.pull(4).then(function(data) {
+  return sourceStream.pull(4).then(function (data) {
     const signature = data.readUInt32LE(0);
     if (signature === 0x34327243) {
       let crxHeader;
-      return sourceStream.pull(12).then(function(data) {
-        crxHeader = parseBuffer.parse(data, [
-          ['version', 4],
-          ['pubKeyLength', 4],
-          ['signatureLength', 4],
-        ]);
-      }).then(function() {
-        return sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength);
-      }).then(function(data) {
-        crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
-        crxHeader.signature = data.slice(crxHeader.pubKeyLength);
-        crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength;
-        return crxHeader;
-      });
+      return sourceStream
+        .pull(12)
+        .then(function (data) {
+          crxHeader = parseBuffer.parse(data, [
+            ["version", 4],
+            ["pubKeyLength", 4],
+            ["signatureLength", 4],
+          ]);
+        })
+        .then(function () {
+          return sourceStream.pull(
+            crxHeader.pubKeyLength + crxHeader.signatureLength
+          );
+        })
+        .then(function (data) {
+          crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
+          crxHeader.signature = data.slice(crxHeader.pubKeyLength);
+          crxHeader.size =
+            16 + crxHeader.pubKeyLength + crxHeader.signatureLength;
+          return crxHeader;
+        });
     }
   });
 }
@@ -39,14 +46,17 @@ function getCrxHeader(source) {
 // Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
 function getZip64CentralDirectory(source, zip64CDL) {
   const d64loc = parseBuffer.parse(zip64CDL, [
-    ['signature', 4],
-    ['diskNumber', 4],
-    ['offsetToStartOfCentralDirectory', 8],
-    ['numberOfDisks', 4],
+    ["signature", 4],
+    ["diskNumber", 4],
+    ["offsetToStartOfCentralDirectory", 8],
+    ["numberOfDisks", 4],
   ]);
 
   if (d64loc.signature != 0x07064b50) {
-    throw new Error('invalid zip64 end of central dir locator signature (0x07064b50): 0x' + d64loc.signature.toString(16));
+    throw new Error(
+      "invalid zip64 end of central dir locator signature (0x07064b50): 0x" +
+        d64loc.signature.toString(16)
+    );
   }
 
   const dir64 = PullStream();
@@ -56,22 +66,25 @@ }
 
 // Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
-function parseZip64DirRecord (dir64record) {
+function parseZip64DirRecord(dir64record) {
   const vars = parseBuffer.parse(dir64record, [
-    ['signature', 4],
-    ['sizeOfCentralDirectory', 8],
-    ['version', 2],
-    ['versionsNeededToExtract', 2],
-    ['diskNumber', 4],
-    ['diskStart', 4],
-    ['numberOfRecordsOnDisk', 8],
-    ['numberOfRecords', 8],
-    ['sizeOfCentralDirectory', 8],
-    ['offsetToStartOfCentralDirectory', 8],
+    ["signature", 4],
+    ["sizeOfCentralDirectory", 8],
+    ["version", 2],
+    ["versionsNeededToExtract", 2],
+    ["diskNumber", 4],
+    ["diskStart", 4],
+    ["numberOfRecordsOnDisk", 8],
+    ["numberOfRecords", 8],
+    ["sizeOfCentralDirectory", 8],
+    ["offsetToStartOfCentralDirectory", 8],
   ]);
 
   if (vars.signature != 0x06064b50) {
-    throw new Error('invalid zip64 end of central dir locator signature (0x06064b50): 0x0' + vars.signature.toString(16));
+    throw new Error(
+      "invalid zip64 end of central dir locator signature (0x06064b50): 0x0" +
+        vars.signature.toString(16)
+    );
   }
 
   return vars;
@@ -81,58 +94,67 @@ module.exports = function centralDirectory(source, options) {
   const endDir = PullStream();
   const records = PullStream();
   const tailSize = (options && options.tailSize) || 80;
-  let sourceSize,
-    crxHeader,
-    startOffset,
-    vars;
+  let sourceSize, crxHeader, startOffset, vars;
 
-  if (options && options.crx)
-    crxHeader = getCrxHeader(source);
+  if (options && options.crx) crxHeader = getCrxHeader(source);
 
-  return source.size()
-    .then(function(size) {
+  return source
+    .size()
+    .then(function (size) {
       sourceSize = size;
 
-      source.stream(Math.max(0, size-tailSize))
-        .on('error', function (error) { endDir.emit('error', error); })
+      source
+        .stream(Math.max(0, size - tailSize))
+        .on("error", function (error) {
+          endDir.emit("error", error);
+        })
         .pipe(endDir);
 
       return endDir.pull(signature);
     })
-    .then(function() {
-      return Bluebird.props({directory: endDir.pull(22), crxHeader: crxHeader});
+    .then(function () {
+      return Bluebird.props({
+        directory: endDir.pull(22),
+        crxHeader: crxHeader,
+      });
     })
-    .then(function(d) {
+    .then(function (d) {
       const data = d.directory;
-      startOffset = d.crxHeader && d.crxHeader.size || 0;
+      startOffset = (d.crxHeader && d.crxHeader.size) || 0;
 
       vars = parseBuffer.parse(data, [
-        ['signature', 4],
-        ['diskNumber', 2],
-        ['diskStart', 2],
-        ['numberOfRecordsOnDisk', 2],
-        ['numberOfRecords', 2],
-        ['sizeOfCentralDirectory', 4],
-        ['offsetToStartOfCentralDirectory', 4],
-        ['commentLength', 2],
+        ["signature", 4],
+        ["diskNumber", 2],
+        ["diskStart", 2],
+        ["numberOfRecordsOnDisk", 2],
+        ["numberOfRecords", 2],
+        ["sizeOfCentralDirectory", 4],
+        ["offsetToStartOfCentralDirectory", 4],
+        ["commentLength", 2],
       ]);
 
       // Is this zip file using zip64 format? Use same check as Go:
       // https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503
      // For zip64 files, need to find zip64 central directory locator header to extract
      // relative offset for zip64 central directory record.
-      if (vars.numberOfRecords == 0xffff|| vars.numberOfRecords == 0xffff ||
-        vars.offsetToStartOfCentralDirectory == 0xffffffff) {
-
+      if (
+        vars.numberOfRecords == 0xffff ||
+        vars.numberOfRecords == 0xffff ||
+        vars.offsetToStartOfCentralDirectory == 0xffffffff
+      ) {
         // Offset to zip64 CDL is 20 bytes before normal CDR
         const zip64CDLSize = 20;
-        const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
+        const zip64CDLOffset =
+          sourceSize - (tailSize - endDir.match + zip64CDLSize);
         const zip64CDLStream = PullStream();
 
         source.stream(zip64CDLOffset).pipe(zip64CDLStream);
 
-        return zip64CDLStream.pull(zip64CDLSize)
-          .then(function (d) { return getZip64CentralDirectory(source, d); })
+        return zip64CDLStream
+          .pull(zip64CDLSize)
+          .then(function (d) {
+            return getZip64CentralDirectory(source, d);
+          })
           .then(function (dir64record) {
             vars = parseZip64DirRecord(dir64record);
           });
@@ -140,91 +162,117 @@ module.exports = function centralDirectory(source, options) {
         vars.offsetToStartOfCentralDirectory += startOffset;
       }
     })
-    .then(function() {
-      if (vars.commentLength) return endDir.pull(vars.commentLength).then(function(comment) {
-        vars.comment = comment.toString('utf8');
-      });
+    .then(function () {
+      if (vars.commentLength)
+        return endDir.pull(vars.commentLength).then(function (comment) {
+          vars.comment = comment.toString("utf8");
+        });
     })
-    .then(function() {
+    .then(function () {
       source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
 
-      vars.extract = function(opts) {
-        if (!opts || !opts.path) throw new Error('PATH_MISSING');
+      vars.extract = function (opts) {
+        if (!opts || !opts.path) throw new Error("PATH_MISSING");
         // make sure path is normalized before using it
         opts.path = path.resolve(path.normalize(opts.path));
-        return vars.files.then(function(files) {
-          return Bluebird.map(files, function(entry) {
-            if (entry.type == 'Directory') return;
-
-            // to avoid zip slip (writing outside of the destination), we resolve
-            // the target path, and make sure it's nested in the intended
-            // destination, or not extract it otherwise.
-            const extractPath = path.join(opts.path, entry.path);
-            if (extractPath.indexOf(opts.path) != 0) {
-              return;
-            }
-            const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
-
-            return new Promise(function(resolve, reject) {
-              entry.stream(opts.password)
-                .on('error', reject)
-                .pipe(writer)
-                .on('close', resolve)
-                .on('error', reject);
-            });
-          }, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });
+        return vars.files.then(function (files) {
+          return Bluebird.map(
+            files,
+            function (entry) {
+              if (entry.type == "Directory") return;
+
+              // to avoid zip slip (writing outside of the destination), we resolve
+              // the target path, and make sure it's nested in the intended
+              // destination, or not extract it otherwise.
+              const extractPath = path.join(opts.path, entry.path);
+              if (extractPath.indexOf(opts.path) != 0) {
+                return;
+              }
+
+              return fs.ensureFile(extractPath).then(() => {
+                const writer = opts.getWriter
+                  ? opts.getWriter({ path: extractPath })
+                  : fs.createWriteStream(extractPath);
+
+                return new Promise(function (resolve, reject) {
+                  entry
+                    .stream(opts.password)
+                    .on("error", reject)
+                    .pipe(writer)
+                    .on("close", resolve)
+                    .on("error", reject);
+                });
+              });
+            },
+            { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 }
+          );
         });
       };
 
-      vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function() {
-        return records.pull(46).then(function(data) {
+      vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function () {
+        return records.pull(46).then(function (data) {
           const vars = parseBuffer.parse(data, [
-            ['signature', 4],
-            ['versionMadeBy', 2],
-            ['versionsNeededToExtract', 2],
-            ['flags', 2],
-            ['compressionMethod', 2],
-            ['lastModifiedTime', 2],
-            ['lastModifiedDate', 2],
-            ['crc32', 4],
-            ['compressedSize', 4],
-            ['uncompressedSize', 4],
-            ['fileNameLength', 2],
-            ['extraFieldLength', 2],
-            ['fileCommentLength', 2],
-            ['diskNumber', 2],
-            ['internalFileAttributes', 2],
-            ['externalFileAttributes', 4],
-            ['offsetToLocalFileHeader', 4],
+            ["signature", 4],
+            ["versionMadeBy", 2],
+            ["versionsNeededToExtract", 2],
+            ["flags", 2],
+            ["compressionMethod", 2],
+            ["lastModifiedTime", 2],
+            ["lastModifiedDate", 2],
+            ["crc32", 4],
+            ["compressedSize", 4],
+            ["uncompressedSize", 4],
+            ["fileNameLength", 2],
+            ["extraFieldLength", 2],
+            ["fileCommentLength", 2],
+            ["diskNumber", 2],
+            ["internalFileAttributes", 2],
+            ["externalFileAttributes", 4],
+            ["offsetToLocalFileHeader", 4],
           ]);
 
           vars.offsetToLocalFileHeader += startOffset;
-          vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
+          vars.lastModifiedDateTime = parseDateTime(
+            vars.lastModifiedDate,
+            vars.lastModifiedTime
+          );
 
-          return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
-            vars.pathBuffer = fileNameBuffer;
-            vars.path = fileNameBuffer.toString('utf8');
-            vars.isUnicode = (vars.flags & 0x800) != 0;
-            return records.pull(vars.extraFieldLength);
-          })
-          .then(function(extraField) {
+          return records
+            .pull(vars.fileNameLength)
+            .then(function (fileNameBuffer) {
+              vars.pathBuffer = fileNameBuffer;
+              vars.path = fileNameBuffer.toString("utf8");
+              vars.isUnicode = (vars.flags & 0x800) != 0;
+              return records.pull(vars.extraFieldLength);
+            })
+            .then(function (extraField) {
              vars.extra = parseExtraField(extraField, vars);
              return records.pull(vars.fileCommentLength);
            })
-          .then(function(comment) {
+            .then(function (comment) {
              vars.comment = comment;
-              vars.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)) ? 'Directory' : 'File';
-              const padding = options && options.padding || 1000;
-              vars.stream = function(_password) {
-                const totalSize = 30
-                  + padding // add an extra buffer
-                  + (vars.extraFieldLength || 0)
-                  + (vars.fileNameLength || 0)
-                  + vars.compressedSize;
-
-                return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
+              vars.type =
+                vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)
"Directory" + : "File"; + const padding = (options && options.padding) || 1000; + vars.stream = function (_password) { + const totalSize = + 30 + + padding + // add an extra buffer + (vars.extraFieldLength || 0) + + (vars.fileNameLength || 0) + + vars.compressedSize; + + return unzip( + source, + vars.offsetToLocalFileHeader, + _password, + vars, + totalSize + ); }; - vars.buffer = function(_password) { + vars.buffer = function (_password) { return BufferStream(vars.stream(_password)); }; return vars; diff --git a/lib/extract.js b/lib/extract.js index 31d725a..0c09def 100644 --- a/lib/extract.js +++ b/lib/extract.js @@ -1,54 +1,57 @@ module.exports = Extract; -const Parse = require('./parse'); -const Writer = require('fstream').Writer; -const path = require('path'); -const stream = require('stream'); -const duplexer2 = require('duplexer2'); +const Parse = require("./parse"); +const fs = require("fs-extra"); +const path = require("path"); +const stream = require("stream"); +const duplexer2 = require("duplexer2"); -function Extract (opts) { +function Extract(opts) { // make sure path is normalized before using it opts.path = path.resolve(path.normalize(opts.path)); const parser = new Parse(opts); - const outStream = new stream.Writable({objectMode: true}); - outStream._write = function(entry, encoding, cb) { - - if (entry.type == 'Directory') return cb(); + const outStream = new stream.Writable({ objectMode: true }); + outStream._write = async function (entry, encoding, cb) { + if (entry.type == "Directory") return cb(); // to avoid zip slip (writing outside of the destination), we resolve // the target path, and make sure it's nested in the intended // destination, or not extract it otherwise. // NOTE: Need to normalize to forward slashes for UNIX OS's to properly // ignore the zip slipped file entirely - const extractPath = path.join(opts.path, entry.path.replace(/\\/g, '/')); + const extractPath = path.join(opts.path, entry.path.replace(/\\/g, "/")); if (extractPath.indexOf(opts.path) != 0) { return cb(); } - const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath }); + try { + // Ensure the file and its parent directories exist + await fs.ensureFile(extractPath); + const writer = opts.getWriter + ? 
+        ? opts.getWriter({ path: extractPath })
+        : fs.createWriteStream(extractPath);
 
-    entry.pipe(writer)
-      .on('error', cb)
-      .on('close', cb);
+      entry.pipe(writer).on("error", cb).on("close", cb);
+    } catch (err) {
+      cb(err);
+    }
   };
 
   const extract = duplexer2(parser, outStream);
 
-  parser.once('crx-header', function(crxHeader) {
+  parser.once("crx-header", function (crxHeader) {
     extract.crxHeader = crxHeader;
   });
 
-  parser
-    .pipe(outStream)
-    .on('finish', function() {
-      extract.emit('close');
-    });
+  parser.pipe(outStream).on("finish", function () {
+    extract.emit("close");
+  });
 
-  extract.promise = function() {
-    return new Promise(function(resolve, reject) {
-      extract.on('close', resolve);
-      extract.on('error', reject);
+  extract.promise = function () {
+    return new Promise(function (resolve, reject) {
+      extract.on("close", resolve);
+      extract.on("error", reject);
     });
   };
diff --git a/package.json b/package.json
index 071a492..c31df99 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,10 @@
     {
       "name": "Joe Ferner",
       "email": "joe.ferner@nearinfinity.com"
+    },
+    {
+      "name": "Ayush Aher",
+      "email": "ayushaher118@gmail.com"
     }
   ],
   "repository": {
@@ -26,7 +30,7 @@
     "big-integer": "^1.6.17",
     "bluebird": "~3.4.1",
     "duplexer2": "~0.1.4",
-    "fstream": "^1.0.12",
+    "fs-extra": "^11.2.0",
     "graceful-fs": "^4.2.2"
   },
   "devDependencies": {