
Commit

Merge pull request #119 from ZJONSSON/crx
Crx
  • Loading branch information
ZJONSSON authored Jun 1, 2019
2 parents 4b7c39c + 4e02476 commit 26edb18
Showing 8 changed files with 202 additions and 3 deletions.
3 changes: 2 additions & 1 deletion README.md
@@ -27,6 +27,7 @@ There are no added compiled dependencies - inflation is handled by node.js's bui

Please note: Methods that use the Central Directory instead of parsing the entire file can be found under [`Open`](#open).

Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx files with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for a crx header and parse crx files, but only if you provide `crx: true` in the options.

## Installation

@@ -194,7 +195,7 @@ Previous methods rely on the entire zipfile being received through a pipe. The
If the file is encrypted you will have to supply a password to decrypt it; otherwise you can leave it blank.
Unlike `adm-zip`, the Open methods will never read the entire zipfile into a buffer.

The last argument is an optional `options` object where you can specify `tailSize` (default 80 bytes), i.e. how many bytes should be read at the end of the zipfile to locate the endOfCentralDirectory. This location can vary depending on the zip64 extensible data sector size.
The last argument is an optional `options` object where you can specify `tailSize` (default 80 bytes), i.e. how many bytes should be read at the end of the zipfile to locate the endOfCentralDirectory. This location can vary depending on the zip64 extensible data sector size. Additionally, you can supply the option `crx: true`, which will check for a crx header and parse the file accordingly by shifting all file offsets by the length of the crx header.

### Open.file([path], [options])
Returns a Promise to the central directory information with methods to extract individual files. `start` and `end` options are used to avoid reading the whole file.
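
A minimal usage sketch of the two approaches described above, the streaming `Parse` and the central-directory `Open.file` with `crx: true`; it assumes the package is installed as `unzipper` and that `extension.crx` is a local Chrome extension file (both names are illustrative, not part of this change):

```js
// Sketch only: file name and output are assumptions.
var fs = require('fs');
var unzipper = require('unzipper');

// Streaming: Parse detects the crx header on its own.
fs.createReadStream('extension.crx')
  .pipe(unzipper.Parse())
  .on('entry', function(entry) {
    console.log(entry.path);
    entry.autodrain(); // skip the contents in this example
  });

// Central directory: the Open methods only look for the crx header when asked.
unzipper.Open.file('extension.crx', { crx: true })
  .then(function(directory) {
    console.log(directory.files.map(function(f) { return f.path; }));
  });
```
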
40 changes: 38 additions & 2 deletions lib/Open/directory.js
@@ -9,21 +9,53 @@ var Buffer = require('../Buffer');
var signature = Buffer.alloc(4);
signature.writeUInt32LE(0x06054b50,0);

function getCrxHeader(source) {
var sourceStream = source.stream(0).pipe(PullStream());

return sourceStream.pull(4).then(function(data) {
var signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
var crxHeader;
return sourceStream.pull(12).then(function(data) {
crxHeader = binary.parse(data)
.word32lu('version')
.word32lu('pubKeyLength')
.word32lu('signatureLength')
.vars;
}).then(function() {
return sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength);
}).then(function(data) {
crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength);
crxHeader.signature = data.slice(crxHeader.pubKeyLength);
crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength;
return crxHeader;
});
}
});
}

module.exports = function centralDirectory(source, options) {
var endDir = PullStream(),
records = PullStream(),
tailSize = (options && options.tailSize) || 80,
crxHeader,
vars;

if (options && options.crx)
crxHeader = getCrxHeader(source);

return source.size()
.then(function(size) {
source.stream(Math.max(0,size-tailSize)).pipe(endDir);
return endDir.pull(signature);
})
.then(function() {
return endDir.pull(22);
return Promise.props({directory: endDir.pull(22), crxHeader: crxHeader});
})
.then(function(data) {
.then(function(d) {
var data = d.directory;
var startOffset = d.crxHeader && d.crxHeader.size || 0;

vars = binary.parse(data)
.word32lu('signature')
.word16lu('diskNumber')
@@ -35,6 +67,8 @@ module.exports = function centralDirectory(source, options) {
.word16lu('commentLength')
.vars;

vars.offsetToStartOfCentralDirectory += startOffset;

source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);

vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() {
@@ -59,6 +93,8 @@
.word32lu('offsetToLocalFileHeader')
.vars;

vars.offsetToLocalFileHeader += startOffset;

return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
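
To make the offset arithmetic above easier to follow: a crx (version 2) file is a 16-byte fixed header (the `Cr24` magic, a version, a public-key length and a signature length), followed by the public key, the signature and then an ordinary zipfile. The offsets recorded in the zip's central directory are relative to that embedded zipfile, so `startOffset` (the full crx header size) has to be added to `offsetToStartOfCentralDirectory` and `offsetToLocalFileHeader` to get absolute positions in the .crx file. A rough standalone sketch of the same header parsing with plain `Buffer` reads (the function name and `buf` argument are illustrative):

```js
// Sketch only: assumes `buf` holds at least the complete crx header.
function parseCrx2Header(buf) {
  if (buf.readUInt32LE(0) !== 0x34327243) return null; // 'Cr24' magic, little-endian
  var version = buf.readUInt32LE(4);
  var pubKeyLength = buf.readUInt32LE(8);
  var signatureLength = buf.readUInt32LE(12);
  return {
    version: version,
    publicKey: buf.slice(16, 16 + pubKeyLength),
    signature: buf.slice(16 + pubKeyLength, 16 + pubKeyLength + signatureLength),
    size: 16 + pubKeyLength + signatureLength // what the code above adds as startOffset
  };
}
```

For example, with a 1,000-byte crx header, a local file header that the central directory places at offset 500 actually starts at byte 1,500 of the .crx file.
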
3 changes: 3 additions & 0 deletions lib/extract.js
@@ -34,6 +34,9 @@ function Extract (opts) {
};

var extract = duplexer2(parser,outStream);
parser.once('crx-header', function(crxHeader) {
extract.crxHeader = crxHeader;
});

parser
.pipe(outStream)
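
Because `duplexer2` wraps the parser and the output stream into one duplex stream, the header captured by the listener above becomes available as a property on the returned `Extract` stream. A short sketch of how it might be consumed (the paths are assumptions):

```js
// Sketch only: extract a .crx and read the header stored by the 'crx-header' listener.
var fs = require('fs');
var unzipper = require('unzipper');

var extract = unzipper.Extract({ path: '/tmp/extension' });
extract.on('close', function() {
  // populated once the parser has emitted 'crx-header'
  console.log('crx version:', extract.crxHeader && extract.crxHeader.version);
});
fs.createReadStream('extension.crx').pipe(extract);
```
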
22 changes: 22 additions & 0 deletions lib/parse.js
@@ -43,6 +43,9 @@ Parse.prototype._readRecord = function () {

var signature = data.readUInt32LE(0);

if (signature === 0x34327243) {
return self._readCrxHeader();
}
if (signature === 0x04034b50) {
return self._readFile();
}
@@ -63,6 +66,23 @@ Parse.prototype._readRecord = function () {
});
};

Parse.prototype._readCrxHeader = function() {
var self = this;
return self.pull(12).then(function(data) {
self.crxHeader = binary.parse(data)
.word32lu('version')
.word32lu('pubKeyLength')
.word32lu('signatureLength')
.vars;
return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
}).then(function(data) {
self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
self.emit('crx-header',self.crxHeader);
return self._readRecord();
});
};

Parse.prototype._readFile = function () {
var self = this;
return self.pull(26).then(function(data) {
@@ -79,6 +99,8 @@ Parse.prototype._readFile = function () {
.word16lu('extraFieldLength')
.vars;

if (self.crxHeader) vars.crxHeader = self.crxHeader;

return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
var fileName = fileNameBuffer.toString('utf8');
var entry = Stream.PassThrough();
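
Since `_readCrxHeader` re-emits the parsed header as a `crx-header` event before handing control back to `_readRecord`, a streaming consumer can pick the header up without buffering the whole archive. A brief sketch (the file name is an assumption):

```js
// Sketch only: listen for the crx header while streaming zip entries.
var fs = require('fs');
var unzipper = require('unzipper');

fs.createReadStream('extension.crx')
  .pipe(unzipper.Parse())
  .on('crx-header', function(crxHeader) {
    console.log('crx version:', crxHeader.version);
  })
  .on('entry', function(entry) {
    entry.autodrain(); // discard the entry contents in this example
  });
```
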
78 changes: 78 additions & 0 deletions test/compressed-crx.js
@@ -0,0 +1,78 @@
'use strict';

var test = require('tap').test;
var fs = require('fs');
var path = require('path');
var temp = require('temp');
var dirdiff = require('dirdiff');
var unzip = require('../');

test('parse/extract crx archive', function (t) {
var archive = path.join(__dirname, '../testData/compressed-standard-crx/archive.crx');

temp.mkdir('node-unzip-', function (err, dirPath) {
if (err) {
throw err;
}
var unzipExtractor = unzip.Extract({ path: dirPath });
unzipExtractor.on('error', function(err) {
throw err;
});
unzipExtractor.on('close', testExtractionResults);

fs.createReadStream(archive).pipe(unzipExtractor);

function testExtractionResults() {
t.same(unzipExtractor.crxHeader.version,2);
dirdiff(path.join(__dirname, '../testData/compressed-standard/inflated'), dirPath, {
fileContents: true
}, function (err, diffs) {
if (err) {
throw err;
}
t.equal(diffs.length, 0, 'extracted directory contents');
t.end();
});
}
});
});

test('open methods', function(t) {
var archive = path.join(__dirname, '../testData/compressed-standard-crx/archive.crx');
var buffer = fs.readFileSync(archive);
var request = require('request');
var AWS = require('aws-sdk');
var s3 = new AWS.S3({region: 'us-east-1'});

// We have to modify the `getObject` and `headObject` to use makeUnauthenticated
s3.getObject = function(params,cb) {
return s3.makeUnauthenticatedRequest('getObject',params,cb);
};

s3.headObject = function(params,cb) {
return s3.makeUnauthenticatedRequest('headObject',params,cb);
};

var tests = [
{name: 'buffer',args: [buffer]},
{name: 'file', args: [archive]},
{name: 'url', args: [request, 'https://s3.amazonaws.com/unzipper/archive.crx']},
{name: 's3', args: [s3, { Bucket: 'unzipper', Key: 'archive.crx'}]}
];

tests.forEach(function(test) {
t.test(test.name, function(t) {
t.test('opening with crx option', function(t) {
var method = unzip.Open[test.name];
method.apply(method, test.args.concat({crx:true}))
.then(function(d) {
return d.files[1].buffer();
})
.then(function(d) {
t.same(String(d), '42\n', test.name + ' content matches');
t.end();
});
});
});
});
});
Binary file added testData/compressed-standard-crx/archive.crx
Binary file not shown.
31 changes: 31 additions & 0 deletions testData/compressed-standard-crx/crxmake.sh
@@ -0,0 +1,31 @@
#!/bin/bash -e
#
# Purpose: Pack a Chromium extension directory into crx format

crx="archive.crx"
pub="publickey.tmp"
sig="signature.tmp"
zip="../compressed-standard/archive.zip"
key="./key"

# signature
openssl sha1 -sha1 -binary -sign key.pem < "$zip" > "$sig"

# public key
openssl rsa -pubout -outform DER < key.pem > "$pub" 2>/dev/null

byte_swap () {
# Take "abcdefgh" and return it as "ghefcdab"
echo "${1:6:2}${1:4:2}${1:2:2}${1:0:2}"
}

crmagic_hex="4372 3234" # Cr24
version_hex="0200 0000" # 2
pub_len_hex=$(byte_swap $(printf '%08x\n' $(ls -l "$pub" | awk '{print $5}')))
sig_len_hex=$(byte_swap $(printf '%08x\n' $(ls -l "$sig" | awk '{print $5}')))
(
echo "$crmagic_hex $version_hex $pub_len_hex $sig_len_hex" | xxd -r -p
cat "$pub" "$sig" "$zip"
) > "$crx"
rm *.tmp
echo "Wrote $crx"
28 changes: 28 additions & 0 deletions testData/compressed-standard-crx/key.pem
@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDP/wGpId0UmDp9
hLSepNEErUJGeATwDWGbNyGkK621CHPAMrDqNO8Z2XAimic6bbxUg6NxYLKwkxDM
+5kGY3lHFQkqmwO1nYwMTq8hSjmEtgfF+vF3EM5DJamv9vHVgQ6uW3v6VSvlSl7v
RlO8IgoIh1hTpVZN6w3rKIcQdENlTtfZJZIHWcv2oYp05+tw+hAWKB8WAmLrRzKF
tP9VrIPYIVClqhyOQR386d1U+8YW26LKOTQmLZ9Fy45bzQSYKA3qDObYustvncvR
1IOvTHKM+/2DPT+k9QvvWe0osKkDfG9lmsxO3Pv0XJ5F1TEjW621OychV46u4k2E
ugcqLw7JAgMBAAECggEBAK8kSrCxjCj4WmAxK6penIa0ohcWnwnIKsV5cgF8qiPD
fzx7Ms+0TRXWK39nkNq5Zpk/05P32d+npsEEpwFjJ888HmDa8Q+wHyqJ4xxEFRTz
9q22NjeNVjHid/VtGowuxT76V/YeW/0SV8hhzsafXxV5al3c3kR0Rl8a3Eh1rf6S
FZdP2tts/+jyX1A81o2+duTguuftyQPgTFv7ugsiEW3De/dwn53Xr3tELZZ41ilJ
h9dryoq3gqWG9d7EgQ5F3YOdkn/YJopYzIiCNprqSMOG056VW+Fx7gkj6uGiIjRr
UbeUy6fWbmXYZgn4UHCVBXbHsM4XFtDmyXwjZ7EQm9UCgYEA7Xj3/BkGGyNW7XBO
pFGI0+lIfp9sFUsqZcVEapkdwSApL+/M/jmV1PTBn3KMge3vf8AgGKhD4ttK/Z+i
6H2wlazOuZjRcRDdfO/4lOUQxd/FTnr2JDXsSYdKQoaM4p+HH1FRE12j/xVrPBBT
M29mQOHzZYEFFQ21AMLv+KLa99cCgYEA4DlOaigXeJekg8w/sjlYmWz7FyECZT09
xIaLndt8JrY4hH1gk5issHsBSzpMgiPizX9hqk3PxSkGfpH3iW9cE+WNdKs6/fEI
tWpVVCZ3DfEy/r3KVgZDvJbEKuu1alBuNlfF3mKHQnHGj+VwIRNLzB6PcZ/EF67a
uZF/l98g2l8CgYEA2nTbJIXcsSBseldDcTQ0fEVx1FJSOrCAG0lC7BFZZu1wFlIy
sXhGFrbmXAkjqu840LvsiuJYORxlOzYcxmXCCZ8EOYaUvb+3EZUsh8TGDlIRj2Xc
g2k7qlSUAukGOABrbGsA+6C8GhAZKxMVhw6m8W8q2qi7BSgr57xsx70BVNcCgYAg
H3Whdb7vEuKJ00Ao21hbGqbaSGtcb6qithfYdLJTpXVxXbjxTEUpP2YPDfoaBuQe
RqqKSH2EpHz+sxDAisipPRDH7yQTb22s99/jn2MdBzokDrKnIlyf7wWJlJ037u/r
LyX01y7DkSM+SEOJKYeJZbNtNtNUBUPmo/agnmHJhwKBgQCwZ1m+X2vBRwJiopIz
nwd8lActXaw8uSl+ydEj+4dx8pV9I2NhahB6FxrMY9iGz0q6/vPyYvY4bZn1QLQ2
FZtwNLwVpItalag3YTBAcF8tsQEDWJCjbpuTlQLXwXIQxUoSRC/rK95iaKkZX62+
JnP43yc9/htpBX/RdPvMRD8Lpg==
-----END PRIVATE KEY-----
