Commit c9fc57c

feat: add initial support for brotli
1 parent 8c5af15 commit c9fc57c

10 files changed, +296 -2 lines changed

lib/pack.js (+9)

@@ -91,6 +91,15 @@ const Pack = warner(class Pack extends Minipass {
       this.zip.on('end', _ => super.end())
       this.zip.on('drain', _ => this[ONDRAIN]())
       this.on('resume', _ => this.zip.resume())
+    } else if (opt.brotli) {
+      if (typeof opt.brotli !== 'object') {
+        opt.brotli = {}
+      }
+      this.zip = new zlib.BrotliCompress(opt.brotli)
+      this.zip.on('data', chunk => super.write(chunk))
+      this.zip.on('end', _ => super.end())
+      this.zip.on('drain', _ => this[ONDRAIN]())
+      this.on('resume', _ => this.zip.resume())
     } else {
       this.on('drain', this[ONDRAIN])
     }
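
Note: a minimal usage sketch of the new option, not part of this commit. It assumes lib/pack.js exports the Pack class directly (as the tests below use it); `some-dir` and `dir` are placeholder paths. Pack normalizes `brotli: true` to `{}` and otherwise passes the object straight through to zlib.BrotliCompress.

const zlib = require('zlib')
const Pack = require('../lib/pack.js')

const chunks = []
new Pack({ cwd: 'some-dir', brotli: true })
  .add('dir')
  .on('data', c => chunks.push(c))
  .on('end', _ => {
    // the collected output is a brotli-compressed tar stream
    const tarData = zlib.brotliDecompressSync(Buffer.concat(chunks))
    console.log(tarData.length % 512 === 0) // tar data comes in 512-byte blocks
  })
  .end()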

lib/parse.js (+18)

@@ -97,6 +97,9 @@ module.exports = warner(class Parser extends EE {
     this.strict = !!opt.strict
     this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
     this.filter = typeof opt.filter === 'function' ? opt.filter : noop
+    // Unlike gzip, brotli doesn't have any magic bytes to identify it
+    // Users need to explicitly tell us they're extracting a brotli file
+    this.brotli = opt.brotli

     // have to set this so that streams are ok piping into it
     this.writable = true
@@ -356,6 +359,21 @@ module.exports = warner(class Parser extends EE {
       this[BUFFER] = chunk
       return true
     }
+    if (this[UNZIP] === null && this.brotli) {
+      const ended = this[ENDED]
+      this[ENDED] = false
+      this[UNZIP] = new zlib.BrotliDecompress()
+      this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
+      this[UNZIP].on('error', er => this.abort(er))
+      this[UNZIP].on('end', _ => {
+        this[ENDED] = true
+        this[CONSUMECHUNK]()
+      })
+      this[WRITING] = true
+      const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
+      this[WRITING] = false
+      return ret
+    }
     for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
       if (chunk[i] !== gzipHeader[i]) {
         this[UNZIP] = false
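
Note: because brotli streams cannot be sniffed, decompression only happens when the caller opts in. A minimal sketch of that opt-in, not part of this commit; it assumes lib/parse.js exports the Parser class directly (as test/parse.js below uses it), and `someTarBuffer` stands in for any plain tar buffer.

const zlib = require('zlib')
const Parse = require('../lib/parse.js')

// without `brotli`, this input would be rejected as an unrecognized archive
const p = new Parse({ brotli: true })
p.on('entry', entry => {
  console.log(entry.path)
  entry.resume() // drain the entry so parsing continues
})
p.end(zlib.brotliCompressSync(someTarBuffer))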

lib/replace.js (+1 -1)

@@ -23,7 +23,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }

-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }


lib/update.js (+1 -1)

@@ -13,7 +13,7 @@ module.exports = (opt_, files, cb) => {
     throw new TypeError('file is required')
   }

-  if (opt.gzip) {
+  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
     throw new TypeError('cannot append to compressed archives')
   }

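
Note: the guard that already covered gzip now also rejects brotli appends, both when the option is passed explicitly and when the filename merely looks brotli-compressed. A small sketch mirroring the tests later in the commit; `r` and `u` are lib/replace.js and lib/update.js, and the archive paths are placeholders.

const r = require('../lib/replace.js')
const u = require('../lib/update.js')

// rejected because of the explicit option (and the .tbr extension)
try {
  r({ file: 'archive.tbr', brotli: true }, ['some-file.txt'])
} catch (er) {
  // TypeError: cannot append to compressed archives
}

// rejected on the .br extension alone, no brotli option needed
try {
  u({ file: 'archive.tar.br', sync: true }, ['some-file.txt'])
} catch (er) {
  // TypeError: cannot append to compressed archives
}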

test/extract.js (+27)

@@ -310,3 +310,30 @@ t.test('sync gzip error edge case test', async t => {

   t.end()
 })
+
+t.test('brotli', async t => {
+  const file = path.resolve(__dirname, 'fixtures/example.tbr')
+  const dir = path.resolve(__dirname, 'brotli')
+
+  t.beforeEach(async () => {
+    await mkdirp(dir)
+  })
+
+  t.afterEach(async () => {
+    await rimraf(dir)
+  })
+
+  t.test('fails if brotli', async t => {
+    const expect = new Error("TAR_BAD_ARCHIVE: Unrecognized archive format")
+    t.throws(_ => x({ sync: true, file: file }), expect)
+  })
+
+  t.test('succeeds', t => {
+    x({ sync: true, file: file, C: dir, brotli: true })
+
+    t.same(fs.readdirSync(dir + '/x').sort(),
+      ['1', '10', '2', '3', '4', '5', '6', '7', '8', '9'])
+    t.end()
+  })
+})
+
339+

test/fixtures/example.tbr (binary file, 201 Bytes, not shown)
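
Note: the fixture itself is binary and not rendered here, but a .tbr of this shape is just a brotli-compressed plain tar. A hypothetical way to generate such a fixture, not part of the commit; the paths are placeholders.

const fs = require('fs')
const zlib = require('zlib')

// brotli-compress an existing plain tar into a .tbr fixture
const tarData = fs.readFileSync('test/fixtures/some-archive.tar')
fs.writeFileSync('test/fixtures/example.tbr', zlib.brotliCompressSync(tarData))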

test/pack.js (+165)

@@ -375,6 +375,13 @@ t.test('if gzip is truthy, make it an object', t => {
   t.end()
 })

+t.test('if brotli is truthy, make it an object', t => {
+  const opt = { brotli: true }
+  new Pack(opt)
+  t.type(opt.brotli, 'object')
+  t.end()
+})
+
 t.test('gzip, also a very deep path', t => {
   const out = []

@@ -454,6 +461,85 @@ t.test('gzip, also a very deep path', t => {
     })
 })

+t.test('brotli, also a very deep path', t => {
+  const out = []
+
+  new Pack({
+    cwd: files,
+    brotli: { flush: 1 },
+  })
+    .add('dir')
+    .add('long-path')
+    .on('data', c => out.push(c))
+    .end()
+    .on('end', _ => {
+      const zipped = Buffer.concat(out)
+      const data = zlib.brotliDecompressSync(zipped)
+      const entries = []
+      for (var i = 0; i < data.length; i += 512) {
+        const slice = data.slice(i, i + 512)
+        const h = new Header(slice)
+        if (h.nullBlock) {
+          entries.push('null block')
+        } else if (h.cksumValid) {
+          entries.push([h.type, h.path])
+        } else if (entries[entries.length - 1][0] === 'File') {
+          entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, ''))
+        }
+      }
+
+      const expect = [
+        ['Directory', 'dir/'],
+        ['Directory', 'long-path/'],
+        ['File', 'dir/x'],
+        ['Directory', 'long-path/r/'],
+        ['Directory', 'long-path/r/e/'],
+        ['Directory', 'long-path/r/e/a/'],
+        ['Directory', 'long-path/r/e/a/l/'],
+        ['Directory', 'long-path/r/e/a/l/l/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'],
+        ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'],
+        ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'],
+        ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'],
+        ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
+        ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'],
+        ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'],
+        ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
+        ['ExtendedHeader', 'PaxHeader/Ω.txt'],
+        ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'],
+        'null block',
+        'null block',
+      ]
+
+      let ok = true
+      entries.forEach((entry, i) => {
+        ok = ok &&
+          t.equal(entry[0], expect[i][0]) &&
+          t.equal(entry[1], expect[i][1]) &&
+          (!entry[2] || t.equal(entry[2], expect[i][2]))
+      })
+
+      // t.match(entries, expect)
+      t.end()
+    })
+})
+
 t.test('very deep gzip path, sync', t => {
   const pack = new PackSync({
     cwd: files,
@@ -533,6 +619,85 @@ t.test('very deep gzip path, sync', t => {
   t.end()
 })

+t.test('very deep brotli path, sync', t => {
+  const pack = new PackSync({
+    cwd: files,
+    brotli: true,
+  }).add('dir')
+    .add('long-path')
+    .end()
+
+  // these do nothing!
+  pack.pause()
+  pack.resume()
+
+  const zipped = pack.read()
+  t.type(zipped, Buffer)
+  const data = zlib.brotliDecompressSync(zipped)
+  const entries = []
+  for (var i = 0; i < data.length; i += 512) {
+    const slice = data.slice(i, i + 512)
+    const h = new Header(slice)
+    if (h.nullBlock) {
+      entries.push('null block')
+    } else if (h.cksumValid) {
+      entries.push([h.type, h.path])
+    } else if (entries[entries.length - 1][0] === 'File') {
+      entries[entries.length - 1].push(slice.toString().replace(/\0.*$/, ''))
+    }
+  }
+
+  const expect = [
+    ['Directory', 'dir/'],
+    ['File', 'dir/x'],
+    ['Directory', 'long-path/'],
+    ['Directory', 'long-path/r/'],
+    ['Directory', 'long-path/r/e/'],
+    ['Directory', 'long-path/r/e/a/'],
+    ['Directory', 'long-path/r/e/a/l/'],
+    ['Directory', 'long-path/r/e/a/l/l/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/'],
+    ['Directory', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/'],
+    ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/a.txt', 'short\n'],
+    ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '1111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111'],
+    ['ExtendedHeader', 'PaxHeader/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
+    ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc', '2222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222'],
+    ['ExtendedHeader', 'PaxHeader/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccc'],
+    ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxccccccccccccccccccccccccccccccccccccccccccccccccc', 'cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'],
+    ['ExtendedHeader', 'PaxHeader/Ω.txt'],
+    ['File', 'long-path/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/Ω.txt', 'Ω'],
+    'null block',
+    'null block',
+  ]
+
+  let ok = true
+  entries.forEach((entry, i) => {
+    ok = ok &&
+      t.equal(entry[0], expect[i][0]) &&
+      t.equal(entry[1], expect[i][1]) &&
+      (!entry[2] || t.equal(entry[2], expect[i][2]))
+  })
+
+  // t.match(entries, expect)
+  t.end()
+})
+
 t.test('write after end', t => {
   const p = new Pack()
   p.end()

test/parse.js (+24)

@@ -125,6 +125,30 @@ t.test('fixture tests', t => {
       bs.end(zlib.gzipSync(tardata))
     })

+    t.test('compress with brotli all at once', t => {
+      const p = new Parse({
+        maxMetaEntrySize: maxMeta,
+        filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
+        strict: strict,
+        brotli: {}
+      })
+      trackEvents(t, expect, p)
+      p.end(zlib.brotliCompressSync(tardata))
+    })
+
+    t.test('compress with brotli byte at a time', t => {
+      const bs = new ByteStream()
+      const bp = new Parse({
+        maxMetaEntrySize: maxMeta,
+        filter: filter ? (path, entry) => entry.size % 2 !== 0 : null,
+        strict: strict,
+        brotli: {},
+      })
+      trackEvents(t, expect, bp)
+      bs.pipe(bp)
+      bs.end(zlib.brotliCompressSync(tardata))
+    })
+
     t.test('async chunks', t => {
       const p = new Parse({
         maxMetaEntrySize: maxMeta,

test/replace.js (+25)

@@ -23,6 +23,7 @@ const fixtureDef = {
   'zero.tar': Buffer.from(''),
   'empty.tar': Buffer.alloc(512),
   'compressed.tgz': zlib.gzipSync(data),
+  'compressed.tbr': zlib.brotliCompressSync(data),
 }

 t.test('basic file add to archive (good or truncated)', t => {
@@ -211,6 +212,30 @@ t.test('cannot append to gzipped archives', async t => {
   }, [path.basename(__filename)], er => t.match(er, expect))
 })

+t.test('cannot append to brotli compressed archives', async t => {
+  const dir = t.testdir({
+    'compressed.tbr': fixtureDef['compressed.tbr'],
+  })
+  const file = resolve(dir, 'compressed.tbr')
+
+  const expect = new Error('cannot append to compressed archives')
+  const expectT = new TypeError('cannot append to compressed archives')
+
+  t.throws(_ => r({
+    file,
+    cwd: __dirname,
+    brotli: true,
+  }, [path.basename(__filename)]), expectT)
+
+  t.throws(_ => r({
+    file,
+    cwd: __dirname,
+    sync: true,
+  }, [path.basename(__filename)]), expect)
+
+  t.end()
+})
+
 t.test('other throws', t => {
   t.throws(_ => r({}, ['asdf']), new TypeError('file is required'))
   t.throws(_ => r({ file: 'asdf' }, []),

test/update.js (+26)

@@ -9,6 +9,7 @@ const { resolve } = require('path')
 const fixtures = path.resolve(__dirname, 'fixtures')
 const tars = path.resolve(fixtures, 'tars')
 const zlib = require('zlib')
+const r = require("../lib/replace");

 const spawn = require('child_process').spawn

@@ -22,6 +23,7 @@ const fixtureDef = {
   'zero.tar': Buffer.from(''),
   'empty.tar': Buffer.alloc(512),
   'compressed.tgz': zlib.gzipSync(data),
+  'compressed.tbr': zlib.brotliCompressSync(data),
 }

 t.test('basic file add to archive (good or truncated)', t => {
@@ -213,6 +215,30 @@ t.test('cannot append to gzipped archives', t => {
   })
 })

+t.test('cannot append to brotli archives', t => {
+  const dir = t.testdir({
+    'compressed.tbr': fixtureDef['compressed.tbr'],
+  })
+  const file = resolve(dir, 'compressed.tbr')
+
+  const expect = new Error('cannot append to compressed archives')
+  const expectT = new TypeError('cannot append to compressed archives')
+
+  t.throws(_ => u({
+    file,
+    cwd: __dirname,
+    brotli: true,
+  }, [path.basename(__filename)]), expectT)
+
+  t.throws(_ => u({
+    file,
+    cwd: __dirname,
+    sync: true,
+  }, [path.basename(__filename)]), expect)
+
+  t.end()
+})
+
 t.test('other throws', t => {
   t.throws(_ => u({}, ['asdf']), new TypeError('file is required'))
   t.throws(_ => u({ file: 'asdf' }, []),
