From b786929a368521e9cfcdcf7490a8e77485d5a253 Mon Sep 17 00:00:00 2001
From: Arjun Barrett
Date: Tue, 15 Jun 2021 14:33:00 -0700
Subject: [PATCH] v0.7.1

---
 CHANGELOG.md |  4 ++++
 README.md    | 18 +++++++++++++-----
 package.json |  2 +-
 3 files changed, 18 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6c159c5..f1e7b48 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.7.1
+- Removed requirement for `setTimeout`
+- Added support for unzip file filters (thanks to [@manucorporat](https://github.com/manucorporat): #67)
+- Fixed streaming gunzip and unzlib bug causing corruption
 ## 0.7.0
 - Improved errors
   - Now errors are error objects instead of strings
diff --git a/README.md b/README.md
index 092b69c..f12eac5 100644
--- a/README.md
+++ b/README.md
@@ -249,7 +249,7 @@ const zipped = fflate.zipSync({
     'other/tmp.txt': new Uint8Array([97, 98, 99, 100])
   },
   // You can also provide compression options
-  'myImageData.bmp': [aMassiveFile, {
+  'massiveImage.bmp': [aMassiveFile, {
     level: 9,
     mem: 12,
     // ZIP-specific: mtime works here too, defaults to current time
@@ -274,13 +274,21 @@ const zipped = fflate.zipSync({
 // | |-> 你好.txt
 // |-> other
 // | |-> tmp.txt
-// myImageData.bmp
+// massiveImage.bmp
 // superTinyFile.png

 // When decompressing, folders are not nested; all filepaths are fully
 // written out in the keys. For example, the return value may be:
-// { 'nested/directory/a2.txt': Uint8Array(2) [97, 97] })
-const decompressed = fflate.unzipSync(zipped);
+// { 'nested/directory/structure.txt': Uint8Array(2) [97, 97] }
+const decompressed = fflate.unzipSync(zipped, {
+  // You may optionally supply a filter for files. By default, all files in a
+  // ZIP archive are extracted, but a filter can save resources by telling
+  // the library not to decompress certain files
+  filter(file) {
+    // Don't decompress the massive image or any files larger than 10 MiB
+    return file.name != 'massiveImage.bmp' && file.originalSize <= 10_000_000;
+  }
+});
 ```

 If you need extremely high performance or custom ZIP compression formats, you can use the highly-extensible ZIP streams. They take streams as both input and output. You can even use custom compression/decompression algorithms from other libraries, as long as they [are defined in the ZIP spec](https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT) (see section 4.4.5). If you'd like more info on using custom compressors, [feel free to ask](https://github.com/101arrowz/fflate/discussions).
@@ -441,7 +449,7 @@ zip({ f1: aMassiveFile, 'f2.txt': anotherMassiveFile }, {
 });

 // unzip is the only async function without support for consume option
-// Also parallelized, so unzip is also often much faster than unzipSync
+// It is parallelized, so unzip is also often much faster than unzipSync
 unzip(aMassiveZIPFile, (err, unzipped) => {
   // If the archive has data.xml, log it here
   console.log(unzipped['data.xml']);
diff --git a/package.json b/package.json
index b2b0f07..169ef9d 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "fflate",
-  "version": "0.7.0",
+  "version": "0.7.1",
   "description": "High performance (de)compression in an 8kB package",
   "main": "./lib/index.cjs",
   "module": "./esm/browser.js",
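
The changelog above mentions a fix for corruption in streaming gunzip and unzlib. For context, the sketch below illustrates the kind of incremental usage that fix applies to; it is a minimal example and not part of the patch. It assumes fflate's synchronous `Gunzip` stream class (the same pattern applies to `Unzlib`), and `compressedChunks` is a hypothetical stand-in for any chunked source of gzip-compressed bytes.

```js
// Minimal sketch (not part of the patch): incremental decompression with
// fflate's synchronous streaming Gunzip. `compressedChunks` is a hypothetical
// array of { data: Uint8Array, last: boolean } records standing in for any
// chunked source of gzip data (network stream, file reads, etc.).
import { Gunzip } from 'fflate';

const output = [];
const gunzip = new Gunzip((chunk, final) => {
  // Called once per decompressed chunk; `final` is true for the last chunk
  output.push(chunk);
  if (final) console.log('done, received', output.length, 'chunks');
});

// Feed compressed data as it arrives; mark the last chunk with `true`
for (const { data, last } of compressedChunks) {
  gunzip.push(data, last);
}
```

This chunk-by-chunk push pattern is the usage the corruption fix in this release targets.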