By adding an adapter layer, fflate can work as a Compression Streams ponyfill. Since the Compression Streams API is still quite new, this will be useful in the coming months.
The proposed ponyfill also offers extra compression options (such as level and dictionary) that the spec does not yet support, which will remain useful for the next few years.
By the way, the native CompressionStream API is currently about 2x faster than fflate in most cases; using WebAssembly or Web Workers will hopefully speed up fflate by around 50%.
An optimal solution
Code samples:
ponyfill.js
import {AsyncDeflate, AsyncGzip, AsyncZlib, AsyncInflate, AsyncGunzip, AsyncUnzlib} from 'fflate/esm/browser.js';
// import 'fflate/umd/index.js'; /* global fflate */
// const {AsyncDeflate, AsyncGzip, AsyncZlib, AsyncInflate, AsyncGunzip, AsyncUnzlib} = fflate;
/**
 * Ponyfill for the Compression Streams API `CompressionStream`, backed by
 * fflate's asynchronous compressors.
 */
export class CompressionStream {
  #readable;
  #writable;
  /**
   * @param {'deflate'|'deflate-raw'|'gzip'} format - Compressed output format.
   * @param {object} [options] - Extra fflate options (e.g. `level`, `dictionary`)
   *   that the spec does not yet support; merged over the defaults below.
   * @throws {TypeError} If `format` is missing or unsupported.
   */
  constructor(format, options = null) {
    const opts = { consume: true, level: 5, ...options };
    let deflator = null;
    switch (format) {
      case 'deflate': // RFC 1950: ZLIB Compressed Data Format Specification version 3.3
        deflator = new AsyncZlib(opts);
        break;
      case 'deflate-raw': // RFC 1951: DEFLATE Compressed Data Format Specification version 1.3
        deflator = new AsyncDeflate(opts);
        break;
      case 'gzip': // RFC 1952: GZIP file format specification version 4.3
        deflator = new AsyncGzip(opts);
        break;
      default:
        throw new TypeError(arguments.length === 0
          ? '1 argument required, but only 0 present'
          : 'Unsupported compression format: ' + format);
    }
    this.#writable = new WritableStream({
      write(chunk) { deflator.push(chunk); },
      // An empty final push flushes the compressor and ends the stream.
      close() { deflator.push(new Uint8Array(0), true); }
    });
    this.#readable = new ReadableStream({
      start(controller) {
        deflator.ondata = (err, chunk, final) => {
          // BUG FIX: the original referenced an undefined `writable` variable
          // here (ReferenceError) and kept enqueueing after an error. Fail the
          // readable side with the error object and stop processing instead.
          if (err) {
            controller.error(err);
            return;
          }
          // Guard against enqueueing a missing chunk on the final callback.
          if (chunk) controller.enqueue(chunk);
          if (final) controller.close();
        };
      }
    });
  }
  get writable() {
    return this.#writable;
  }
  get readable() {
    return this.#readable;
  }
}
/**
 * Ponyfill for the Compression Streams API `DecompressionStream`, backed by
 * fflate's asynchronous decompressors.
 */
export class DecompressionStream {
  #readable;
  #writable;
  /**
   * @param {'deflate'|'deflate-raw'|'gzip'} format - Format of the compressed input.
   * @param {object} [options] - Extra fflate options merged over the defaults below.
   * @throws {TypeError} If `format` is missing or unsupported.
   */
  constructor(format, options = null) {
    const opts = { consume: true, ...options };
    let inflator = null;
    switch (format) {
      case 'deflate': // RFC 1950 zlib wrapper
        inflator = new AsyncUnzlib(opts);
        break;
      case 'deflate-raw': // RFC 1951 raw DEFLATE
        inflator = new AsyncInflate(opts);
        break;
      case 'gzip': // RFC 1952 gzip container
        inflator = new AsyncGunzip(opts);
        break;
      default:
        throw new TypeError(arguments.length === 0
          ? '1 argument required, but only 0 present'
          : 'Unsupported compression format: ' + format);
    }
    this.#writable = new WritableStream({
      write(chunk) { inflator.push(chunk); },
      // An empty final push flushes the decompressor and ends the stream.
      close() { inflator.push(new Uint8Array(0), true); }
    });
    this.#readable = new ReadableStream({
      start(controller) {
        inflator.ondata = (err, chunk, final) => {
          // BUG FIX: the original referenced an undefined `writable` variable
          // here (ReferenceError) and kept enqueueing after an error. Fail the
          // readable side with the error object and stop processing instead.
          if (err) {
            controller.error(err);
            return;
          }
          // Guard against enqueueing a missing chunk on the final callback.
          if (chunk) controller.enqueue(chunk);
          if (final) controller.close();
        };
      }
    });
  }
  get writable() {
    return this.#writable;
  }
  get readable() {
    return this.#readable;
  }
}
What can't you do right now?
By adding an adapter layer, fflate can work as a Compression Streams ponyfill. Since the Compression Streams API is still quite new, this will be useful in the coming months.
see Can I Use CompressionStream API
The proposed ponyfill also offers extra compression options (such as `level` and `dictionary`) that the spec does not yet support, which will remain useful for the next few years. By the way, the native CompressionStream API is currently about 2x faster than fflate in most cases; using WebAssembly or Web Workers will hopefully speed up fflate by around 50%.
An optimal solution
Code samples: ponyfill.js
ponyfill.test.js
(How) is this done by other libraries?