/*!
* compression
* Copyright(c) 2017 Arturas Molcanovas
* Copyright(c) 2010 Sencha Inc.
* Copyright(c) 2011 TJ Holowaychuk
* Copyright(c) 2014 Jonathan Ong
* Copyright(c) 2014-2015 Douglas Christopher Wilson
* MIT Licensed
*/
import accepts from 'accepts'
import bytes from 'bytes'
import compressible from 'compressible'
const debug = require('debug')('compression')
import { Duplex } from 'stream'
import iltorb from 'iltorb'
import lruCache from 'lru-cache'
import multipipe from 'multipipe'
import onHeaders from 'on-headers'
import { Readable } from 'stream'
import util from 'util'
import vary from 'vary'
import { Writable } from 'stream'
import zlib from 'zlib'
// import zopfli from 'node-zopfli-es'
/**
* Module variables.
* @private
*/
// Matches a `no-transform` token in a comma-separated Cache-Control header
// value (anchored to start/end or surrounding commas so substrings of other
// directives do not match).
const cacheControlNoTransformRegExp = /(?:^|,)\s*?no-transform\s*?(?:,|$)/
// according to https://blogs.akamai.com/2016/02/understanding-brotlis-potential.html , brotli:4
// is slightly faster than gzip with somewhat better compression; good default if we don't want to
// worry about compression runtime being slower than gzip
const BROTLI_DEFAULT_QUALITY = 4
/**
* Compress response data with gzip / deflate.
*
* @param {Object} [options]
* @return {Function} middleware
* @public
*/
/**
 * Create middleware that compresses response bodies with brotli, gzip or
 * deflate (server preference, in that order), optionally caching compressed
 * output keyed by (encoding, url, ETag).
 *
 * @param {Object} [options] - `filter(req, res)`, `threshold` (bytes value
 *   parseable by `bytes`), `brotli` (iltorb options), `zlib` (or the classic
 *   zlib option names at the top level), `cacheSize` (bytes; falsy disables
 *   the cache), `cache(req, res)` predicate.
 * @return {Function} middleware
 * @public
 */
function compression(options) {
  const opts = options || {}
  // options
  const filter = opts.filter || shouldCompress
  let threshold = bytes.parse(opts.threshold)
  if (threshold === null) {
    // default: bodies under 1 KiB are not worth compressing
    threshold = 1024
  }
  // copy caller-supplied option objects so we never mutate the caller's data
  const brotliOpts = Object.assign({}, opts.brotli)
  brotliOpts.quality = brotliOpts.quality || BROTLI_DEFAULT_QUALITY
  const zlibOpts = Object.assign({}, opts.zlib)
  // accept the classic zlib option names at the top level for compatibility
  const zlibOptNames = [
    'flush',
    'chunkSize',
    'windowBits',
    'level',
    'memLevel',
    'strategy',
    'dictionary',
  ]
  zlibOptNames.forEach(option => {
    zlibOpts[option] = zlibOpts[option] || opts[option]
  })
  // use the safe own-property check: works for null-prototype objects and
  // ignores a shadowed `hasOwnProperty`; also avoid writing back into `opts`
  const cacheSize = Object.prototype.hasOwnProperty.call(opts, 'cacheSize')
    ? opts.cacheSize
    : '128mB'
  const cache = cacheSize ? createCache(bytes(cacheSize.toString())) : null
  const shouldCache = opts.cache || (() => true)
  const dummyBrotliFlush = () => {}
  return function compression(req, res, next) {
    let ended = false
    let length
    let listeners = []
    let stream
    const _end = res.end
    const _on = res.on
    const _write = res.write
    // flush any buffered compressed data through to the client
    res.flush = function flush() {
      if (stream) {
        stream.flush()
      }
    }
    // proxy res.write so body chunks are routed through the compressor
    res.write = function write(chunk, encoding) {
      if (ended) {
        return false
      }
      if (!this._header) {
        this._implicitHeader()
      }
      return stream
        ? stream.write(Buffer.from(chunk, encoding))
        : _write.call(this, chunk, encoding)
    }
    res.end = function end(chunk, encoding) {
      if (ended) {
        return false
      }
      if (!this._header) {
        // estimate the length
        if (!this.getHeader('Content-Length')) {
          length = chunkLength(chunk, encoding)
        }
        this._implicitHeader()
      }
      if (!stream) {
        return _end.call(this, chunk, encoding)
      }
      // mark ended
      ended = true
      // normalize to a Buffer before handing to the compressor
      return chunk ? stream.end(Buffer.from(chunk, encoding)) : stream.end()
    }
    res.on = function on(type, listener) {
      if (!listeners || type !== 'drain') {
        return _on.call(this, type, listener)
      }
      if (stream) {
        return stream.on(type, listener)
      }
      // buffer listeners for future stream
      listeners.push([type, listener])
      return this
    }
    // give up on compression: restore buffered listeners onto the raw response
    function nocompress(msg) {
      debug('no compression: %s', msg)
      addListeners(res, _on, listeners)
      listeners = null
    }
    onHeaders(res, function onResponseHeaders() {
      // determine if request is filtered
      if (!filter(req, res)) {
        nocompress('filtered')
        return
      }
      // determine if the entity should be transformed
      if (!shouldTransform(req, res)) {
        nocompress('no transform')
        return
      }
      // vary
      vary(res, 'Accept-Encoding')
      // content-length below threshold
      if (
        Number(res.getHeader('Content-Length')) < threshold ||
        length < threshold
      ) {
        nocompress('size below threshold')
        return
      }
      const encoding = res.getHeader('Content-Encoding') || 'identity'
      // already encoded
      if (encoding !== 'identity') {
        nocompress('already encoded')
        return
      }
      // head
      if (req.method === 'HEAD') {
        nocompress('HEAD request')
        return
      }
      const contentType = res.getHeader('Content-Type')
      // compression method
      const accept = accepts(req)
      // send in each compression method separately to ignore client preference and
      // instead enforce server preference. also, server-sent events (mime type of
      // text/event-stream) require flush functionality, so skip brotli in that
      // case.
      const method =
        (contentType !== 'text/event-stream' && accept.encoding('br')) ||
        accept.encoding('gzip') ||
        accept.encoding('deflate') ||
        accept.encoding('identity')
      // negotiation failed
      if (!method || method === 'identity') {
        nocompress('not acceptable')
        return
      }
      // do we have this coding/url/etag combo in the cache?
      const etag = res.getHeader('ETag') || null
      const cacheable =
        cache &&
        shouldCache(req, res) &&
        etag &&
        res.statusCode >= 200 &&
        res.statusCode < 300
      if (cacheable) {
        const buffer = cache.lookup(method, req.url, etag)
        if (buffer) {
          // the rest of the code expects a duplex stream, so
          // make a duplex stream that just ignores its input
          stream = new BufferDuplex(buffer)
        }
      }
      // if stream is not assigned, we got a cache miss and need to compress
      // the result
      if (!stream) {
        // compression stream
        debug('%s compression', method)
        switch (method) {
          case 'br':
            stream = iltorb.compressStream(brotliOpts)
            // brotli has no flush method. add a dummy flush method here.
            stream.flush = dummyBrotliFlush
            break
          case 'gzip':
            stream = zlib.createGzip(zlibOpts)
            break
          case 'deflate':
            stream = zlib.createDeflate(zlibOpts)
            break
        }
        // if it is cacheable, let's keep hold of the compressed chunks and cache
        // them once the compression stream ends.
        if (cacheable) {
          const chunks = []
          stream.on('data', chunk => {
            chunks.push(chunk)
          })
          stream.on('end', () => {
            cache.add(method, req.url, etag, chunks)
          })
        }
      }
      // add buffered listeners to stream
      addListeners(stream, stream.on, listeners)
      // header fields
      res.setHeader('Content-Encoding', method)
      res.removeHeader('Content-Length')
      // pump compressed output into the raw response, honoring backpressure
      stream.on('data', function onStreamData(chunk) {
        if (_write.call(res, chunk) === false) {
          stream.pause()
        }
      })
      stream.on('end', function onStreamEnd() {
        _end.call(res)
      })
      _on.call(res, 'drain', function onResponseDrain() {
        stream.resume()
      })
    })
    next()
  }
}
/**
* Add bufferred listeners to stream
* @private
*/
/**
 * Replay buffered (type, listener) pairs onto a stream via the given
 * subscription function.
 * @private
 */
function addListeners(stream, on, listeners) {
  for (const args of listeners) {
    on.apply(stream, args)
  }
}
/**
* Get the length of a given chunk
*/
/**
 * Compute the byte length of a response chunk; nullish/empty chunks count
 * as zero. Strings are measured in the given encoding.
 */
function chunkLength(chunk, encoding) {
  if (!chunk) {
    return 0
  }
  return Buffer.isBuffer(chunk)
    ? chunk.length
    : Buffer.byteLength(chunk, encoding)
}
/**
* Default filter function.
* @private
*/
/**
 * Default filter: compress only when the response Content-Type is present
 * and reported compressible.
 * @private
 */
function shouldCompress(req, res) {
  const type = res.getHeader('Content-Type')
  const compressibleType = type !== undefined && compressible(type)
  if (!compressibleType) {
    debug('%s not compressible', type)
    return false
  }
  return true
}
/**
* Determine if the entity should be transformed.
* @private
*/
function shouldTransform(req, res) {
const cacheControl = res.getHeader('Cache-Control')
Loading ...