Add brotli support

Ken-Håvard Lieng 2017-04-14 04:33:44 +02:00
parent 326161a563
commit e5832d2c58
29 changed files with 13892 additions and 415 deletions

File diff suppressed because one or more lines are too long

View File

@ -6,12 +6,29 @@ var gulp = require('gulp');
var gutil = require('gulp-util');
var nano = require('gulp-cssnano');
var autoprefixer = require('gulp-autoprefixer');
var gzip = require('gulp-gzip');
var concat = require('gulp-concat');
var cache = require('gulp-cached');
var express = require('express');
var proxy = require('express-http-proxy');
var webpack = require('webpack');
var through = require('through2');
var br = require('brotli');
function brotli(opts) {
return through.obj(function(file, enc, callback) {
if (file.isNull()) {
return callback(null, file);
}
if (file.isStream()) {
this.emit('error', new gutil.PluginError('brotli', 'Streams not supported'));
} else if (file.isBuffer()) {
file.path += '.br';
file.contents = new Buffer(br.compress(file.contents, opts).buffer);
return callback(null, file);
}
});
}
gulp.task('css', function() {
return gulp.src(['src/css/fonts.css', 'src/css/fontello.css', 'src/css/style.css'])
@ -47,32 +64,32 @@ gulp.task('fonts', function() {
gulp.task('fonts:woff', function() {
return gulp.src('src/font/*(*.woff|*.woff2)')
.pipe(gulp.dest('dist/gz/font'));
.pipe(gulp.dest('dist/br/font'));
});
gulp.task('config', function() {
return gulp.src('../config.default.toml')
.pipe(gulp.dest('dist/gz'));
.pipe(gulp.dest('dist/br'));
});
function compress() {
return gulp.src(['dist/**/!(*.gz|*.woff|*.woff2)', '!dist/{gz,gz/**}'])
.pipe(gzip())
.pipe(gulp.dest('dist/gz'));
return gulp.src(['dist/**/!(*.br|*.woff|*.woff2)', '!dist/{br,br/**}'])
.pipe(brotli({ quality: 11 }))
.pipe(gulp.dest('dist/br'));
}
gulp.task('gzip', ['css', 'js', 'fonts'], compress);
gulp.task('gzip:dev', ['css', 'fonts'], compress);
gulp.task('compress', ['css', 'js', 'fonts'], compress);
gulp.task('compress:dev', ['css', 'fonts'], compress);
gulp.task('bindata', ['gzip', 'config'], function(cb) {
exec('go-bindata -nomemcopy -nocompress -pkg assets -o ../assets/bindata.go -prefix "dist/gz" dist/gz/...', cb);
gulp.task('bindata', ['compress', 'config'], function(cb) {
exec('go-bindata -nomemcopy -nocompress -pkg assets -o ../assets/bindata.go -prefix "dist/br" dist/br/...', cb);
});
gulp.task('bindata:dev', ['gzip:dev', 'config'], function(cb) {
exec('go-bindata -debug -pkg assets -o ../assets/bindata.go -prefix "dist/gz" dist/gz/...', cb);
gulp.task('bindata:dev', ['compress:dev', 'config'], function(cb) {
exec('go-bindata -debug -pkg assets -o ../assets/bindata.go -prefix "dist/br" dist/br/...', cb);
});
gulp.task('dev', ['css', 'fonts', 'fonts:woff', 'config', 'gzip:dev', 'bindata:dev'], function() {
gulp.task('dev', ['css', 'fonts', 'fonts:woff', 'config', 'compress:dev', 'bindata:dev'], function() {
gulp.watch('src/css/*.css', ['css']);
var config = require('./webpack.config.dev.js');
@ -104,6 +121,6 @@ gulp.task('dev', ['css', 'fonts', 'fonts:woff', 'config', 'gzip:dev', 'bindata:d
});
});
gulp.task('build', ['css', 'js', 'fonts', 'fonts:woff', 'config', 'gzip', 'bindata']);
gulp.task('build', ['css', 'js', 'fonts', 'fonts:woff', 'config', 'compress', 'bindata']);
gulp.task('default', ['dev']);

View File

@ -13,6 +13,7 @@
"babel-preset-es2015": "^6.22.0",
"babel-preset-react": "^6.23.0",
"babel-preset-stage-0": "^6.22.0",
"brotli": "^1.3.1",
"css-loader": "^0.28.0",
"eslint": "^3.15.0",
"eslint-config-airbnb": "^14.1.0",
@ -27,9 +28,9 @@
"gulp-cached": "^1.1.1",
"gulp-concat": "^2.6.1",
"gulp-cssnano": "^2.1.2",
"gulp-gzip": "1.4.0",
"gulp-util": "^3.0.8",
"style-loader": "^0.16.0",
"through2": "^2.0.3",
"webpack": "^2.2.1",
"webpack-dev-middleware": "^1.10.0",
"webpack-hot-middleware": "^2.17.0"

View File

@ -72,18 +72,10 @@ ansi-html@0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/ansi-html/-/ansi-html-0.0.7.tgz#813584021962a9e9e6fd039f940d12f56ca7859e"
ansi-regex@^0.2.0, ansi-regex@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-0.2.1.tgz#0d8e946967a3d8143f93e24e298525fc1b2235f9"
ansi-regex@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df"
ansi-styles@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.1.0.tgz#eaecbf66cd706882760b2f4691582b8f55d7a7de"
ansi-styles@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe"
@ -136,10 +128,6 @@ array-differ@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-1.0.0.tgz#eff52e3758249d33be402b8bb8e564bb2b5d4031"
array-find-index@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/array-find-index/-/array-find-index-1.0.2.tgz#df010aa1287e164bbda6f9723b0a96a1ec4187a1"
array-flatten@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
@ -953,7 +941,7 @@ base64-arraybuffer@^0.1.5:
version "0.1.5"
resolved "https://registry.yarnpkg.com/base64-arraybuffer/-/base64-arraybuffer-0.1.5.tgz#73926771923b5a19747ad666aa5cd4bf9c6e9ce8"
base64-js@^1.0.2:
base64-js@^1.0.2, base64-js@^1.1.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.0.tgz#a39992d723584811982be5e290bb6a53d86700f1"
@ -1010,6 +998,12 @@ brorand@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f"
brotli@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/brotli/-/brotli-1.3.1.tgz#352a6f3f6973c5a74fd4be04aba40b337b3b6a7e"
dependencies:
base64-js "^1.1.2"
browserify-aes@^1.0.0, browserify-aes@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.0.6.tgz#5e7725dbdef1fd5930d4ebab48567ce451c48a0a"
@ -1096,10 +1090,6 @@ bytes@2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-2.4.0.tgz#7d97196f9d5baf7f6935e25985549edd2a6c2339"
bytes@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-0.3.0.tgz#78e2e0e28c7f9c7b988ea8aee0db4d5fa9941935"
caller-path@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-0.1.0.tgz#94085ef63581ecd3daa92444a8fe94e82577751f"
@ -1110,21 +1100,10 @@ callsites@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/callsites/-/callsites-0.2.0.tgz#afab96262910a7f33c19a5775825c69f34e350ca"
camelcase-keys@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/camelcase-keys/-/camelcase-keys-2.1.0.tgz#308beeaffdf28119051efa1d932213c91b8f92e7"
dependencies:
camelcase "^2.0.0"
map-obj "^1.0.0"
camelcase@^1.0.2:
version "1.2.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39"
camelcase@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-2.1.1.tgz#7c1d16d679a1bbe59ca02cacecfb011e201f5a1f"
camelcase@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a"
@ -1153,16 +1132,6 @@ center-align@^0.1.1:
align-text "^0.1.3"
lazy-cache "^1.0.3"
chalk@^0.5.0:
version "0.5.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.5.1.tgz#663b3a648b68b55d04690d49167aa837858f2174"
dependencies:
ansi-styles "^1.1.0"
escape-string-regexp "^1.0.0"
has-ansi "^0.1.0"
strip-ansi "^0.3.0"
supports-color "^0.2.0"
chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
@ -1238,7 +1207,7 @@ clone-buffer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58"
clone-stats@^0.0.1, clone-stats@~0.0.1:
clone-stats@^0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-0.0.1.tgz#b88f94a82cf38b8791d58046ea4029ad88ca99d1"
@ -1525,12 +1494,6 @@ csso@~2.3.1:
clap "^1.0.9"
source-map "^0.5.3"
currently-unhandled@^0.4.1:
version "0.4.1"
resolved "https://registry.yarnpkg.com/currently-unhandled/-/currently-unhandled-0.4.1.tgz#988df33feab191ef799a61369dd76c17adf957ea"
dependencies:
array-find-index "^1.0.1"
d@1:
version "1.0.0"
resolved "https://registry.yarnpkg.com/d/-/d-1.0.0.tgz#754bb5bfe55451da69a58b94d45f4c5b0462d58f"
@ -1551,13 +1514,6 @@ date-now@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b"
dateformat@^1.0.7-1.2.3:
version "1.0.12"
resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-1.0.12.tgz#9f124b67594c937ff706932e4a642cca8dbbfee9"
dependencies:
get-stdin "^4.0.1"
meow "^3.3.0"
dateformat@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/dateformat/-/dateformat-2.0.0.tgz#2743e3abb5c3fc2462e527dca445e04e9f4dee17"
@ -1854,7 +1810,7 @@ escape-html@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
escape-string-regexp@^1.0.0, escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
@ -2347,10 +2303,6 @@ get-caller-file@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5"
get-stdin@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe"
getpass@^0.1.1:
version "0.1.6"
resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.6.tgz#283ffd9fc1256840875311c1b60e8c40187110e6"
@ -2521,28 +2473,6 @@ gulp-cssnano@^2.1.2:
object-assign "^4.0.1"
vinyl-sourcemaps-apply "^0.2.1"
gulp-gzip@1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/gulp-gzip/-/gulp-gzip-1.4.0.tgz#5ff8dff837cac2ebc2c89743dc0ac76e2be5e6c2"
dependencies:
bytes "^0.3.0"
gulp-util "^2.2.14"
stream-to-array "~1.0.0"
through2 "^0.4.1"
gulp-util@^2.2.14:
version "2.2.20"
resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-2.2.20.tgz#d7146e5728910bd8f047a6b0b1e549bc22dbd64c"
dependencies:
chalk "^0.5.0"
dateformat "^1.0.7-1.2.3"
lodash._reinterpolate "^2.4.1"
lodash.template "^2.4.1"
minimist "^0.2.0"
multipipe "^0.1.0"
through2 "^0.5.0"
vinyl "^0.2.1"
gulp-util@^3.0.0, gulp-util@^3.0.6, gulp-util@^3.0.8:
version "3.0.8"
resolved "https://registry.yarnpkg.com/gulp-util/-/gulp-util-3.0.8.tgz#0054e1e744502e27c04c187c3ecc505dd54bbb4f"
@ -2601,12 +2531,6 @@ har-validator@~4.2.1:
ajv "^4.9.1"
har-schema "^1.0.5"
has-ansi@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-0.1.0.tgz#84f265aae8c0e6a88a12d7022894b7568894c62e"
dependencies:
ansi-regex "^0.2.0"
has-ansi@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
@ -2753,12 +2677,6 @@ imurmurhash@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
indent-string@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-2.1.0.tgz#8e2d48348742121b4a8218b7a137e9a52049dc80"
dependencies:
repeating "^2.0.0"
indexes-of@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607"
@ -3207,36 +3125,14 @@ lodash._basevalues@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/lodash._basevalues/-/lodash._basevalues-3.0.0.tgz#5b775762802bde3d3297503e26300820fdf661b7"
lodash._escapehtmlchar@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._escapehtmlchar/-/lodash._escapehtmlchar-2.4.1.tgz#df67c3bb6b7e8e1e831ab48bfa0795b92afe899d"
dependencies:
lodash._htmlescapes "~2.4.1"
lodash._escapestringchar@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._escapestringchar/-/lodash._escapestringchar-2.4.1.tgz#ecfe22618a2ade50bfeea43937e51df66f0edb72"
lodash._getnative@^3.0.0:
version "3.9.1"
resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5"
lodash._htmlescapes@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._htmlescapes/-/lodash._htmlescapes-2.4.1.tgz#32d14bf0844b6de6f8b62a051b4f67c228b624cb"
lodash._isiterateecall@^3.0.0:
version "3.0.9"
resolved "https://registry.yarnpkg.com/lodash._isiterateecall/-/lodash._isiterateecall-3.0.9.tgz#5203ad7ba425fae842460e696db9cf3e6aac057c"
lodash._isnative@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._isnative/-/lodash._isnative-2.4.1.tgz#3ea6404b784a7be836c7b57580e1cdf79b14832c"
lodash._objecttypes@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._objecttypes/-/lodash._objecttypes-2.4.1.tgz#7c0b7f69d98a1f76529f890b0cdb1b4dfec11c11"
lodash._reescape@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/lodash._reescape/-/lodash._reescape-3.0.0.tgz#2b1d6f5dfe07c8a355753e5f27fac7f1cde1616a"
@ -3245,31 +3141,14 @@ lodash._reevaluate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/lodash._reevaluate/-/lodash._reevaluate-3.0.0.tgz#58bc74c40664953ae0b124d806996daca431e2ed"
lodash._reinterpolate@^2.4.1, lodash._reinterpolate@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-2.4.1.tgz#4f1227aa5a8711fc632f5b07a1f4607aab8b3222"
lodash._reinterpolate@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/lodash._reinterpolate/-/lodash._reinterpolate-3.0.0.tgz#0ccf2d89166af03b3663c796538b75ac6e114d9d"
lodash._reunescapedhtml@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._reunescapedhtml/-/lodash._reunescapedhtml-2.4.1.tgz#747c4fc40103eb3bb8a0976e571f7a2659e93ba7"
dependencies:
lodash._htmlescapes "~2.4.1"
lodash.keys "~2.4.1"
lodash._root@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
lodash._shimkeys@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash._shimkeys/-/lodash._shimkeys-2.4.1.tgz#6e9cc9666ff081f0b5a6c978b83e242e6949d203"
dependencies:
lodash._objecttypes "~2.4.1"
lodash.assignwith@^4.0.7:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.assignwith/-/lodash.assignwith-4.2.0.tgz#127a97f02adc41751a954d24b0de17e100e038eb"
@ -3286,27 +3165,12 @@ lodash.defaults@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-4.2.0.tgz#d09178716ffea4dde9e5fb7b37f6f0802274580c"
lodash.defaults@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.defaults/-/lodash.defaults-2.4.1.tgz#a7e8885f05e68851144b6e12a8f3678026bc4c54"
dependencies:
lodash._objecttypes "~2.4.1"
lodash.keys "~2.4.1"
lodash.escape@^3.0.0:
version "3.2.0"
resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-3.2.0.tgz#995ee0dc18c1b48cc92effae71a10aab5b487698"
dependencies:
lodash._root "^3.0.0"
lodash.escape@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.escape/-/lodash.escape-2.4.1.tgz#2ce12c5e084db0a57dda5e5d1eeeb9f5d175a3b4"
dependencies:
lodash._escapehtmlchar "~2.4.1"
lodash._reunescapedhtml "~2.4.1"
lodash.keys "~2.4.1"
lodash.isarguments@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a"
@ -3319,12 +3183,6 @@ lodash.isempty@^4.2.1:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.isempty/-/lodash.isempty-4.4.0.tgz#6f86cbedd8be4ec987be9aaf33c9684db1b31e7e"
lodash.isobject@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-2.4.1.tgz#5a2e47fe69953f1ee631a7eba1fe64d2d06558f5"
dependencies:
lodash._objecttypes "~2.4.1"
lodash.isplainobject@^4.0.4:
version "4.0.6"
resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb"
@ -3341,14 +3199,6 @@ lodash.keys@^3.0.0:
lodash.isarguments "^3.0.0"
lodash.isarray "^3.0.0"
lodash.keys@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-2.4.1.tgz#48dea46df8ff7632b10d706b8acb26591e2b3727"
dependencies:
lodash._isnative "~2.4.1"
lodash._shimkeys "~2.4.1"
lodash.isobject "~2.4.1"
lodash.mapvalues@^4.4.0:
version "4.6.0"
resolved "https://registry.yarnpkg.com/lodash.mapvalues/-/lodash.mapvalues-4.6.0.tgz#1bafa5005de9dd6f4f26668c30ca37230cc9689c"
@ -3365,18 +3215,6 @@ lodash.restparam@^3.0.0:
version "3.6.1"
resolved "https://registry.yarnpkg.com/lodash.restparam/-/lodash.restparam-3.6.1.tgz#936a4e309ef330a7645ed4145986c85ae5b20805"
lodash.template@^2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-2.4.1.tgz#9e611007edf629129a974ab3c48b817b3e1cf20d"
dependencies:
lodash._escapestringchar "~2.4.1"
lodash._reinterpolate "~2.4.1"
lodash.defaults "~2.4.1"
lodash.escape "~2.4.1"
lodash.keys "~2.4.1"
lodash.templatesettings "~2.4.1"
lodash.values "~2.4.1"
lodash.template@^3.0.0:
version "3.6.2"
resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-3.6.2.tgz#f8cdecc6169a255be9098ae8b0c53d378931d14f"
@ -3398,23 +3236,10 @@ lodash.templatesettings@^3.0.0:
lodash._reinterpolate "^3.0.0"
lodash.escape "^3.0.0"
lodash.templatesettings@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.templatesettings/-/lodash.templatesettings-2.4.1.tgz#ea76c75d11eb86d4dbe89a83893bb861929ac699"
dependencies:
lodash._reinterpolate "~2.4.1"
lodash.escape "~2.4.1"
lodash.uniq@^4.5.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
lodash.values@~2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/lodash.values/-/lodash.values-2.4.1.tgz#abf514436b3cb705001627978cbcf30b1280eea4"
dependencies:
lodash.keys "~2.4.1"
lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.2.0, lodash@^4.2.1, lodash@^4.3.0, lodash@^4.6.1:
version "4.17.4"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
@ -3433,13 +3258,6 @@ loose-envify@^1.0.0, loose-envify@^1.1.0, loose-envify@^1.2.0, loose-envify@^1.3
dependencies:
js-tokens "^3.0.0"
loud-rejection@^1.0.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/loud-rejection/-/loud-rejection-1.6.0.tgz#5b46f80147edee578870f086d04821cf998e551f"
dependencies:
currently-unhandled "^0.4.1"
signal-exit "^3.0.0"
lru-cache@2:
version "2.7.3"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-2.7.3.tgz#6d4524e8b955f95d4f5b58851ce21dd72fb4e952"
@ -3452,10 +3270,6 @@ map-cache@^0.2.0:
version "0.2.2"
resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf"
map-obj@^1.0.0, map-obj@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d"
math-expression-evaluator@^1.2.14:
version "1.2.16"
resolved "https://registry.yarnpkg.com/math-expression-evaluator/-/math-expression-evaluator-1.2.16.tgz#b357fa1ca9faefb8e48d10c14ef2bcb2d9f0a7c9"
@ -3471,21 +3285,6 @@ memory-fs@^0.4.0, memory-fs@~0.4.1:
errno "^0.1.3"
readable-stream "^2.0.1"
meow@^3.3.0:
version "3.7.0"
resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb"
dependencies:
camelcase-keys "^2.0.0"
decamelize "^1.1.2"
loud-rejection "^1.0.0"
map-obj "^1.0.1"
minimist "^1.1.3"
normalize-package-data "^2.3.4"
object-assign "^4.0.1"
read-pkg-up "^1.0.1"
redent "^1.0.0"
trim-newlines "^1.0.0"
merge-descriptors@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
@ -3570,11 +3369,7 @@ minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
minimist@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.2.0.tgz#4dffe525dae2b864c66c2e23c6271d7afdecefce"
minimist@^1.1.0, minimist@^1.1.3, minimist@^1.2.0:
minimist@^1.1.0, minimist@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
@ -3592,7 +3387,7 @@ ms@0.7.2:
version "0.7.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765"
multipipe@^0.1.0, multipipe@^0.1.2:
multipipe@^0.1.2:
version "0.1.2"
resolved "https://registry.yarnpkg.com/multipipe/-/multipipe-0.1.2.tgz#2a8f2ddf70eed564dff2d57f1e1a137d9f05078b"
dependencies:
@ -3674,7 +3469,7 @@ nopt@^4.0.1:
abbrev "1"
osenv "^0.1.4"
normalize-package-data@^2.3.2, normalize-package-data@^2.3.4:
normalize-package-data@^2.3.2:
version "2.3.6"
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.3.6.tgz#498fa420c96401f787402ba21e600def9f981fff"
dependencies:
@ -3739,10 +3534,6 @@ object-keys@^1.0.10, object-keys@^1.0.8:
version "1.0.11"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.0.11.tgz#c54601778ad560f1142ce0e01bcca8b56d13426d"
object-keys@~0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-0.4.0.tgz#28a6aae7428dd2c3a92f3d95f21335dd204e0336"
object.assign@^4.0.4:
version "4.0.4"
resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.0.4.tgz#b1c9cc044ef1b9fe63606fc141abbb32e14730cc"
@ -4413,7 +4204,7 @@ read-pkg@^1.0.0:
normalize-package-data "^2.3.2"
path-type "^1.0.0"
"readable-stream@>=1.0.33-1 <1.1.0-0", readable-stream@~1.0.17:
"readable-stream@>=1.0.33-1 <1.1.0-0":
version "1.0.34"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.0.34.tgz#125820e34bc842d2f2aaafafe4c2916ee32c157c"
dependencies:
@ -4474,13 +4265,6 @@ redbox-react@^1.2.5:
object-assign "^4.0.1"
prop-types "^15.5.4"
redent@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/redent/-/redent-1.0.0.tgz#cf916ab1fd5f1f16dfb20822dd6ec7f730c2afde"
dependencies:
indent-string "^2.1.0"
strip-indent "^1.0.1"
reduce-css-calc@^1.2.6:
version "1.3.0"
resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz#747c914e049614a4c9cfbba629871ad1d2927716"
@ -4691,14 +4475,14 @@ sax@~1.2.1:
version "1.2.2"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828"
"semver@2 || 3 || 4 || 5", semver@^4.1.0:
version "4.3.6"
resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da"
semver@^5.3.0:
"semver@2 || 3 || 4 || 5", semver@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f"
semver@^4.1.0:
version "4.3.6"
resolved "https://registry.yarnpkg.com/semver/-/semver-4.3.6.tgz#300bc6e0e86374f7ba61068b5b1ecd57fc6532da"
send@0.15.1:
version "0.15.1"
resolved "https://registry.yarnpkg.com/send/-/send-0.15.1.tgz#8a02354c26e6f5cca700065f5f0cdeba90ec7b5f"
@ -4878,10 +4662,6 @@ stream-http@^2.3.1:
to-arraybuffer "^1.0.0"
xtend "^4.0.0"
stream-to-array@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/stream-to-array/-/stream-to-array-1.0.0.tgz#94166bb29f3ea24f082d2f8cd3ebb2cc0d6eca2c"
strict-uri-encode@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
@ -4915,12 +4695,6 @@ stringstream@~0.0.4:
version "0.0.5"
resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
strip-ansi@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.3.0.tgz#25f48ea22ca79187f3174a4db8759347bb126220"
dependencies:
ansi-regex "^0.2.1"
strip-ansi@^3.0.0, strip-ansi@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf"
@ -4944,12 +4718,6 @@ strip-bom@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
strip-indent@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-1.0.1.tgz#0c7962a6adefa7bbd4ac366460a638552ae1a0a2"
dependencies:
get-stdin "^4.0.1"
strip-json-comments@~2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
@ -4960,10 +4728,6 @@ style-loader@^0.16.0:
dependencies:
loader-utils "^1.0.2"
supports-color@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-0.2.0.tgz#d92de2694eb3f67323973d7ae3d8b55b4c22190a"
supports-color@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
@ -5030,20 +4794,6 @@ text-table@~0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
through2@^0.4.1:
version "0.4.2"
resolved "https://registry.yarnpkg.com/through2/-/through2-0.4.2.tgz#dbf5866031151ec8352bb6c4db64a2292a840b9b"
dependencies:
readable-stream "~1.0.17"
xtend "~2.1.1"
through2@^0.5.0:
version "0.5.1"
resolved "https://registry.yarnpkg.com/through2/-/through2-0.5.1.tgz#dfdd012eb9c700e2323fd334f38ac622ab372da7"
dependencies:
readable-stream "~1.0.17"
xtend "~3.0.0"
through2@^0.6.1:
version "0.6.5"
resolved "https://registry.yarnpkg.com/through2/-/through2-0.6.5.tgz#41ab9c67b29d57209071410e1d7a7a968cd3ad48"
@ -5051,7 +4801,7 @@ through2@^0.6.1:
readable-stream ">=1.0.33-1 <1.1.0-0"
xtend ">=4.0.0 <4.1.0-0"
through2@^2.0.0, through2@^2.0.1:
through2@^2.0.0, through2@^2.0.1, through2@^2.0.3:
version "2.0.3"
resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.3.tgz#0004569b37c7c74ba39c43f3ced78d1ad94140be"
dependencies:
@ -5092,10 +4842,6 @@ tough-cookie@~2.3.0:
dependencies:
punycode "^1.4.1"
trim-newlines@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-1.0.0.tgz#5887966bb582a4503a41eb524f7d35011815a613"
trim-right@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/trim-right/-/trim-right-1.0.1.tgz#cb2e1203067e0c8de1f614094b9fe45704ea6003"
@ -5267,12 +5013,6 @@ vinyl-sourcemaps-apply@^0.2.0, vinyl-sourcemaps-apply@^0.2.1:
dependencies:
source-map "^0.5.1"
vinyl@^0.2.1:
version "0.2.3"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-0.2.3.tgz#bca938209582ec5a49ad538a00fa1f125e513252"
dependencies:
clone-stats "~0.0.1"
vinyl@^0.4.0:
version "0.4.6"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-0.4.6.tgz#2f356c87a550a255461f36bbeb2a5ba8bf784847"
@ -5427,16 +5167,6 @@ write@^0.2.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.1.tgz#a5c6d532be656e23db820efb943a1f04998d63af"
xtend@~2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-2.1.2.tgz#6efecc2a4dad8e6962c4901b337ce7ba87b5d28b"
dependencies:
object-keys "~0.4.0"
xtend@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-3.0.0.tgz#5cce7407baf642cba7becda568111c493f59665a"
y18n@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/y18n/-/y18n-3.2.1.tgz#6d15fba884c08679c0d77e88e7759e811e07fa41"

View File

@ -5,6 +5,7 @@ import (
"compress/gzip"
"crypto/md5"
"encoding/base64"
"io"
"io/ioutil"
"log"
"net/http"
@ -14,34 +15,39 @@ import (
"strings"
"time"
"github.com/dsnet/compress/brotli"
"github.com/spf13/viper"
"github.com/khlieng/dispatch/assets"
)
const longCacheControl = "public, max-age=31536000"
const disabledCacheControl = "no-cache, no-store, must-revalidate"
type File struct {
Path string
Asset string
GzipAsset []byte
ContentType string
CacheControl string
Gzip bool
Compressed bool
}
var (
files = []*File{
&File{
Path: "bundle.js",
Asset: "bundle.js.gz",
Asset: "bundle.js.br",
ContentType: "text/javascript",
CacheControl: "max-age=31536000",
Gzip: true,
CacheControl: longCacheControl,
Compressed: true,
},
&File{
Path: "bundle.css",
Asset: "bundle.css.gz",
Asset: "bundle.css.br",
ContentType: "text/css",
CacheControl: "max-age=31536000",
Gzip: true,
CacheControl: longCacheControl,
Compressed: true,
},
}
@ -65,6 +71,21 @@ func initFileServer() {
hash := md5.Sum(data)
files[0].Path = "bundle." + base64.RawURLEncoding.EncodeToString(hash[:]) + ".js"
br, err := brotli.NewReader(bytes.NewReader(data), nil)
if err != nil {
log.Fatal(err)
}
buf := &bytes.Buffer{}
gzw, err := gzip.NewWriterLevel(buf, gzip.BestCompression)
if err != nil {
log.Fatal(err)
}
io.Copy(gzw, br)
gzw.Close()
files[0].GzipAsset = buf.Bytes()
data, err = assets.Asset(files[1].Asset)
if err != nil {
log.Fatal(err)
@ -73,21 +94,46 @@ func initFileServer() {
hash = md5.Sum(data)
files[1].Path = "bundle." + base64.RawURLEncoding.EncodeToString(hash[:]) + ".css"
br.Reset(bytes.NewReader(data))
buf = &bytes.Buffer{}
gzw.Reset(buf)
io.Copy(gzw, br)
gzw.Close()
files[1].GzipAsset = buf.Bytes()
fonts, err := assets.AssetDir("font")
if err != nil {
log.Fatal(err)
}
for _, font := range fonts {
p := strings.TrimSuffix(font, ".gz")
p := strings.TrimSuffix(font, ".br")
files = append(files, &File{
file := &File{
Path: path.Join("font", p),
Asset: path.Join("font", font),
ContentType: contentTypes[filepath.Ext(p)],
CacheControl: "max-age=31536000",
Gzip: strings.HasSuffix(font, ".gz"),
})
CacheControl: longCacheControl,
Compressed: strings.HasSuffix(font, ".br"),
}
if file.Compressed {
data, err = assets.Asset(file.Asset)
if err != nil {
log.Fatal(err)
}
br.Reset(bytes.NewReader(data))
buf = &bytes.Buffer{}
gzw.Reset(buf)
io.Copy(gzw, br)
gzw.Close()
file.GzipAsset = buf.Bytes()
}
files = append(files, file)
}
if viper.GetBool("https.hsts.enabled") && viper.GetBool("https.enabled") {
@ -146,7 +192,7 @@ func serveIndex(w http.ResponseWriter, r *http.Request) {
}
w.Header().Set("Content-Type", "text/html")
w.Header().Set("Cache-Control", "no-store")
w.Header().Set("Cache-Control", disabledCacheControl)
w.Header().Set("X-Content-Type-Options", "nosniff")
w.Header().Set("X-Frame-Options", "deny")
w.Header().Set("X-XSS-Protection", "1; mode=block")
@ -189,15 +235,21 @@ func serveFile(w http.ResponseWriter, r *http.Request, file *File) {
w.Header().Set("Content-Type", file.ContentType)
if file.Gzip && strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
w.Header().Set("Content-Encoding", "gzip")
if file.Compressed {
if strings.Contains(r.Header.Get("Accept-Encoding"), "br") {
w.Header().Set("Content-Encoding", "br")
w.Header().Set("Content-Length", strconv.Itoa(len(data)))
w.Write(data)
} else if !file.Gzip {
} else if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
w.Header().Set("Content-Encoding", "gzip")
w.Header().Set("Content-Length", strconv.Itoa(len(file.GzipAsset)))
w.Write(file.GzipAsset)
}
} else if !file.Compressed {
w.Header().Set("Content-Length", strconv.Itoa(len(data)))
w.Write(data)
} else {
gzr, err := gzip.NewReader(bytes.NewReader(data))
gzr, err := gzip.NewReader(bytes.NewReader(file.GzipAsset))
buf, err := ioutil.ReadAll(gzr)
if err != nil {
http.Error(w, "", http.StatusInternalServerError)
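The two changes above work together: at startup the server transcodes each brotli asset into a gzip alternate (so gzip-only clients are served from memory instead of triggering recompression per request), and at request time serveFile picks the best representation the client advertises in Accept-Encoding. Below is a minimal, self-contained sketch of that strategy in Go; it reuses the vendored github.com/dsnet/compress/brotli reader seen in this diff, but the names gzipFromBrotli and serveCompressed are illustrative, not part of the commit.

package server

import (
    "bytes"
    "compress/gzip"
    "io"
    "io/ioutil"
    "net/http"
    "strconv"
    "strings"

    "github.com/dsnet/compress/brotli"
)

// gzipFromBrotli transcodes a brotli-compressed asset into gzip once at
// startup, mirroring what initFileServer does for bundle.js, bundle.css
// and the compressed fonts.
func gzipFromBrotli(brData []byte) ([]byte, error) {
    br, err := brotli.NewReader(bytes.NewReader(brData), nil)
    if err != nil {
        return nil, err
    }
    buf := &bytes.Buffer{}
    gzw, err := gzip.NewWriterLevel(buf, gzip.BestCompression)
    if err != nil {
        return nil, err
    }
    if _, err := io.Copy(gzw, br); err != nil {
        return nil, err
    }
    if err := gzw.Close(); err != nil {
        return nil, err
    }
    return buf.Bytes(), nil
}

// serveCompressed negotiates the response encoding: brotli bytes as-is,
// the pregenerated gzip alternate, or plain bytes decompressed on the fly
// for clients that accept neither encoding.
func serveCompressed(w http.ResponseWriter, r *http.Request, brData, gzData []byte) {
    accept := r.Header.Get("Accept-Encoding")
    switch {
    case strings.Contains(accept, "br"):
        w.Header().Set("Content-Encoding", "br")
        w.Header().Set("Content-Length", strconv.Itoa(len(brData)))
        w.Write(brData)
    case strings.Contains(accept, "gzip"):
        w.Header().Set("Content-Encoding", "gzip")
        w.Header().Set("Content-Length", strconv.Itoa(len(gzData)))
        w.Write(gzData)
    default:
        gzr, err := gzip.NewReader(bytes.NewReader(gzData))
        if err != nil {
            http.Error(w, "", http.StatusInternalServerError)
            return
        }
        plain, err := ioutil.ReadAll(gzr)
        if err != nil {
            http.Error(w, "", http.StatusInternalServerError)
            return
        }
        w.Header().Set("Content-Length", strconv.Itoa(len(plain)))
        w.Write(plain)
    }
}

Storing only the brotli bytes in bindata and deriving the gzip copy at boot keeps the binary small while still serving every class of client from memory.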

vendor/github.com/dsnet/compress/brotli/bit_reader.go (generated, vendored; new file, 373 lines)

@ -0,0 +1,373 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "io"
import "bufio"
// The bitReader preserves the property that it will never read more bytes
// than necessary. However, this feature dramatically hurts performance
// because every byte needs to be obtained through a ReadByte method call.
// Furthermore, the decoding of variable-length codes in ReadSymbol often
// requires multiple passes before it knows the exact bit-length of the code.
//
// Thus, to improve performance, if the underlying byteReader is a bufio.Reader,
// then the bitReader will use the Peek and Discard methods to fill the internal
// bit buffer with as many bits as possible, allowing the TryReadBits and
// TryReadSymbol methods to often succeed on the first try.
type byteReader interface {
io.Reader
io.ByteReader
}
type bitReader struct {
rd byteReader
bufBits uint64 // Buffer to hold some bits
numBits uint // Number of valid bits in bufBits
offset int64 // Number of bytes read from the underlying io.Reader
// These fields are only used if rd is a bufio.Reader.
bufRd *bufio.Reader
bufPeek []byte // Buffer for the Peek data
discardBits int // Number of bits to discard from bufio.Reader
fedBits uint // Number of bits fed in last call to FeedBits
// Local copy of decoders to reduce memory allocations.
prefix prefixDecoder
}
func (br *bitReader) Init(r io.Reader) {
*br = bitReader{prefix: br.prefix}
if rr, ok := r.(byteReader); ok {
br.rd = rr
} else {
br.rd = bufio.NewReader(r)
}
if brd, ok := br.rd.(*bufio.Reader); ok {
br.bufRd = brd
}
}
// FlushOffset updates the read offset of the underlying byteReader.
// If the byteReader is a bufio.Reader, then this calls Discard to update the
// read offset.
func (br *bitReader) FlushOffset() int64 {
if br.bufRd == nil {
return br.offset
}
// Update the number of total bits to discard.
br.discardBits += int(br.fedBits - br.numBits)
br.fedBits = br.numBits
// Discard some bytes to update read offset.
nd := (br.discardBits + 7) / 8 // Round up to nearest byte
nd, _ = br.bufRd.Discard(nd)
br.discardBits -= nd * 8 // -7..0
br.offset += int64(nd)
// These are invalid after Discard.
br.bufPeek = nil
return br.offset
}
// FeedBits ensures that at least nb bits exist in the bit buffer.
// If the underlying byteReader is a bufio.Reader, then this will fill the
// bit buffer with as many bits as possible, relying on Peek and Discard to
// properly advance the read offset. Otherwise, it will use ReadByte to fill the
// buffer with just the right number of bits.
func (br *bitReader) FeedBits(nb uint) {
if br.bufRd != nil {
br.discardBits += int(br.fedBits - br.numBits)
for {
if len(br.bufPeek) == 0 {
br.fedBits = br.numBits // Don't discard bits just added
br.FlushOffset()
var err error
cntPeek := 8 // Minimum Peek amount to make progress
if br.bufRd.Buffered() > cntPeek {
cntPeek = br.bufRd.Buffered()
}
br.bufPeek, err = br.bufRd.Peek(cntPeek)
br.bufPeek = br.bufPeek[int(br.numBits/8):] // Skip buffered bits
if len(br.bufPeek) == 0 {
if br.numBits >= nb {
break
}
if err == io.EOF {
err = io.ErrUnexpectedEOF
}
panic(err)
}
}
cnt := int(64-br.numBits) / 8
if cnt > len(br.bufPeek) {
cnt = len(br.bufPeek)
}
for _, c := range br.bufPeek[:cnt] {
br.bufBits |= uint64(c) << br.numBits
br.numBits += 8
}
br.bufPeek = br.bufPeek[cnt:]
if br.numBits > 56 {
break
}
}
br.fedBits = br.numBits
} else {
for br.numBits < nb {
c, err := br.rd.ReadByte()
if err != nil {
if err == io.EOF {
err = io.ErrUnexpectedEOF
}
panic(err)
}
br.bufBits |= uint64(c) << br.numBits
br.numBits += 8
br.offset++
}
}
}
// Read reads up to len(buf) bytes into buf.
func (br *bitReader) Read(buf []byte) (cnt int, err error) {
if br.numBits%8 != 0 {
return 0, Error("non-aligned bit buffer")
}
if br.numBits > 0 {
for cnt = 0; len(buf) > cnt && br.numBits > 0; cnt++ {
buf[cnt] = byte(br.bufBits)
br.bufBits >>= 8
br.numBits -= 8
}
} else {
br.FlushOffset()
cnt, err = br.rd.Read(buf)
br.offset += int64(cnt)
}
return cnt, err
}
// TryReadBits attempts to read nb bits using the contents of the bit buffer
// alone. It returns the value and whether it succeeded.
//
// This method is designed to be inlined for performance reasons.
func (br *bitReader) TryReadBits(nb uint) (uint, bool) {
if br.numBits < nb {
return 0, false
}
val := uint(br.bufBits & uint64(1<<nb-1))
br.bufBits >>= nb
br.numBits -= nb
return val, true
}
// ReadBits reads nb bits in LSB order from the underlying reader.
func (br *bitReader) ReadBits(nb uint) uint {
br.FeedBits(nb)
val := uint(br.bufBits & uint64(1<<nb-1))
br.bufBits >>= nb
br.numBits -= nb
return val
}
// ReadPads reads 0-7 bits from the bit buffer to achieve byte-alignment.
func (br *bitReader) ReadPads() uint {
nb := br.numBits % 8
val := uint(br.bufBits & uint64(1<<nb-1))
br.bufBits >>= nb
br.numBits -= nb
return val
}
// TryReadSymbol attempts to decode the next symbol using the contents of the
// bit buffer alone. It returns the decoded symbol and whether it succeeded.
//
// This method is designed to be inlined for performance reasons.
func (br *bitReader) TryReadSymbol(pd *prefixDecoder) (uint, bool) {
if br.numBits < uint(pd.minBits) || len(pd.chunks) == 0 {
return 0, false
}
chunk := pd.chunks[uint32(br.bufBits)&pd.chunkMask]
nb := uint(chunk & prefixCountMask)
if nb > br.numBits || nb > uint(pd.chunkBits) {
return 0, false
}
br.bufBits >>= nb
br.numBits -= nb
return uint(chunk >> prefixCountBits), true
}
// ReadSymbol reads the next prefix symbol using the provided prefixDecoder.
func (br *bitReader) ReadSymbol(pd *prefixDecoder) uint {
if len(pd.chunks) == 0 {
panic(ErrCorrupt) // Decode with empty tree
}
nb := uint(pd.minBits)
for {
br.FeedBits(nb)
chunk := pd.chunks[uint32(br.bufBits)&pd.chunkMask]
nb = uint(chunk & prefixCountMask)
if nb > uint(pd.chunkBits) {
linkIdx := chunk >> prefixCountBits
chunk = pd.links[linkIdx][uint32(br.bufBits>>pd.chunkBits)&pd.linkMask]
nb = uint(chunk & prefixCountMask)
}
if nb <= br.numBits {
br.bufBits >>= nb
br.numBits -= nb
return uint(chunk >> prefixCountBits)
}
}
}
// ReadOffset reads an offset value using the provided rangesCodes indexed by
// the given symbol.
func (br *bitReader) ReadOffset(sym uint, rcs []rangeCode) uint {
rc := rcs[sym]
return uint(rc.base) + br.ReadBits(uint(rc.bits))
}
// ReadPrefixCode reads the prefix definition from the stream and initializes
// the provided prefixDecoder. The value maxSyms is the alphabet size of the
// prefix code being generated. The actual number of representable symbols
// will be between 1 and maxSyms, inclusively.
func (br *bitReader) ReadPrefixCode(pd *prefixDecoder, maxSyms uint) {
hskip := br.ReadBits(2)
if hskip == 1 {
br.readSimplePrefixCode(pd, maxSyms)
} else {
br.readComplexPrefixCode(pd, maxSyms, hskip)
}
}
// readSimplePrefixCode reads the prefix code according to RFC section 3.4.
func (br *bitReader) readSimplePrefixCode(pd *prefixDecoder, maxSyms uint) {
var codes [4]prefixCode
nsym := int(br.ReadBits(2)) + 1
clen := neededBits(uint32(maxSyms))
for i := 0; i < nsym; i++ {
codes[i].sym = uint32(br.ReadBits(clen))
}
var copyLens = func(lens []uint) {
for i := 0; i < nsym; i++ {
codes[i].len = uint32(lens[i])
}
}
var compareSwap = func(i, j int) {
if codes[i].sym > codes[j].sym {
codes[i], codes[j] = codes[j], codes[i]
}
}
switch nsym {
case 1:
copyLens(simpleLens1[:])
case 2:
copyLens(simpleLens2[:])
compareSwap(0, 1)
case 3:
copyLens(simpleLens3[:])
compareSwap(0, 1)
compareSwap(0, 2)
compareSwap(1, 2)
case 4:
if tsel := br.ReadBits(1) == 1; !tsel {
copyLens(simpleLens4a[:])
} else {
copyLens(simpleLens4b[:])
}
compareSwap(0, 1)
compareSwap(2, 3)
compareSwap(0, 2)
compareSwap(1, 3)
compareSwap(1, 2)
}
if uint(codes[nsym-1].sym) >= maxSyms {
panic(ErrCorrupt) // Symbol goes beyond range of alphabet
}
pd.Init(codes[:nsym], true) // Must have 1..4 symbols
}
// readComplexPrefixCode reads the prefix code according to RFC section 3.5.
func (br *bitReader) readComplexPrefixCode(pd *prefixDecoder, maxSyms, hskip uint) {
// Read the code-lengths prefix table.
var codeCLensArr [len(complexLens)]prefixCode // Sorted, but may have holes
sum := 32
for _, sym := range complexLens[hskip:] {
clen := br.ReadSymbol(&decCLens)
if clen > 0 {
codeCLensArr[sym] = prefixCode{sym: uint32(sym), len: uint32(clen)}
if sum -= 32 >> clen; sum <= 0 {
break
}
}
}
codeCLens := codeCLensArr[:0] // Compact the array to have no holes
for _, c := range codeCLensArr {
if c.len > 0 {
codeCLens = append(codeCLens, c)
}
}
if len(codeCLens) < 1 {
panic(ErrCorrupt)
}
br.prefix.Init(codeCLens, true) // Must have 1..len(complexLens) symbols
// Use code-lengths table to decode rest of prefix table.
var codesArr [maxNumAlphabetSyms]prefixCode
var sym, repSymLast, repCntLast, clenLast uint = 0, 0, 0, 8
codes := codesArr[:0]
for sym, sum = 0, 32768; sym < maxSyms && sum > 0; {
clen := br.ReadSymbol(&br.prefix)
if clen < 16 {
// Literal bit-length symbol used.
if clen > 0 {
codes = append(codes, prefixCode{sym: uint32(sym), len: uint32(clen)})
clenLast = clen
sum -= 32768 >> clen
}
repSymLast = 0 // Reset last repeater symbol
sym++
} else {
// Repeater symbol used.
// 16: Repeat previous non-zero code-length
// 17: Repeat code length of zero
repSym := clen // Rename clen for better clarity
if repSym != repSymLast {
repCntLast = 0
repSymLast = repSym
}
nb := repSym - 14 // 2..3 bits
rep := br.ReadBits(nb) + 3 // 3..6 or 3..10
if repCntLast > 0 {
rep += (repCntLast - 2) << nb // Modify previous repeat count
}
repDiff := rep - repCntLast // Always positive
repCntLast = rep
if repSym == 16 {
clen := clenLast
for symEnd := sym + repDiff; sym < symEnd; sym++ {
codes = append(codes, prefixCode{sym: uint32(sym), len: uint32(clen)})
}
sum -= int(repDiff) * (32768 >> clen)
} else {
sym += repDiff
}
}
}
if len(codes) < 2 || sym > maxSyms {
panic(ErrCorrupt)
}
pd.Init(codes, true) // Must have 2..maxSyms symbols
}
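The header comment above is the key design note in this file: with a plain ReadByte loop every byte costs a method call, whereas a bufio.Reader lets the reader batch-fill its 64-bit buffer through Peek and settle the bookkeeping with Discard. The sketch below shows just that batching idea, under simplifying assumptions: it discards immediately instead of deferring through the discardBits accounting that bit_reader.go uses, and fillBits is an illustrative name, not the vendored API.

package main

import (
    "bufio"
    "fmt"
    "strings"
)

// fillBits tops up a 64-bit bit buffer from a bufio.Reader with one Peek,
// folding in bytes in a batch rather than one ReadByte call per byte.
func fillBits(rd *bufio.Reader, bufBits uint64, numBits uint) (uint64, uint) {
    peek, _ := rd.Peek(8) // Look ahead without consuming anything
    cnt := int(64-numBits) / 8
    if cnt > len(peek) {
        cnt = len(peek)
    }
    for _, c := range peek[:cnt] {
        bufBits |= uint64(c) << numBits
        numBits += 8
    }
    rd.Discard(cnt) // Consume the bytes that were folded into the buffer
    return bufBits, numBits
}

func main() {
    rd := bufio.NewReader(strings.NewReader("\x2b\x01"))
    bits, n := fillBits(rd, 0, 0)
    fmt.Printf("%d bits buffered: %x\n", n, bits) // 16 bits buffered: 12b
}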

vendor/github.com/dsnet/compress/brotli/bit_writer.go (generated, vendored; new file, 32 lines)

@ -0,0 +1,32 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "io"
type bitWriter struct {
wr io.Writer
offset int64 // Number of bytes written to underlying io.Writer
}
func (bw *bitWriter) Init(w io.Writer) {
return
}
func (bw *bitWriter) Write(buf []byte) (int, error) {
return 0, nil
}
func (bw *bitWriter) WriteBits(val, nb uint) {
return
}
func (bw *bitWriter) WritePads() {
return
}
func (bw *bitWriter) WriteSymbol(pe *prefixEncoder, sym uint) {
return
}

View File

@ -0,0 +1,5 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli

vendor/github.com/dsnet/compress/brotli/common.go (generated, vendored; new file, 163 lines)

@ -0,0 +1,163 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
// Package brotli implements the Brotli compressed data format.
package brotli
import "runtime"
// Error is the wrapper type for errors specific to this library.
type Error string
func (e Error) Error() string { return "brotli: " + string(e) }
var (
ErrCorrupt error = Error("stream is corrupted")
)
func errRecover(err *error) {
switch ex := recover().(type) {
case nil:
// Do nothing.
case runtime.Error:
panic(ex)
case error:
*err = ex
default:
panic(ex)
}
}
var (
reverseLUT [256]uint8
mtfLUT [256]uint8
)
func init() {
initLUTs()
printLUTs() // Only occurs in debug mode
}
func initLUTs() {
initCommonLUTs()
initPrefixLUTs()
initContextLUTs()
initDictLUTs()
}
func initCommonLUTs() {
for i := range mtfLUT {
mtfLUT[i] = uint8(i)
}
for i := range reverseLUT {
b := uint8(i)
b = (b&0xaa)>>1 | (b&0x55)<<1
b = (b&0xcc)>>2 | (b&0x33)<<2
b = (b&0xf0)>>4 | (b&0x0f)<<4
reverseLUT[i] = b
}
}
// neededBits computes the minimum number of bits needed to encode n elements.
func neededBits(n uint32) (nb uint) {
for n -= 1; n > 0; n >>= 1 {
nb++
}
return
}
// reverseUint32 reverses all bits of v.
func reverseUint32(v uint32) (x uint32) {
x |= uint32(reverseLUT[byte(v>>0)]) << 24
x |= uint32(reverseLUT[byte(v>>8)]) << 16
x |= uint32(reverseLUT[byte(v>>16)]) << 8
x |= uint32(reverseLUT[byte(v>>24)]) << 0
return x
}
// reverseBits reverses the lower n bits of v.
func reverseBits(v uint32, n uint) uint32 {
return reverseUint32(v << (32 - n))
}
// moveToFront is a data structure that allows for more efficient move-to-front
// transformations (described in RFC section 7.3). Since most transformations
// only involve a fairly low number of symbols, it can be quite expensive
// filling out the dict with values 0..255 for every call. Instead, we remember
// what part of the dict was altered and make sure we reset it at the beginning
// of every encode and decode operation.
type moveToFront struct {
dict [256]uint8 // Mapping from indexes to values
tail int // Number of tail bytes that are already ordered
}
func (m *moveToFront) Encode(vals []uint8) {
// Reset dict to be identical to mtfLUT.
copy(m.dict[:], mtfLUT[:256-m.tail])
var max int
for i, val := range vals {
var idx uint8 // Reverse lookup idx in dict
for di, dv := range m.dict {
if dv == val {
idx = uint8(di)
break
}
}
vals[i] = idx
max |= int(idx)
copy(m.dict[1:], m.dict[:idx])
m.dict[0] = val
}
m.tail = 256 - max - 1
}
func (m *moveToFront) Decode(idxs []uint8) {
// Reset dict to be identical to mtfLUT.
copy(m.dict[:], mtfLUT[:256-m.tail])
var max int
for i, idx := range idxs {
val := m.dict[idx] // Forward lookup val in dict
idxs[i] = val
max |= int(idx)
copy(m.dict[1:], m.dict[:idx])
m.dict[0] = val
}
m.tail = 256 - max - 1
}
func allocUint8s(s []uint8, n int) []uint8 {
if cap(s) >= n {
return s[:n]
}
return make([]uint8, n, n*3/2)
}
func allocUint32s(s []uint32, n int) []uint32 {
if cap(s) >= n {
return s[:n]
}
return make([]uint32, n, n*3/2)
}
func extendSliceUints32s(s [][]uint32, n int) [][]uint32 {
if cap(s) >= n {
return s[:n]
}
ss := make([][]uint32, n, n*3/2)
copy(ss, s[:cap(s)])
return ss
}
func extendDecoders(s []prefixDecoder, n int) []prefixDecoder {
if cap(s) >= n {
return s[:n]
}
ss := make([]prefixDecoder, n, n*3/2)
copy(ss, s[:cap(s)])
return ss
}
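The moveToFront comment above describes the transform from RFC section 7.3 and the tail trick that avoids re-filling the whole dictionary on every call. Here is a minimal standalone encode without that optimization; mtfEncode is an illustrative name, and the sample input reproduces the first vector in common_test.go below.

package main

import "fmt"

// mtfEncode replaces each value with its current index in the dictionary
// and then moves that value to the front, so recently seen symbols map to
// small indexes.
func mtfEncode(vals []uint8) {
    var dict [256]uint8
    for i := range dict {
        dict[i] = uint8(i)
    }
    for i, val := range vals {
        var idx uint8 // Reverse lookup of val in dict
        for di, dv := range dict {
            if dv == val {
                idx = uint8(di)
                break
            }
        }
        vals[i] = idx
        copy(dict[1:], dict[:idx]) // Shift the prefix right by one
        dict[0] = val
    }
}

func main() {
    data := []uint8{0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00}
    mtfEncode(data)
    fmt.Printf("%x\n", data) // ff01010101010101, matching the test vector
}

Runs of a repeated symbol collapse into runs of zeros, and alternations into ones, which is what makes the subsequent prefix coding effective.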

vendor/github.com/dsnet/compress/brotli/common_test.go (generated, vendored; new file, 225 lines)

@ -0,0 +1,225 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "hash/crc32"
import "bytes"
import "encoding/hex"
import "testing"
func TestTableCRC(t *testing.T) {
// Convert transformLUT to byte array according to Appendix B of the RFC.
var transformBuf bytes.Buffer
for _, t := range transformLUT {
transformBuf.WriteString(t.prefix + "\x00")
transformBuf.WriteByte(byte(t.transform))
transformBuf.WriteString(t.suffix + "\x00")
}
var vectors = []struct {
crc uint32
buf []byte
}{
{crc: 0x5136cb04, buf: dictLUT[:]},
{crc: 0x8e91efb7, buf: contextLUT0[:]},
{crc: 0xd01a32f4, buf: contextLUT1[:]},
{crc: 0x0dd7a0d6, buf: contextLUT2[:]},
{crc: 0x3d965f81, buf: transformBuf.Bytes()},
}
for i, v := range vectors {
crc := crc32.ChecksumIEEE(v.buf)
if crc != v.crc {
t.Errorf("test %d, CRC-32 mismatch: got %08x, want %08x", i, crc, v.crc)
}
}
}
func TestMoveToFront(t *testing.T) {
var vectors = []struct {
input, output string
}{{
input: "",
output: "",
}, {
input: "ff00ff00ff00ff00",
output: "ff01010101010101",
}, {
input: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000010000000001",
output: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000010100000001",
}, {
input: "0000000000000000000000010101010101010101010101010101010101010101" +
"0101010101010101010101010101010101010101000000000000000203030004",
output: "0000000000000000000000010000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000010000000000000203000204",
}, {
input: "00000001",
output: "00000001",
}, {
input: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000100000000",
output: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000101000000",
}, {
input: "0000000000000000010101010101010101010101010101010101010101010101" +
"0101010101010101010101010101010101010101010101010101010101010101" +
"0101010101010101010200000000000000020203030303030304040505050505" +
"0505050505050505050505050505050505050505050505050505050505050505",
output: "0000000000000000010000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000202000000000000010003000000000004000500000000" +
"0000000000000000000000000000000000000000000000000000000000000000",
}, {
input: "000000010101000202020003030300040404000505050006",
output: "000000010000010200000103000001040000010500000106",
}, {
input: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000001010102020202030202020202" +
"0202020202020202020202020202020202020202020202020202020202020202",
output: "0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000001000002000000030100000000" +
"0000000000000000000000000000000000000000000000000000000000000000",
}, {
input: "0000000000000000000102020202020202020000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000302020202020202010103030303030304040505050505" +
"0505050505050505050505050505050505050505050505050505050505050505",
output: "0000000000000000000102000000000000000200000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000302000000000000030002000000000004000500000000" +
"0000000000000000000000000000000000000000000000000000000000000000",
}, {
input: "0000010000000102020201030303010404020105",
output: "0000010100000102000001030000010400030205",
}, {
input: "0000000000000000010202010101010101010202020202020202020202020202" +
"0202020202020202020202020202020202020202020202020202020202020202" +
"0202020202020202020201010101010101020202020202020203040000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000201010101010101020205050505050503030606060606" +
"0606060606060606060606060606060606060606060606060606060606060606" +
"0606060606060606060202020202020202000702020202020202040404040404" +
"0404040404040404040404040404040404040404040404040404040404040404",
output: "0000000000000000010200010000000000000100000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000001000000000000010000000000000003040400000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000304000000000000010005000000000005000600000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000300000000000000050702000000000000070000000000" +
"0000000000000000000000000000000000000000000000000000000000000000",
}, {
input: "0000000000000001010100020202000303030004040400050505000606060007" +
"070700080808000909090a0a",
output: "0000000000000001000001020000010300000104000001050000010600000107" +
"000001080000010900000a00",
}, {
input: "0000010203030303040000050600070507050201070206060804000400020002" +
"06000200000006000905020000080805050a0202000808080808080105080808" +
"0400050205020505050505050b0205040b0505050505050b0605050505050505" +
"0505050505050505050505050505050502050505050505050505050202050505" +
"040502020b0b0b0b020b0b0b0b0b0b02020b0b0b0b0b0b02020b0b0b0b0b0b0b" +
"0b0b0b0b0b0b0b02020b0b0b0b0b0b0b0b0b0b0b0b0b0b0204020b0b0b050b0a" +
"0c0c02010d0d0d0d0d00060b0d0d0d0d020201020d0d0d0d0c0b02020d0d0d07" +
"04040404070606020b050402060602000e020202060205040404060606040202" +
"040404040404040404040404000000000f0f00090f0f0f0f0f0f0f0b09030d0d" +
"0909060909090101010100000909090909090909010101010101010101010101" +
"0101010101010101010101010d0d0d0d0d0d0d10090c0909090909100f0f0f0f" +
"0f0f07070f0f0f0f0f0f0e0e0f0f0f0f0f0f0f0f0c0c0c0c0c0c0c0c0c0c0c0c" +
"0c0c00080d0d0d0d0d0d020b0d0d0d0d030200010d0d0d0d0d0b02040d0d0d07" +
"0202020207060b020202020206060b0e0e040006060208080808080806060606" +
"00000000000000000000000009090909111111110d0d0d0d1212120900000000" +
"000107060a0a0505050500001212121212121212090909090909090909090909" +
"050511040d0d0d0d0d02040b0d070d0d0a0200010d0d0d0d050b02020d0d0d0d" +
"07020202060b0b0b0402050b02050b07010b00050202020202020c0002020202" +
"02020202020202020202020202020202020202020d0d0d0d0d0d0d0d09090909" +
"09090f0912121212121212121210101010101010090909090909090909090909" +
"0909090909090909090909090909090909090909090909090909090909090909" +
"090909090e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e" +
"0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e" +
"0e0e020211111111111111111111080808080808111111111111111111111111" +
"1111111111111111111111111111111111111111111111111111111111111111" +
"111111110e0e0e0e0e0e0e0e0e030303030303030e0e0e0e0e0e0e0e0e0e0e0e" +
"0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e0e" +
"0e0e0e0e03030303030303030303030303030303030303030303030303030303",
output: "0000010203000000040400050602070301010607030205000807070101040101" +
"04020201000002010908040300060003000a0400040400000000000905020000" +
"0804030501010100000000000b02020403020000000000010902000000000000" +
"0000000000000000000000000000000004010000000000000000000100010000" +
"0401020004000000010100000000000100010000000000010001000000000000" +
"0000000000000001000100000000000000000000000000010301020000030108" +
"0c0004090d00000000090907030000000500050102000000060403000300000c" +
"0a00000001070004050a0503040001090e020000030105050000030000010300" +
"010000000000000000000000050000000f00010e0200000000000008020f0b00" +
"0200080100000d00000007000200000000000000020000000000000000000000" +
"0000000000000000000000000400000000000010030e01000000000209000000" +
"00000e000100000000000e000100000000000000050000000000000000000000" +
"000008100800000000000e0d020000000d03050c040000000005040e0300000b" +
"03000000010e0503000000000200020c0006080400050a000000000002000000" +
"0300000000000000000000000e000000110000000a0000001200000304000000" +
"000c0c0712001200000005000700000000000000070000000000000000000000" +
"0300090c0a000000000c020e030b01000a050a0c040000000907050003000000" +
"070200000c040000090306030202020507020804050000000000100302000000" +
"000000000000000000000000000000000000000009000000000000000c000000" +
"000011010e000000000000000012000000000000020000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000011000000000000000000000000000000000000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000060010000000000000000000110000000000010000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000003000000000000000012000000000000010000000000000000000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000001000000000000000000000000000000000000000000000000000000",
}, {
input: "0000000000010101010102020202030303030404040405050505050606060707" +
"07070808",
output: "0000000000010000000002000000030000000400000005000000000600000700" +
"00000800",
}, {
input: "0000000000000000010001010101010101000000000000000002020303030303" +
"0303030303030303030303030303030303030303030303030303030303030303" +
"0303030303030303030401010101010101040005050505050502020303030303" +
"0303030303030303030303030303030303030303030303030303030303030303",
output: "0000000000000000010101000000000000010000000000000002000300000000" +
"0000000000000000000000000000000000000000000000000000000000000000" +
"0000000000000000000404000000000000010405000000000005000500000000" +
"0000000000000000000000000000000000000000000000000000000000000000",
}, {
input: "0000010000000102020101030303010404040105050501060606010707010108" +
"08010109",
output: "0000010100000102000100030000010400000105000001060000010700010008" +
"00010009",
}}
var mtf moveToFront
for i, v := range vectors {
input, _ := hex.DecodeString(v.input)
mtf.Encode(input)
output := append([]uint8(nil), input...)
mtf.Decode(input)
if input := hex.EncodeToString(input); input != v.input {
t.Errorf("test %d, input differs:\ngot %v\nwant %v", i, input, v.input)
}
if output := hex.EncodeToString(output); output != v.output {
t.Errorf("test %d, output differs:\ngot %v\nwant %v", i, output, v.output)
}
}
}
// This package relies on dynamic generation of LUTs to reduce the static
// binary size. This benchmark attempts to measure the startup cost of init.
// This benchmark is not thread-safe, so do not run it in parallel with other
// tests or benchmarks!
func BenchmarkInit(b *testing.B) {
b.ReportAllocs()
for i := 0; i < b.N; i++ {
initLUTs()
}
}
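For orientation, the transform exercised above can be sketched independently of the package's LUT-backed moveToFront type. This is an illustrative stand-alone version only, assuming the usual byte-wise move-to-front definition:

func mtfEncodeSketch(buf []byte) {
	// Recency list starts as the identity permutation of all byte values.
	var list [256]byte
	for i := range list {
		list[i] = byte(i)
	}
	for i, b := range buf {
		// Replace each byte by its index in the list, then move it to front.
		var idx int
		for list[idx] != b {
			idx++
		}
		copy(list[1:idx+1], list[:idx]) // Shift displaced entries down one slot
		list[0] = b
		buf[i] = byte(idx)
	}
}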

131
vendor/github.com/dsnet/compress/brotli/context.go generated vendored Normal file
View File

@ -0,0 +1,131 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
// These constants are defined in RFC section 7.1.
const (
contextLSB6 = iota
contextMSB6
contextUTF8
contextSigned
numContextModes
)
// These constants are defined in RFC sections 2 and 7.3.
const (
maxLitContextIDs = 64
maxDistContextIDs = 4
)
// These LUTs are taken directly from RFC section 7.1.
var (
contextLUT0 = [256]uint8{
0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 0, 0, 4, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
8, 12, 16, 12, 12, 20, 12, 16, 24, 28, 12, 12, 32, 12, 36, 12,
44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 32, 32, 24, 40, 28, 12,
12, 48, 52, 52, 52, 48, 52, 52, 52, 48, 52, 52, 52, 52, 52, 48,
52, 52, 52, 52, 52, 48, 52, 52, 52, 52, 52, 24, 12, 28, 12, 12,
12, 56, 60, 60, 60, 56, 60, 60, 60, 56, 60, 60, 60, 60, 60, 56,
60, 60, 60, 60, 60, 56, 60, 60, 60, 60, 60, 24, 12, 28, 12, 0,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3, 2, 3,
}
contextLUT1 = [256]uint8{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1,
1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1,
1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 1, 1, 1, 1, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
}
contextLUT2 = [256]uint8{
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7,
}
)
// These LUTs are dynamically computed from the LUTs in the specification.
var (
contextP1LUT [256 * numContextModes]uint8
contextP2LUT [256 * numContextModes]uint8
)
// initContextLUTs computes LUTs so that context ID computation can be
// done efficiently without any branches.
func initContextLUTs() {
for i := 0; i < 256; i++ {
for m := 0; m < numContextModes; m++ {
base := m << 8
// Operations performed here are specified in RFC section 7.1.
switch m {
case contextLSB6:
contextP1LUT[base+i] = byte(i) & 0x3f
contextP2LUT[base+i] = 0
case contextMSB6:
contextP1LUT[base+i] = byte(i) >> 2
contextP2LUT[base+i] = 0
case contextUTF8:
contextP1LUT[base+i] = contextLUT0[byte(i)]
contextP2LUT[base+i] = contextLUT1[byte(i)]
case contextSigned:
contextP1LUT[base+i] = contextLUT2[byte(i)] << 3
contextP2LUT[base+i] = contextLUT2[byte(i)]
default:
panic("unknown context mode")
}
}
}
}
// getLitContextID computes the context ID for literals from RFC section 7.1.
// Bytes p1 and p2 are the last and second-to-last byte, respectively.
func getLitContextID(p1, p2 byte, mode uint8) uint8 {
base := uint(mode) << 8
return contextP1LUT[base+uint(p1)] | contextP2LUT[base+uint(p2)]
}
// getDistContextID computes the context ID for distances using the copy length
// as specified in RFC section 7.2.
func getDistContextID(l int) uint8 {
if l > 4 {
return 3
}
return uint8(l - 2)
}
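To make the flattening above concrete: every mode owns a 256-entry window starting at mode<<8, so getLitContextID costs one shift, one add, and two table loads with no branching. A hedged sketch of the same result computed directly from the RFC definitions, for the two position-based modes only:

// litContextIDDirect recomputes the context ID without the flattened LUTs.
// Sketch only; the UTF8 and signed modes additionally need contextLUT0..2.
func litContextIDDirect(p1 byte, mode uint8) uint8 {
	switch mode {
	case contextLSB6:
		return p1 & 0x3f // Low 6 bits of the previous byte
	case contextMSB6:
		return p1 >> 2 // High 6 bits of the previous byte
	}
	panic("sketch handles only the position-based modes")
}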

241
vendor/github.com/dsnet/compress/brotli/debug.go generated vendored Normal file
View File

@ -0,0 +1,241 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
// +build debug
package brotli
import "os"
import "fmt"
import "strings"
const debug = true
func printLUTs() {
var output = os.Stderr
printVar := func(name string, obj interface{}) {
var body string
if bs, ok := obj.([]uint8); ok && len(bs) >= 256 {
// Special case handling for large []uint8 to form 16x16 blocks.
var ss []string
ss = append(ss, "{")
var s string
for i, b := range bs {
s += fmt.Sprintf("%02x ", b)
if i%16 == 15 || i+1 == len(bs) {
ss = append(ss, "\t"+s+"")
s = ""
}
if i%256 == 255 && (i+1 != len(bs)) {
ss = append(ss, "")
}
}
ss = append(ss, "}")
body = strings.Join(ss, "\n")
} else {
body = fmt.Sprintf("%v", obj)
}
fmt.Fprintf(output, "var %s %T = %v\n", name, obj, body)
}
// Common LUTs.
printVar("reverseLUT", reverseLUT[:])
printVar("mtfLUT", mtfLUT[:])
fmt.Fprintln(output)
// Context LUTs.
printVar("contextP1LUT", contextP1LUT[:])
printVar("contextP2LUT", contextP2LUT[:])
fmt.Fprintln(output)
// Static dictionary LUTs.
printVar("dictBitSizes", dictBitSizes)
printVar("dictSizes", dictSizes)
printVar("dictOffsets", dictOffsets)
fmt.Fprintln(output)
// Prefix LUTs.
printVar("simpleLens1", simpleLens1)
printVar("simpleLens2", simpleLens2)
printVar("simpleLens3", simpleLens3)
printVar("simpleLens4a", simpleLens4a)
printVar("simpleLens4b", simpleLens4b)
printVar("complexLens", complexLens)
fmt.Fprintln(output)
printVar("insLenRanges", rangeCodes(insLenRanges))
printVar("cpyLenRanges", rangeCodes(cpyLenRanges))
printVar("blkLenRanges", rangeCodes(blkLenRanges))
printVar("maxRLERanges", rangeCodes(maxRLERanges))
fmt.Fprintln(output)
printVar("codeCLens", prefixCodes(codeCLens))
printVar("decCLens", decCLens)
printVar("encCLens", encCLens)
fmt.Fprintln(output)
printVar("codeMaxRLE", prefixCodes(codeMaxRLE))
printVar("decMaxRLE", decMaxRLE)
printVar("encMaxRLE", encMaxRLE)
fmt.Fprintln(output)
printVar("codeWinBits", prefixCodes(codeWinBits))
printVar("decWinBits", decWinBits)
printVar("encWinBits", encWinBits)
fmt.Fprintln(output)
printVar("codeCounts", prefixCodes(codeCounts))
printVar("decCounts", decCounts)
printVar("encCounts", encCounts)
fmt.Fprintln(output)
printVar("iacLUT", typeIaCLUT(iacLUT))
printVar("distShortLUT", typeDistShortLUT(distShortLUT))
printVar("distLongLUT", typeDistLongLUT(distLongLUT))
fmt.Fprintln(output)
}
func tabs(s string, n int) string {
tabs := strings.Repeat("\t", n)
return strings.Join(strings.Split(s, "\n"), "\n"+tabs)
}
type rangeCodes []rangeCode
func (rc rangeCodes) String() (s string) {
var maxBits, maxBase int
for _, c := range rc {
if maxBits < int(c.bits) {
maxBits = int(c.bits)
}
if maxBase < int(c.base) {
maxBase = int(c.base)
}
}
var ss []string
ss = append(ss, "{")
maxSymDig := len(fmt.Sprintf("%d", len(rc)-1))
maxBitsDig := len(fmt.Sprintf("%d", maxBits))
maxBaseDig := len(fmt.Sprintf("%d", maxBase))
for i, c := range rc {
base := fmt.Sprintf(fmt.Sprintf("%%%dd", maxBaseDig), c.base)
if c.bits > 0 {
base += fmt.Sprintf("-%d", c.base+1<<c.bits-1)
}
ss = append(ss, fmt.Sprintf(
fmt.Sprintf("\t%%%dd: {bits: %%%dd, base: %%s},",
maxSymDig, maxBitsDig),
i, c.bits, base,
))
}
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
type prefixCodes []prefixCode
func (pc prefixCodes) String() (s string) {
var maxSym, maxLen int
for _, c := range pc {
if maxSym < int(c.sym) {
maxSym = int(c.sym)
}
if maxLen < int(c.len) {
maxLen = int(c.len)
}
}
var ss []string
ss = append(ss, "{")
maxSymDig := len(fmt.Sprintf("%d", maxSym))
for _, c := range pc {
ss = append(ss, fmt.Sprintf(
fmt.Sprintf("\t%%%dd:%s%%0%db,",
maxSymDig, strings.Repeat(" ", 2+maxLen-int(c.len)), c.len),
c.sym, c.val,
))
}
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
func (pd prefixDecoder) String() string {
var ss []string
ss = append(ss, "{")
if len(pd.chunks) > 0 {
ss = append(ss, "\tchunks: {")
for i, c := range pd.chunks {
l := "sym"
if uint(c&prefixCountMask) > uint(pd.chunkBits) {
l = "idx"
}
ss = append(ss, fmt.Sprintf(
fmt.Sprintf("\t\t%%0%db: {%%s: %%3d, len: %%2d},", pd.chunkBits),
i, l, c>>prefixCountBits, c&prefixCountMask,
))
}
ss = append(ss, "\t},")
for j, links := range pd.links {
ss = append(ss, fmt.Sprintf("\tlinks[%d]: {", j))
linkBits := len(fmt.Sprintf("%b", pd.linkMask))
for i, c := range links {
ss = append(ss, fmt.Sprintf(
fmt.Sprintf("\t\t%%0%db: {sym: %%3d, len: %%2d},", linkBits),
i, c>>prefixCountBits, c&prefixCountMask,
))
}
ss = append(ss, "\t},")
}
ss = append(ss, fmt.Sprintf("\tchunkMask: %b,", pd.chunkMask))
ss = append(ss, fmt.Sprintf("\tlinkMask: %b,", pd.linkMask))
ss = append(ss, fmt.Sprintf("\tchunkBits: %d,", pd.chunkBits))
ss = append(ss, fmt.Sprintf("\tminBits: %d,", pd.minBits))
ss = append(ss, fmt.Sprintf("\tnumSyms: %d,", pd.numSyms))
}
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
type typeIaCLUT [numIaCSyms]struct{ ins, cpy rangeCode }
func (t typeIaCLUT) String() string {
var ss []string
var ins, cpy rangeCodes
for _, rec := range t {
ins = append(ins, rec.ins)
cpy = append(cpy, rec.cpy)
}
ss = append(ss, "{")
ss = append(ss, "\tins: "+tabs(ins.String(), 1)+",")
ss = append(ss, "\tcpy: "+tabs(cpy.String(), 1)+",")
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
type typeDistShortLUT [16]struct{ index, delta int }
func (t typeDistShortLUT) String() string {
var ss []string
ss = append(ss, "{")
for i, rec := range t {
ss = append(ss, fmt.Sprintf("\t%2d: {index: %d, delta: %+2d},", i, rec.index, rec.delta))
}
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
type typeDistLongLUT [4][]rangeCode
func (t typeDistLongLUT) String() string {
var ss []string
ss = append(ss, "{")
for i, rc := range t {
ss = append(ss, fmt.Sprintf("\t%d: %s,", i, tabs(rangeCodes(rc).String(), 1)))
}
ss = append(ss, "}")
return strings.Join(ss, "\n")
}
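Since this file is guarded by the debug build tag, release builds need a counterpart that defines debug as false, so that checks such as the checkPrefixes call in prefix_decoder.go compile away. That counterpart is not visible in this excerpt; a minimal sketch of its conventional shape would be:

// Hypothetical release-mode counterpart (not shown in this diff excerpt):
// compiled only when the debug tag is absent.
// +build !debug

package brotli

const debug = false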

10272
vendor/github.com/dsnet/compress/brotli/dict.go generated vendored Normal file

File diff suppressed because it is too large Load Diff

136
vendor/github.com/dsnet/compress/brotli/dict_decoder.go generated vendored Normal file
View File

@ -0,0 +1,136 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
// The dictDecoder implements the LZ77 sliding dictionary that is commonly used
// in various compression formats. For performance reasons, this implementation
// performs little to no sanity checking on its arguments. As such, the
// invariants documented for each method call must be respected. Furthermore,
// to reduce the memory footprint decompressing short streams, the dictionary
// starts with a relatively small size and then lazily grows.
const (
initSize = 4096 // Initial size allocated for sliding dictionary
growFactor = 4 // Rate at which the dictionary is grown to match the expected size
)
type dictDecoder struct {
// Invariant: len(hist) <= size
size int // Sliding window size
hist []byte // Sliding window history, dynamically grown to match size
// Invariant: 0 <= rdPos <= wrPos <= len(hist)
wrPos int // Current output position in buffer
rdPos int // Have emitted hist[:rdPos] already
full bool // Has a full window length been written yet?
}
func (dd *dictDecoder) Init(size int) {
*dd = dictDecoder{hist: dd.hist}
// Regardless of the claimed size, start with a small dictionary to avoid
// denial-of-service attacks that force a large memory allocation.
dd.size = size
if dd.hist == nil {
dd.hist = make([]byte, initSize)
}
dd.hist = dd.hist[:cap(dd.hist)]
if len(dd.hist) > dd.size {
dd.hist = dd.hist[:dd.size]
}
for i := range dd.hist {
dd.hist[i] = 0 // Zero out history to make LastBytes logic easier
}
}
// HistSize reports the total amount of historical data in the dictionary.
func (dd *dictDecoder) HistSize() int {
if dd.full {
return dd.size
}
return dd.wrPos
}
// AvailSize reports the available amount of output buffer space.
func (dd *dictDecoder) AvailSize() int {
return len(dd.hist) - dd.wrPos
}
// WriteSlice returns a slice of the available buffer to write data to.
//
// This invariant will be kept: len(s) <= AvailSize()
func (dd *dictDecoder) WriteSlice() []byte {
return dd.hist[dd.wrPos:]
}
// WriteMark advances the writer pointer by cnt.
//
// This invariant must be kept: 0 <= cnt <= AvailSize()
func (dd *dictDecoder) WriteMark(cnt int) {
dd.wrPos += cnt
}
// WriteCopy copies a string at a given (distance, length) to the output.
// This returns the number of bytes copied and may be less than the requested
// length if the available space in the output buffer is too small.
//
// This invariant must be kept: 0 < dist <= HistSize()
func (dd *dictDecoder) WriteCopy(dist, length int) int {
wrBase := dd.wrPos
wrEnd := dd.wrPos + length
if wrEnd > len(dd.hist) {
wrEnd = len(dd.hist)
}
// Copy non-overlapping section after destination.
rdPos := dd.wrPos - dist
if rdPos < 0 {
rdPos += len(dd.hist)
dd.wrPos += copy(dd.hist[dd.wrPos:wrEnd], dd.hist[rdPos:])
rdPos = 0
}
// Copy overlapping section before destination.
for dd.wrPos < wrEnd {
dd.wrPos += copy(dd.hist[dd.wrPos:wrEnd], dd.hist[rdPos:dd.wrPos])
}
return dd.wrPos - wrBase
}
// ReadFlush returns a slice of the historical buffer that is ready to be
// emitted to the user. A call to ReadFlush is only valid after all of the data
// from a previous call to ReadFlush has been consumed.
func (dd *dictDecoder) ReadFlush() []byte {
toRead := dd.hist[dd.rdPos:dd.wrPos]
dd.rdPos = dd.wrPos
if dd.wrPos == len(dd.hist) {
if len(dd.hist) == dd.size {
dd.wrPos, dd.rdPos = 0, 0
dd.full = true
} else {
// Allocate a larger history buffer.
size := cap(dd.hist) * growFactor
if size > dd.size {
size = dd.size
}
hist := make([]byte, size)
copy(hist, dd.hist)
dd.hist = hist
}
}
return toRead
}
// LastBytes reports the last 2 bytes in the dictionary. If they do not exist,
// then zero values are returned.
func (dd *dictDecoder) LastBytes() (p1, p2 byte) {
if dd.wrPos > 1 {
return dd.hist[dd.wrPos-1], dd.hist[dd.wrPos-2]
} else if dd.wrPos > 0 {
return dd.hist[dd.wrPos-1], dd.hist[len(dd.hist)-1]
} else {
return dd.hist[len(dd.hist)-1], dd.hist[len(dd.hist)-2]
}
}
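The test that follows drives exactly this contract; reduced to its core, the produce/flush cycle described by the comments above looks like the sketch below (produce and emit are hypothetical callbacks, not part of this package):

func drainSketch(dd *dictDecoder, produce func([]byte) int, emit func([]byte)) {
	for {
		n := produce(dd.WriteSlice()) // Decode some bytes into the window
		dd.WriteMark(n)
		if dd.AvailSize() == 0 {
			emit(dd.ReadFlush()) // Window full: hand data to the consumer
		}
		if n == 0 {
			emit(dd.ReadFlush()) // Flush the tail and stop
			return
		}
	}
}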

View File

@ -0,0 +1,161 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "bytes"
import "strings"
import "testing"
func TestDictDecoder(t *testing.T) {
const abc = "ABC\n"
const fox = "The quick brown fox jumped over the lazy dog!\n"
const poem = "The Road Not Taken\nRobert Frost\n" +
"\n" +
"Two roads diverged in a yellow wood,\n" +
"And sorry I could not travel both\n" +
"And be one traveler, long I stood\n" +
"And looked down one as far as I could\n" +
"To where it bent in the undergrowth;\n" +
"\n" +
"Then took the other, as just as fair,\n" +
"And having perhaps the better claim,\n" +
"Because it was grassy and wanted wear;\n" +
"Though as for that the passing there\n" +
"Had worn them really about the same,\n" +
"\n" +
"And both that morning equally lay\n" +
"In leaves no step had trodden black.\n" +
"Oh, I kept the first for another day!\n" +
"Yet knowing how way leads on to way,\n" +
"I doubted if I should ever come back.\n" +
"\n" +
"I shall be telling this with a sigh\n" +
"Somewhere ages and ages hence:\n" +
"Two roads diverged in a wood, and I-\n" +
"I took the one less traveled by,\n" +
"And that has made all the difference.\n"
var refs = []struct {
dist int // Backward distance (0 if this is an insertion)
length int // Length of copy or insertion
}{
{0, 38}, {33, 3}, {0, 48}, {79, 3}, {0, 11}, {34, 5}, {0, 6}, {23, 7},
{0, 8}, {50, 3}, {0, 2}, {69, 3}, {34, 5}, {0, 4}, {97, 3}, {0, 4},
{43, 5}, {0, 6}, {7, 4}, {88, 7}, {0, 12}, {80, 3}, {0, 2}, {141, 4},
{0, 1}, {196, 3}, {0, 3}, {157, 3}, {0, 6}, {181, 3}, {0, 2}, {23, 3},
{77, 3}, {28, 5}, {128, 3}, {110, 4}, {70, 3}, {0, 4}, {85, 6}, {0, 2},
{182, 6}, {0, 4}, {133, 3}, {0, 7}, {47, 5}, {0, 20}, {112, 5}, {0, 1},
{58, 3}, {0, 8}, {59, 3}, {0, 4}, {173, 3}, {0, 5}, {114, 3}, {0, 4},
{92, 5}, {0, 2}, {71, 3}, {0, 2}, {76, 5}, {0, 1}, {46, 3}, {96, 4},
{130, 4}, {0, 3}, {360, 3}, {0, 3}, {178, 5}, {0, 7}, {75, 3}, {0, 3},
{45, 6}, {0, 6}, {299, 6}, {180, 3}, {70, 6}, {0, 1}, {48, 3}, {66, 4},
{0, 3}, {47, 5}, {0, 9}, {325, 3}, {0, 1}, {359, 3}, {318, 3}, {0, 2},
{199, 3}, {0, 1}, {344, 3}, {0, 3}, {248, 3}, {0, 10}, {310, 3}, {0, 3},
{93, 6}, {0, 3}, {252, 3}, {157, 4}, {0, 2}, {273, 5}, {0, 14}, {99, 4},
{0, 1}, {464, 4}, {0, 2}, {92, 4}, {495, 3}, {0, 1}, {322, 4}, {16, 4},
{0, 3}, {402, 3}, {0, 2}, {237, 4}, {0, 2}, {432, 4}, {0, 1}, {483, 5},
{0, 2}, {294, 4}, {0, 2}, {306, 3}, {113, 5}, {0, 1}, {26, 4}, {164, 3},
{488, 4}, {0, 1}, {542, 3}, {248, 6}, {0, 5}, {205, 3}, {0, 8}, {48, 3},
{449, 6}, {0, 2}, {192, 3}, {328, 4}, {9, 5}, {433, 3}, {0, 3}, {622, 25},
{615, 5}, {46, 5}, {0, 2}, {104, 3}, {475, 10}, {549, 3}, {0, 4}, {597, 8},
{314, 3}, {0, 1}, {473, 6}, {317, 5}, {0, 1}, {400, 3}, {0, 3}, {109, 3},
{151, 3}, {48, 4}, {0, 4}, {125, 3}, {108, 3}, {0, 2},
}
var want string
var buf bytes.Buffer
var dd dictDecoder
dd.Init(1 << 11)
var checkLastBytes = func(str string) {
if len(str) < 2 {
str = "\x00\x00" + str
}
str = str[len(str)-2:]
p1, p2 := dd.LastBytes()
got := string([]byte{p2, p1})
if got != str {
t.Errorf("last bytes mismatch: got %q, want %q", got, str)
}
}
var writeCopy = func(dist, length int) {
if dist < length {
cnt := (dist + length - 1) / dist
want += strings.Repeat(want[len(want)-dist:], cnt)[:length]
} else {
want += want[len(want)-dist:][:length]
}
for length > 0 {
length -= dd.WriteCopy(dist, length)
if dd.AvailSize() == 0 {
buf.Write(dd.ReadFlush())
}
}
checkLastBytes(want)
}
var writeString = func(str string) {
want += str
for len(str) > 0 {
cnt := copy(dd.WriteSlice(), str)
str = str[cnt:]
dd.WriteMark(cnt)
if dd.AvailSize() == 0 {
buf.Write(dd.ReadFlush())
}
}
checkLastBytes(want)
}
writeString("")
writeString(".")
str := poem
for _, ref := range refs {
if ref.dist == 0 {
writeString(str[:ref.length])
} else {
writeCopy(ref.dist, ref.length)
}
str = str[ref.length:]
}
writeCopy(dd.HistSize(), 33)
writeString(abc)
writeCopy(len(abc), 59*len(abc))
writeString(fox)
writeCopy(len(fox), 9*len(fox))
writeString(".")
writeCopy(1, 9)
writeString(strings.ToUpper(poem))
writeCopy(len(poem), 7*len(poem))
writeCopy(dd.HistSize(), 10)
buf.Write(dd.ReadFlush())
if buf.String() != want {
t.Errorf("final string mismatch:\ngot %q\nwant %q", buf.String(), want)
}
}
func BenchmarkDictDecoderCopy(b *testing.B) {
nb := 1 << 24
b.SetBytes(int64(nb))
for i := 0; i < b.N; i++ {
var dd dictDecoder
dd.Init(1 << 16)
copy(dd.WriteSlice(), "abc")
dd.WriteMark(3)
dist, length := 3, nb
for length > 0 {
length -= dd.WriteCopy(dist, length)
if dd.AvailSize() == 0 {
dd.ReadFlush()
}
}
}
}

View File

@ -0,0 +1,5 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli

View File

@ -0,0 +1,5 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli

289
vendor/github.com/dsnet/compress/brotli/prefix.go generated vendored Normal file
View File

@ -0,0 +1,289 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
const (
// RFC section 3.5.
// This is the maximum bit-width of a prefix code.
// Thus, it is okay to use uint32 to store codes.
maxPrefixBits = 15
// RFC section 3.3.
// The size of the alphabet for various prefix codes.
numLitSyms = 256 // Literal symbols
maxNumDistSyms = 16 + 120 + (48 << 3) // Distance symbols
numIaCSyms = 704 // Insert-and-copy length symbols
numBlkCntSyms = 26 // Block count symbols
maxNumBlkTypeSyms = 256 + 2 // Block type symbols
maxNumCtxMapSyms = 256 + 16 // Context map symbols
// This should be the maximum of all the constants above.
maxNumAlphabetSyms = numIaCSyms
)
var (
// RFC section 3.4.
// Prefix code lengths for simple codes.
simpleLens1 = [1]uint{0}
simpleLens2 = [2]uint{1, 1}
simpleLens3 = [3]uint{1, 2, 2}
simpleLens4a = [4]uint{2, 2, 2, 2}
simpleLens4b = [4]uint{1, 2, 3, 3}
// RFC section 3.5.
// Prefix code lengths for complex codes as they appear in the stream.
complexLens = [18]uint{
1, 2, 3, 4, 0, 5, 17, 6, 16, 7, 8, 9, 10, 11, 12, 13, 14, 15,
}
)
type rangeCode struct {
base uint32 // Starting base offset of the range
bits uint32 // Bit-width of a subsequent integer to add to base offset
}
var (
// RFC section 5.
// LUT to convert an insert symbol to an actual insert length.
insLenRanges []rangeCode
// RFC section 5.
// LUT to convert a copy symbol to an actual copy length.
cpyLenRanges []rangeCode
// RFC section 6.
// LUT to convert a block-type length symbol to an actual length.
blkLenRanges []rangeCode
// RFC section 7.3.
// LUT to convert RLE symbol to an actual repeat length.
maxRLERanges []rangeCode
)
type prefixCode struct {
sym uint32 // The symbol being mapped
val uint32 // Value of the prefix code (must be in [0..1<<len])
len uint32 // Bit length of the prefix code
}
var (
// RFC section 3.5.
// Prefix codecs for code lengths in complex prefix definition.
codeCLens []prefixCode
decCLens prefixDecoder
encCLens prefixEncoder
// RFC section 7.3.
// Prefix codecs for RLEMAX in context map definition.
codeMaxRLE []prefixCode
decMaxRLE prefixDecoder
encMaxRLE prefixEncoder
// RFC section 9.1.
// Prefix codecs for WBITS in stream header definition.
codeWinBits []prefixCode
decWinBits prefixDecoder
encWinBits prefixEncoder
// RFC section 9.2.
// Prefix codecs used for size fields in meta-block header definition.
codeCounts []prefixCode
decCounts prefixDecoder
encCounts prefixEncoder
)
var (
// RFC section 5.
// Table to convert insert-and-copy symbols to insert and copy lengths.
iacLUT [numIaCSyms]struct{ ins, cpy rangeCode }
// RFC section 4.
// Table to help convert short-codes (first 16 symbols) to distances using
// the ring buffer of past distances.
distShortLUT [16]struct{ index, delta int }
// RFC section 4.
// Table to help convert long-codes to distances. This is a two-dimensional
// slice keyed by NPOSTFIX and the normalized distance symbol.
distLongLUT [4][]rangeCode
)
func initPrefixLUTs() {
// Sanity check some constants.
for _, numMax := range []uint{
numLitSyms, maxNumDistSyms, numIaCSyms, numBlkCntSyms, maxNumBlkTypeSyms, maxNumCtxMapSyms,
} {
if numMax > maxNumAlphabetSyms {
panic("maximum alphabet size is not updated")
}
}
if maxNumAlphabetSyms >= 1<<prefixSymbolBits {
panic("maximum alphabet size is too large to represent")
}
if maxPrefixBits >= 1<<prefixCountBits {
panic("maximum prefix bit-length is too large to represent")
}
initPrefixRangeLUTs()
initPrefixCodeLUTs()
initLengthLUTs()
}
func initPrefixRangeLUTs() {
var makeRanges = func(base uint, bits []uint) (rc []rangeCode) {
for _, nb := range bits {
rc = append(rc, rangeCode{base: uint32(base), bits: uint32(nb)})
base += 1 << nb
}
return rc
}
insLenRanges = makeRanges(0, []uint{
0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 12, 14, 24,
}) // RFC section 5
cpyLenRanges = makeRanges(2, []uint{
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 7, 8, 9, 10, 24,
}) // RFC section 5
blkLenRanges = makeRanges(1, []uint{
2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 7, 8, 9, 10, 11, 12, 13, 24,
}) // RFC section 6
maxRLERanges = makeRanges(2, []uint{
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
}) // RFC section 7.3
}
func initPrefixCodeLUTs() {
// Prefix code for reading code lengths in RFC section 3.5.
codeCLens = nil
for sym, clen := range []uint{2, 4, 3, 2, 2, 4} {
var code = prefixCode{sym: uint32(sym), len: uint32(clen)}
codeCLens = append(codeCLens, code)
}
decCLens.Init(codeCLens, true)
encCLens.Init(codeCLens)
// Prefix code for reading RLEMAX in RFC section 7.3.
codeMaxRLE = []prefixCode{{sym: 0, val: 0, len: 1}}
for i := uint32(0); i < 16; i++ {
var code = prefixCode{sym: i + 1, val: i<<1 | 1, len: 5}
codeMaxRLE = append(codeMaxRLE, code)
}
decMaxRLE.Init(codeMaxRLE, false)
encMaxRLE.Init(codeMaxRLE)
// Prefix code for reading WBITS in RFC section 9.1.
codeWinBits = nil
for i := uint32(9); i <= 24; i++ {
var code prefixCode
switch {
case i == 16:
code = prefixCode{sym: i, val: (i-16)<<0 | 0, len: 1} // Symbols: 16
case i > 17:
code = prefixCode{sym: i, val: (i-17)<<1 | 1, len: 4} // Symbols: 18..24
case i < 17:
code = prefixCode{sym: i, val: (i-8)<<4 | 1, len: 7} // Symbols: 9..15
default:
code = prefixCode{sym: i, val: (i-17)<<4 | 1, len: 7} // Symbols: 17
}
codeWinBits = append(codeWinBits, code)
}
codeWinBits[0].sym = 0 // Invalid code "1000100" to use symbol zero
decWinBits.Init(codeWinBits, false)
encWinBits.Init(codeWinBits)
// Prefix code for reading counts in RFC section 9.2.
// This is used for: NBLTYPESL, NBLTYPESI, NBLTYPESD, NTREESL, and NTREESD.
codeCounts = []prefixCode{{sym: 1, val: 0, len: 1}}
var code = codeCounts[len(codeCounts)-1]
for i := uint32(0); i < 8; i++ {
for j := uint32(0); j < 1<<i; j++ {
code.sym = code.sym + 1
code.val = j<<4 | i<<1 | 1
code.len = uint32(i + 4)
codeCounts = append(codeCounts, code)
}
}
decCounts.Init(codeCounts, false)
encCounts.Init(codeCounts)
}
func initLengthLUTs() {
// RFC section 5.
// The insert-and-copy length symbol is converted into an insert length
// and a copy length. Thus, create a table to precompute the result for
// all input symbols.
for iacSym := range iacLUT {
var insSym, cpySym int
switch iacSym / 64 {
case 0, 2: // 0..63 and 128..191
insSym, cpySym = 0, 0
case 1, 3: // 64..127 and 192..255
insSym, cpySym = 0, 8
case 4: // 256..319
insSym, cpySym = 8, 0
case 5: // 320..383
insSym, cpySym = 8, 8
case 6: // 384..447
insSym, cpySym = 0, 16
case 7: // 448..511
insSym, cpySym = 16, 0
case 8: // 512..575
insSym, cpySym = 8, 16
case 9: // 576..639
insSym, cpySym = 16, 8
case 10: // 640..703
insSym, cpySym = 16, 16
}
r64 := iacSym % 64
insSym += r64 >> 3 // Upper 3 bits
cpySym += r64 & 0x07 // Lower 3 bits
iacLUT[iacSym].ins = insLenRanges[insSym]
iacLUT[iacSym].cpy = cpyLenRanges[cpySym]
}
// RFC section 4.
// The first 16 symbols modify a previously seen symbol. Thus, we can create
// a table to determine which distance to use and how much to modify it by.
for distSym := range distShortLUT {
var index, delta int
switch {
case distSym < 4:
index, delta = distSym, 0
case distSym < 10:
index, delta = 0, int(distSym/2-1)
case distSym < 16:
index, delta = 1, int(distSym/2-4)
}
if distSym%2 == 0 {
delta *= -1
}
distShortLUT[distSym].index = index
distShortLUT[distSym].delta = delta
}
// RFC section 4.
// Longer distances are computed according to the equation in the RFC.
// To reduce computation during runtime, we precompute as much of the output
// as possible. Thus, we compute the final distance using the following:
// rec := distLongLUT[NPOSTFIX][distSym - (16+NDIRECT)]
// distance := NDIRECT + rec.base + ReadBits(rec.bits)<<NPOSTFIX
for npostfix := range distLongLUT {
numDistSyms := 48 << uint(npostfix)
distLongLUT[npostfix] = make([]rangeCode, numDistSyms)
for distSym := range distLongLUT[npostfix] {
postfixMask := 1<<uint(npostfix) - 1
hcode := distSym >> uint(npostfix)
lcode := distSym & postfixMask
nbits := 1 + distSym>>uint(npostfix+1)
offset := ((2 + (hcode & 1)) << uint(nbits)) - 4
distLongLUT[npostfix][distSym] = rangeCode{
base: uint32(offset<<uint(npostfix) + lcode + 1),
bits: uint32(nbits),
}
}
}
}
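As a worked check of the table above (hypothetical parameters): with NPOSTFIX=0 and NDIRECT=0, normalized distance symbol 0 yields hcode=0, lcode=0, nbits=1, offset=0, hence rangeCode{base: 1, bits: 1}, and the decoded distance is 1 plus one extra bit. A direct transcription of that arithmetic:

// longDistanceSketch recomputes one distLongLUT entry and applies the extra
// bits by hand, mirroring the equation quoted in the comment above.
func longDistanceSketch(npostfix, ndirect uint, distSym, extra int) int {
	postfixMask := 1<<npostfix - 1
	hcode := distSym >> npostfix
	lcode := distSym & postfixMask
	nbits := 1 + distSym>>(npostfix+1)
	offset := ((2 + (hcode & 1)) << uint(nbits)) - 4
	base := offset<<npostfix + lcode + 1
	return int(ndirect) + base + extra<<npostfix
}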

View File

@ -0,0 +1,200 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
// The algorithm used to decode variable length codes is based on the lookup
// method in zlib. If the code length is less than or equal to prefixMaxChunkBits,
// then the symbol can be decoded using a single lookup into the chunks table.
// Otherwise, the links table will be used for a second-level lookup.
//
// The chunks slice is keyed by the contents of the bit buffer ANDed with
// the chunkMask to avoid an out-of-bounds lookup. The value of chunks is a tuple
// that is decoded as follows:
//
// var length = chunks[bitBuffer&chunkMask] & prefixCountMask
// var symbol = chunks[bitBuffer&chunkMask] >> prefixCountBits
//
// If the decoded length is larger than chunkBits, then an overflow link table
// must be used for further decoding. In this case, the symbol is actually the
// index into the links tables. The second-level links table returned is
// processed in the same way as the chunks table.
//
// if length > chunkBits {
// var index = symbol // Previous symbol is index into links tables
// length = links[index][bitBuffer>>chunkBits & linkMask] & prefixCountMask
// symbol = links[index][bitBuffer>>chunkBits & linkMask] >> prefixCountBits
// }
//
// See the following:
// http://www.gzip.org/algorithm.txt
const (
// These values add up to the width of a uint32 integer.
prefixCountBits = 5 // Number of bits to store the bit-width of the code
prefixSymbolBits = 27 // Number of bits to store the symbol value
prefixCountMask = (1 << prefixCountBits) - 1
prefixMaxChunkBits = 9 // This can be tuned for better performance
)
type prefixDecoder struct {
chunks []uint32 // First-level lookup map
links [][]uint32 // Second-level lookup map
chunkMask uint32 // Mask the width of the chunks table
linkMask uint32 // Mask the width of the link table
chunkBits uint32 // Bit-width of the chunks table
minBits uint32 // The minimum number of bits to safely make progress
numSyms uint32 // Number of symbols
}
// Init initializes prefixDecoder according to the codes provided.
// The symbols provided must be unique and in ascending order.
//
// If assignCodes is true, then generate a canonical prefix tree using the
// prefixCode.len field and assign the generated value to prefixCode.val.
//
// If assignCodes is false, then initialize using the information inside the
// codes themselves. The input codes must form a valid prefix tree.
func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
// Handle special case trees.
if len(codes) <= 1 {
switch {
case len(codes) == 0: // Empty tree (should error if used later)
*pd = prefixDecoder{chunks: pd.chunks[:0], links: pd.links[:0], numSyms: 0}
case len(codes) == 1: // Single code tree (bit-width of zero)
*pd = prefixDecoder{
chunks: append(pd.chunks[:0], codes[0].sym<<prefixCountBits|0),
links: pd.links[:0],
numSyms: 1,
}
}
return
}
// Compute basic statistics on the symbols.
var bitCnts [maxPrefixBits + 1]uint
c0 := codes[0]
bitCnts[c0.len]++
minBits, maxBits, symLast := c0.len, c0.len, int(c0.sym)
for _, c := range codes[1:] {
if int(c.sym) <= symLast {
panic(ErrCorrupt) // Non-unique or non-monotonically increasing
}
if minBits > c.len {
minBits = c.len
}
if maxBits < c.len {
maxBits = c.len
}
bitCnts[c.len]++ // Histogram of bit counts
symLast = int(c.sym) // Keep track of last symbol
}
if maxBits >= 1<<prefixCountBits || minBits == 0 {
panic(ErrCorrupt) // Bit-width is too long or too short
}
if symLast >= 1<<prefixSymbolBits {
panic(ErrCorrupt) // Alphabet cardinality too large
}
// Compute the next code for a symbol of a given bit length.
var nextCodes [maxPrefixBits + 1]uint
var code uint
for i := minBits; i <= maxBits; i++ {
code <<= 1
nextCodes[i] = code
code += bitCnts[i]
}
if code != 1<<maxBits {
panic(ErrCorrupt) // Tree is under- or over-subscribed
}
// Allocate chunks table if necessary.
pd.numSyms = uint32(len(codes))
pd.minBits = minBits
pd.chunkBits = maxBits
if pd.chunkBits > prefixMaxChunkBits {
pd.chunkBits = prefixMaxChunkBits
}
numChunks := 1 << pd.chunkBits
pd.chunks = allocUint32s(pd.chunks, numChunks)
pd.chunkMask = uint32(numChunks - 1)
// Allocate links tables if necessary.
pd.links = pd.links[:0]
pd.linkMask = 0
if pd.chunkBits < maxBits {
numLinks := 1 << (maxBits - pd.chunkBits)
pd.linkMask = uint32(numLinks - 1)
if assignCodes {
baseCode := nextCodes[pd.chunkBits+1] >> 1
pd.links = extendSliceUints32s(pd.links, numChunks-int(baseCode))
for linkIdx := range pd.links {
code := reverseBits(uint32(baseCode)+uint32(linkIdx), uint(pd.chunkBits))
pd.links[linkIdx] = allocUint32s(pd.links[linkIdx], numLinks)
pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | uint32(pd.chunkBits+1)
}
} else {
for i := range pd.chunks {
pd.chunks[i] = 0 // Logic below relies on the zero value meaning uninitialized
}
for _, c := range codes {
if c.len <= pd.chunkBits {
continue // Ignore symbols that don't require links
}
code := c.val & pd.chunkMask
if pd.chunks[code] > 0 {
continue // Link table already initialized
}
linkIdx := len(pd.links)
pd.links = extendSliceUints32s(pd.links, len(pd.links)+1)
pd.links[linkIdx] = allocUint32s(pd.links[linkIdx], numLinks)
pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | uint32(pd.chunkBits+1)
}
}
}
// Fill out chunks and links tables with values.
for i, c := range codes {
chunk := c.sym<<prefixCountBits | uint32(c.len)
if assignCodes {
codes[i].val = reverseBits(uint32(nextCodes[c.len]), uint(c.len))
nextCodes[c.len]++
c = codes[i]
}
if c.len <= pd.chunkBits {
skip := 1 << uint(c.len)
for j := int(c.val); j < len(pd.chunks); j += skip {
pd.chunks[j] = chunk
}
} else {
linkIdx := pd.chunks[c.val&pd.chunkMask] >> prefixCountBits
links := pd.links[linkIdx]
skip := 1 << uint(c.len-pd.chunkBits)
for j := int(c.val >> pd.chunkBits); j < len(links); j += skip {
links[j] = chunk
}
}
}
if debug && !checkPrefixes(codes) {
panic(ErrCorrupt) // The codes do not form a valid prefix tree.
}
}
// checkPrefixes reports whether all codes have non-overlapping prefixes.
// This check is expensive and runs in O(n^2) time!
func checkPrefixes(codes []prefixCode) bool {
for i, c1 := range codes {
for j, c2 := range codes {
mask := uint32(1)<<c1.len - 1
if i != j && c1.len <= c2.len && c1.val&mask == c2.val&mask {
return false
}
}
}
return true
}
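Tying the two tables together, the lookup described in the comment at the top of this file reduces to the following sketch (bits is assumed to hold enough look-ahead; the real reader interleaves this with bit-buffer refills):

// decodeSymSketch performs one chunks probe, then an optional links probe
// when the stored length exceeds chunkBits (meaning the entry is a link).
func decodeSymSketch(pd *prefixDecoder, bits uint32) (sym, length uint32) {
	c := pd.chunks[bits&pd.chunkMask]
	if c&prefixCountMask > pd.chunkBits {
		linkIdx := c >> prefixCountBits // Symbol field doubles as links index
		c = pd.links[linkIdx][(bits>>pd.chunkBits)&pd.linkMask]
	}
	return c >> prefixCountBits, c & prefixCountMask
}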

View File

@ -0,0 +1,9 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
type prefixEncoder struct{}
func (pe *prefixEncoder) Init(codes []prefixCode) {}

View File

@ -0,0 +1,5 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli

624
vendor/github.com/dsnet/compress/brotli/reader.go generated vendored Normal file
View File

@ -0,0 +1,624 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "io"
import "io/ioutil"
type Reader struct {
InputOffset int64 // Total number of bytes read from underlying io.Reader
OutputOffset int64 // Total number of bytes emitted from Read
rd bitReader // Input source
toRead []byte // Uncompressed data ready to be emitted from Read
blkLen int // Uncompressed bytes left to read in meta-block
insLen int // Bytes left to insert in current command
cpyLen int // Bytes left to copy in current command
last bool // Last block bit detected
err error // Persistent error
step func(*Reader) // Single step of decompression work (can panic)
stepState int // The sub-step state for certain steps
mtf moveToFront // Local move-to-front decoder
dict dictDecoder // Dynamic sliding dictionary
iacBlk blockDecoder // Insert-and-copy block decoder
litBlk blockDecoder // Literal block decoder
distBlk blockDecoder // Distance block decoder
// Literal decoding state fields.
litMapType []uint8 // The current literal context map for the current block type
litMap []uint8 // Literal context map
cmode uint8 // The current context mode
cmodes []uint8 // Literal context modes
// Distance decoding state fields.
distMap []uint8 // Distance context map
distMapType []uint8 // The current distance context map for the current block type
dist int // The current distance (may not be in dists)
dists [4]int // Last few distances (newest-to-oldest)
distZero bool // Implicit zero distance symbol found
npostfix uint8 // Postfix bits used in distance decoding
ndirect uint8 // Number of direct distance codes
// Static dictionary state fields.
word []byte // Transformed word obtained from static dictionary
wordBuf [maxWordSize]byte // Buffer to write a transformed word into
// Meta data fields.
metaRd io.LimitedReader // Local LimitedReader to reduce allocation
metaWr io.Writer // Writer to write meta data to
metaBuf []byte // Scratch space for reading meta data
}
type blockDecoder struct {
numTypes int // Total number of types
typeLen int // The number of blocks left for this type
types [2]uint8 // The current (0) and previous (1) block type
decType prefixDecoder // Prefix decoder for the type symbol
decLen prefixDecoder // Prefix decoder for block length
prefixes []prefixDecoder // Prefix decoders for each block type
}
type ReaderConfig struct {
_ struct{} // Blank field to prevent unkeyed struct literals
}
func NewReader(r io.Reader, conf *ReaderConfig) (*Reader, error) {
br := new(Reader)
br.Reset(r)
return br, nil
}
func (br *Reader) Read(buf []byte) (int, error) {
for {
if len(br.toRead) > 0 {
cnt := copy(buf, br.toRead)
br.toRead = br.toRead[cnt:]
br.OutputOffset += int64(cnt)
return cnt, nil
}
if br.err != nil {
return 0, br.err
}
// Perform next step in decompression process.
br.rd.offset = br.InputOffset
func() {
defer errRecover(&br.err)
br.step(br)
}()
br.InputOffset = br.rd.FlushOffset()
if br.err != nil {
br.toRead = br.dict.ReadFlush() // Flush what's left in case of error
}
}
}
func (br *Reader) Close() error {
if br.err == io.EOF || br.err == io.ErrClosedPipe {
br.toRead = nil // Make sure future reads fail
br.err = io.ErrClosedPipe
return nil
}
return br.err // Return the persistent error
}
func (br *Reader) Reset(r io.Reader) error {
*br = Reader{
rd: br.rd,
step: (*Reader).readStreamHeader,
dict: br.dict,
iacBlk: br.iacBlk,
litBlk: br.litBlk,
distBlk: br.distBlk,
word: br.word[:0],
cmodes: br.cmodes[:0],
litMap: br.litMap[:0],
distMap: br.distMap[:0],
dists: [4]int{4, 11, 15, 16}, // RFC section 4
// TODO(dsnet): Should we write meta data somewhere useful?
metaWr: ioutil.Discard,
metaBuf: br.metaBuf,
}
br.rd.Init(r)
return nil
}
// readStreamHeader reads the Brotli stream header according to RFC section 9.1.
func (br *Reader) readStreamHeader() {
wbits := uint(br.rd.ReadSymbol(&decWinBits))
if wbits == 0 {
panic(ErrCorrupt) // Reserved value used
}
size := int(1<<wbits) - 16
br.dict.Init(size)
br.readBlockHeader()
}
// readBlockHeader reads a meta-block header according to RFC section 9.2.
func (br *Reader) readBlockHeader() {
if br.last {
if br.rd.ReadPads() > 0 {
panic(ErrCorrupt)
}
panic(io.EOF)
}
// Read ISLAST and ISLASTEMPTY.
if br.last = br.rd.ReadBits(1) == 1; br.last {
if empty := br.rd.ReadBits(1) == 1; empty {
br.readBlockHeader() // Next call will terminate stream
return
}
}
// Read MLEN and MNIBBLES and process meta data.
var blkLen int // 1..1<<24
if nibbles := br.rd.ReadBits(2) + 4; nibbles == 7 {
if reserved := br.rd.ReadBits(1) == 1; reserved {
panic(ErrCorrupt)
}
var skipLen int // 0..1<<24
if skipBytes := br.rd.ReadBits(2); skipBytes > 0 {
skipLen = int(br.rd.ReadBits(skipBytes * 8))
if skipBytes > 1 && skipLen>>((skipBytes-1)*8) == 0 {
panic(ErrCorrupt) // Shortest representation not used
}
skipLen++
}
if br.rd.ReadPads() > 0 {
panic(ErrCorrupt)
}
br.blkLen = skipLen // Use blkLen to track meta data number of bytes
br.readMetaData()
return
} else {
blkLen = int(br.rd.ReadBits(nibbles * 4))
if nibbles > 4 && blkLen>>((nibbles-1)*4) == 0 {
panic(ErrCorrupt) // Shortest representation not used
}
blkLen++
}
br.blkLen = blkLen
// Read ISUNCOMPRESSED and process uncompressed data.
if !br.last {
if uncompressed := br.rd.ReadBits(1) == 1; uncompressed {
if br.rd.ReadPads() > 0 {
panic(ErrCorrupt)
}
br.readRawData()
return
}
}
br.readPrefixCodes()
}
// readMetaData reads meta data according to RFC section 9.2.
func (br *Reader) readMetaData() {
br.metaRd.R = &br.rd
br.metaRd.N = int64(br.blkLen)
if br.metaBuf == nil {
br.metaBuf = make([]byte, 4096) // Lazy allocate
}
if cnt, err := io.CopyBuffer(br.metaWr, &br.metaRd, br.metaBuf); err != nil {
panic(err) // Will never panic with io.EOF
} else if cnt < int64(br.blkLen) {
panic(io.ErrUnexpectedEOF)
}
br.step = (*Reader).readBlockHeader
}
// readRawData reads raw data according to RFC section 9.2.
func (br *Reader) readRawData() {
buf := br.dict.WriteSlice()
if len(buf) > br.blkLen {
buf = buf[:br.blkLen]
}
cnt, err := br.rd.Read(buf)
br.blkLen -= cnt
br.dict.WriteMark(cnt)
if err != nil {
if err == io.EOF {
err = io.ErrUnexpectedEOF
}
panic(err)
}
if br.blkLen > 0 {
br.toRead = br.dict.ReadFlush()
br.step = (*Reader).readRawData // We need to continue this work
return
}
br.step = (*Reader).readBlockHeader
}
// readPrefixCodes reads the prefix codes according to RFC section 9.2.
func (br *Reader) readPrefixCodes() {
// Read block types for literal, insert-and-copy, and distance blocks.
for _, bd := range []*blockDecoder{&br.litBlk, &br.iacBlk, &br.distBlk} {
// Note: According to RFC section 6, it is okay for the block count to
// *not* count down to zero. Thus, there is no need to validate that
// typeLen is within some reasonable range.
bd.types = [2]uint8{0, 1}
bd.typeLen = -1 // Stay on this type until next meta-block
bd.numTypes = int(br.rd.ReadSymbol(&decCounts)) // 1..256
if bd.numTypes >= 2 {
br.rd.ReadPrefixCode(&bd.decType, uint(bd.numTypes)+2)
br.rd.ReadPrefixCode(&bd.decLen, uint(numBlkCntSyms))
sym := br.rd.ReadSymbol(&bd.decLen)
bd.typeLen = int(br.rd.ReadOffset(sym, blkLenRanges))
}
}
// Read NPOSTFIX and NDIRECT.
npostfix := br.rd.ReadBits(2) // 0..3
ndirect := br.rd.ReadBits(4) << npostfix // 0..120
br.npostfix, br.ndirect = uint8(npostfix), uint8(ndirect)
numDistSyms := 16 + ndirect + 48<<npostfix
// Read CMODE, the literal context modes.
br.cmodes = allocUint8s(br.cmodes, br.litBlk.numTypes)
for i := range br.cmodes {
br.cmodes[i] = uint8(br.rd.ReadBits(2))
}
br.cmode = br.cmodes[0] // 0..3
// Read CMAPL, the literal context map.
numLitTrees := int(br.rd.ReadSymbol(&decCounts)) // 1..256
br.litMap = allocUint8s(br.litMap, maxLitContextIDs*br.litBlk.numTypes)
if numLitTrees >= 2 {
br.readContextMap(br.litMap, uint(numLitTrees))
} else {
for i := range br.litMap {
br.litMap[i] = 0
}
}
br.litMapType = br.litMap[0:] // First block type is zero
// Read CMAPD, the distance context map.
numDistTrees := int(br.rd.ReadSymbol(&decCounts)) // 1..256
br.distMap = allocUint8s(br.distMap, maxDistContextIDs*br.distBlk.numTypes)
if numDistTrees >= 2 {
br.readContextMap(br.distMap, uint(numDistTrees))
} else {
for i := range br.distMap {
br.distMap[i] = 0
}
}
br.distMapType = br.distMap[0:] // First block type is zero
// Read HTREEL[], HTREEI[], and HTREED[], the arrays of prefix codes.
br.litBlk.prefixes = extendDecoders(br.litBlk.prefixes, numLitTrees)
for i := range br.litBlk.prefixes {
br.rd.ReadPrefixCode(&br.litBlk.prefixes[i], numLitSyms)
}
br.iacBlk.prefixes = extendDecoders(br.iacBlk.prefixes, br.iacBlk.numTypes)
for i := range br.iacBlk.prefixes {
br.rd.ReadPrefixCode(&br.iacBlk.prefixes[i], numIaCSyms)
}
br.distBlk.prefixes = extendDecoders(br.distBlk.prefixes, numDistTrees)
for i := range br.distBlk.prefixes {
br.rd.ReadPrefixCode(&br.distBlk.prefixes[i], numDistSyms)
}
br.step = (*Reader).readCommands
}
// readCommands reads block commands according to RFC section 9.3.
func (br *Reader) readCommands() {
// Since Go does not support tail call optimization, we use goto statements
// to achieve higher performance when processing each command. Each label can be
// thought of as a mini function, and each goto as a cheap function call.
// The following code follows this control flow.
//
// The bulk of the action will be in the following loop:
// startCommand -> readLiterals -> readDistance -> copyDynamicDict ->
// finishCommand -> startCommand -> ...
/*
readCommands()
|
+----------------> +
| |
| V
| +-- startCommand
| | |
| | V
| | readLiterals ----------+
| | | |
| | V |
| +-> readDistance |
| | |
| +--------+--------+ |
| | | |
| V V |
| copyDynamicDict copyStaticDict |
| | | |
| +--------+--------+ |
| | |
| V |
+----------- finishCommand <---------+
|
V
readBlockHeader()
*/
const (
stateInit = iota // Zero value must be stateInit
// Some labels (readLiterals, copyDynamicDict, copyStaticDict) require
// work to be continued if more buffer space is needed. This is achieved
// by the switch block right below, which continues the work at the
// right label based on the given sub-step value.
stateLiterals
stateDynamicDict
stateStaticDict
)
switch br.stepState {
case stateInit:
goto startCommand
case stateLiterals:
goto readLiterals
case stateDynamicDict:
goto copyDynamicDict
case stateStaticDict:
goto copyStaticDict
}
startCommand:
// Read the insert and copy lengths according to RFC section 5.
{
if br.iacBlk.typeLen == 0 {
br.readBlockSwitch(&br.iacBlk)
}
br.iacBlk.typeLen--
iacTree := &br.iacBlk.prefixes[br.iacBlk.types[0]]
iacSym, ok := br.rd.TryReadSymbol(iacTree)
if !ok {
iacSym = br.rd.ReadSymbol(iacTree)
}
rec := iacLUT[iacSym]
insExtra, ok := br.rd.TryReadBits(uint(rec.ins.bits))
if !ok {
insExtra = br.rd.ReadBits(uint(rec.ins.bits))
}
cpyExtra, ok := br.rd.TryReadBits(uint(rec.cpy.bits))
if !ok {
cpyExtra = br.rd.ReadBits(uint(rec.cpy.bits))
}
br.insLen = int(rec.ins.base) + int(insExtra)
br.cpyLen = int(rec.cpy.base) + int(cpyExtra)
br.distZero = iacSym < 128
if br.insLen > 0 {
goto readLiterals
} else {
goto readDistance
}
}
readLiterals:
// Read literal symbols as uncompressed data according to RFC section 9.3.
{
buf := br.dict.WriteSlice()
if len(buf) > br.insLen {
buf = buf[:br.insLen]
}
p1, p2 := br.dict.LastBytes()
for i := range buf {
if br.litBlk.typeLen == 0 {
br.readBlockSwitch(&br.litBlk)
br.litMapType = br.litMap[64*int(br.litBlk.types[0]):]
br.cmode = br.cmodes[br.litBlk.types[0]] // 0..3
}
br.litBlk.typeLen--
litCID := getLitContextID(p1, p2, br.cmode) // 0..63
litTree := &br.litBlk.prefixes[br.litMapType[litCID]]
litSym, ok := br.rd.TryReadSymbol(litTree)
if !ok {
litSym = br.rd.ReadSymbol(litTree)
}
buf[i] = byte(litSym)
p1, p2 = byte(litSym), p1
br.dict.WriteMark(1)
}
br.insLen -= len(buf)
br.blkLen -= len(buf)
if br.insLen > 0 {
br.toRead = br.dict.ReadFlush()
br.step = (*Reader).readCommands
br.stepState = stateLiterals // Need to continue work here
return
} else if br.blkLen > 0 {
goto readDistance
} else {
goto finishCommand
}
}
readDistance:
// Read and decode the distance length according to RFC section 9.3.
{
if br.distZero {
br.dist = br.dists[0]
} else {
if br.distBlk.typeLen == 0 {
br.readBlockSwitch(&br.distBlk)
br.distMapType = br.distMap[4*int(br.distBlk.types[0]):]
}
br.distBlk.typeLen--
distCID := getDistContextID(br.cpyLen) // 0..3
distTree := &br.distBlk.prefixes[br.distMapType[distCID]]
distSym, ok := br.rd.TryReadSymbol(distTree)
if !ok {
distSym = br.rd.ReadSymbol(distTree)
}
if distSym < 16 { // Short-code
rec := distShortLUT[distSym]
br.dist = br.dists[rec.index] + rec.delta
} else if distSym < uint(16+br.ndirect) { // Direct-code
br.dist = int(distSym - 15) // 1..ndirect
} else { // Long-code
rec := distLongLUT[br.npostfix][distSym-uint(16+br.ndirect)]
extra, ok := br.rd.TryReadBits(uint(rec.bits))
if !ok {
extra = br.rd.ReadBits(uint(rec.bits))
}
br.dist = int(br.ndirect) + int(rec.base) + int(extra<<br.npostfix)
}
br.distZero = distSym == 0
if br.dist <= 0 {
panic(ErrCorrupt)
}
}
if br.dist <= br.dict.HistSize() {
if !br.distZero {
br.dists[3] = br.dists[2]
br.dists[2] = br.dists[1]
br.dists[1] = br.dists[0]
br.dists[0] = br.dist
}
goto copyDynamicDict
} else {
goto copyStaticDict
}
}
copyDynamicDict:
// Copy a string from the past uncompressed data according to RFC section 2.
{
cnt := br.dict.WriteCopy(br.dist, br.cpyLen)
br.blkLen -= cnt
br.cpyLen -= cnt
if br.cpyLen > 0 {
br.toRead = br.dict.ReadFlush()
br.step = (*Reader).readCommands
br.stepState = stateDynamicDict // Need to continue work here
return
} else {
goto finishCommand
}
}
copyStaticDict:
// Copy a string from the static dictionary according to RFC section 8.
{
if len(br.word) == 0 {
if br.cpyLen < minDictLen || br.cpyLen > maxDictLen {
panic(ErrCorrupt)
}
wordIdx := br.dist - (br.dict.HistSize() + 1)
index := wordIdx % dictSizes[br.cpyLen]
offset := dictOffsets[br.cpyLen] + index*br.cpyLen
baseWord := dictLUT[offset : offset+br.cpyLen]
transformIdx := wordIdx >> uint(dictBitSizes[br.cpyLen])
if transformIdx >= len(transformLUT) {
panic(ErrCorrupt)
}
cnt := transformWord(br.wordBuf[:], baseWord, transformIdx)
br.word = br.wordBuf[:cnt]
}
buf := br.dict.WriteSlice()
cnt := copy(buf, br.word)
br.word = br.word[cnt:]
br.blkLen -= cnt
br.dict.WriteMark(cnt)
if len(br.word) > 0 {
br.toRead = br.dict.ReadFlush()
br.step = (*Reader).readCommands
br.stepState = stateStaticDict // Need to continue work here
return
} else {
goto finishCommand
}
}
finishCommand:
// Finish off this command and check if we need to loop again.
if br.blkLen < 0 {
panic(ErrCorrupt)
} else if br.blkLen > 0 {
goto startCommand // More commands in this block
}
// Done with this block.
br.toRead = br.dict.ReadFlush()
br.step = (*Reader).readBlockHeader
br.stepState = stateInit // Next call to readCommands must start here
return
}
// readContextMap reads the context map according to RFC section 7.3.
func (br *Reader) readContextMap(cm []uint8, numTrees uint) {
// TODO(dsnet): Test the following edge cases:
// * Test with largest and smallest MAXRLE sizes
// * Test with a very large MAXRLE value
// * Test inverseMoveToFront
maxRLE := br.rd.ReadSymbol(&decMaxRLE)
br.rd.ReadPrefixCode(&br.rd.prefix, maxRLE+numTrees)
for i := 0; i < len(cm); {
sym := br.rd.ReadSymbol(&br.rd.prefix)
if sym == 0 || sym > maxRLE {
// Single non-zero value.
if sym > 0 {
sym -= maxRLE
}
cm[i] = uint8(sym)
i++
} else {
// Repeated zeros.
n := int(br.rd.ReadOffset(sym-1, maxRLERanges))
if i+n > len(cm) {
panic(ErrCorrupt)
}
for j := i + n; i < j; i++ {
cm[i] = 0
}
}
}
if invert := br.rd.ReadBits(1) == 1; invert {
br.mtf.Decode(cm)
}
}
// readBlockSwitch handles a block switch command according to RFC section 6.
func (br *Reader) readBlockSwitch(bd *blockDecoder) {
symType := br.rd.ReadSymbol(&bd.decType)
switch symType {
case 0:
symType = uint(bd.types[1])
case 1:
symType = uint(bd.types[0]) + 1
if symType >= uint(bd.numTypes) {
symType -= uint(bd.numTypes)
}
default:
symType -= 2
}
bd.types = [2]uint8{uint8(symType), bd.types[0]}
symLen := br.rd.ReadSymbol(&bd.decLen)
bd.typeLen = int(br.rd.ReadOffset(symLen, blkLenRanges))
}
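Taken together, the step functions above drive an ordinary io.Reader; a minimal consumer, mirroring how the tests below use ioutil.ReadAll, might be:

// decompressSketch copies one Brotli stream from src to dst; a usage sketch
// for the Reader above, not part of the original package.
func decompressSketch(dst io.Writer, src io.Reader) error {
	br, err := NewReader(src, nil)
	if err != nil {
		return err
	}
	if _, err := io.Copy(dst, br); err != nil {
		return err
	}
	return br.Close()
}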

455
vendor/github.com/dsnet/compress/brotli/reader_test.go generated vendored Normal file
View File

@ -0,0 +1,455 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "io"
import "io/ioutil"
import "bufio"
import "bytes"
import "strings"
import "encoding/hex"
import "runtime"
import "testing"
func TestReader(t *testing.T) {
var vectors = []struct {
desc string // Description of the test
input string // Test input string in hex
output string // Expected output string in hex
inIdx int64 // Expected input offset after reading
outIdx int64 // Expected output offset after reading
err error // Expected error
}{{
desc: "empty string (truncated)",
err: io.ErrUnexpectedEOF,
}, {
desc: "empty last block (WBITS: 16)",
input: "06",
inIdx: 1,
}, {
desc: "empty last block (WBITS: 12)",
input: "c101",
inIdx: 2,
}, {
desc: "empty last block (WBITS: 17)",
input: "8101",
inIdx: 2,
}, {
desc: "empty last block (WBITS: 21)",
input: "39",
inIdx: 1,
}, {
desc: "empty last block (WBITS: invalid)",
input: "9101",
inIdx: 1,
err: ErrCorrupt,
}, {
desc: "empty last block (trash at the end)",
input: "06ff",
inIdx: 1,
}, {
desc: "empty last block (padding is non-zero)",
input: "16",
inIdx: 1,
err: ErrCorrupt,
}, {
desc: "empty meta data block (MLEN: 0)",
input: "0c03",
inIdx: 2,
}, {
desc: "meta data block",
input: "2c0648656c6c6f2c20776f726c642103",
inIdx: 16,
}, {
desc: "meta data block (truncated)",
input: "2c06",
inIdx: 2,
err: io.ErrUnexpectedEOF,
}, {
desc: "meta data block (use reserved bit)",
input: "3c0648656c6c6f2c20776f726c642103",
inIdx: 1,
err: ErrCorrupt,
}, {
desc: "meta data block (meta padding is non-zero)",
input: "2c8648656c6c6f2c20776f726c642103",
inIdx: 2,
err: ErrCorrupt,
}, {
desc: "meta data block (non-minimal MLEN)",
input: "4c060048656c6c6f2c20776f726c642103",
inIdx: 3,
err: ErrCorrupt,
}, {
desc: "meta data block (MLEN: 1<<0)",
input: "2c00ff03",
inIdx: 4,
}, {
desc: "meta data block (MLEN: 1<<24)",
input: "ecffff7f" + strings.Repeat("f0", 1<<24) + "03",
inIdx: 5 + 1<<24,
}, {
desc: "raw data block",
input: "c0001048656c6c6f2c20776f726c642103",
output: "48656c6c6f2c20776f726c6421",
inIdx: 17,
outIdx: 13,
}, {
desc: "raw data block (truncated)",
input: "c00010",
inIdx: 3,
err: io.ErrUnexpectedEOF,
}, {
desc: "raw data block (raw padding is non-zero)",
input: "c000f048656c6c6f2c20776f726c642103",
inIdx: 3,
err: ErrCorrupt,
}, {
desc: "raw data block (non-minimal MLEN)",
input: "c400000148656c6c6f2c20776f726c642103",
inIdx: 3,
err: ErrCorrupt,
}, {
desc: "raw data block (MLEN: 1<<0)",
input: "0000106103",
output: "61",
inIdx: 4 + 1<<0,
outIdx: 1 << 0,
}, {
desc: "raw data block (MLEN: 1<<24)",
input: "f8ffff1f" + strings.Repeat("f0", 1<<24) + "03",
output: strings.Repeat("f0", 1<<24),
inIdx: 5 + 1<<24,
outIdx: 1 << 24,
}, {
desc: "simple prefix (|L|:1 |I|:1 |D|:1 MLEN:1)",
input: "00000000c4682010c0",
output: "a3",
inIdx: 9,
outIdx: 1,
}, {
desc: "simple prefix, out-of-order (|L|:2 |I|:1 |D|:1 MLEN:1)",
input: "00000000d4a8682010c001",
output: "a3",
inIdx: 11,
outIdx: 1,
}, {
desc: "simple prefix, non-unique (|L|:2 |I|:1 |D|:1 MLEN:1)",
input: "00000000d4e8682010c001",
output: "",
inIdx: 7,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "simple prefix, out-of-order (|L|:3 |I|:1 |D|:1 MLEN:1)",
input: "0000000024e8e96820104003",
output: "a3",
inIdx: 12,
outIdx: 1,
}, {
desc: "simple prefix, out-of-order, no-tree-select (|L|:4 |I|:1 |D|:1 MLEN:1)",
input: "0000000034e8e968a840208006",
output: "a3",
inIdx: 13,
outIdx: 1,
}, {
desc: "simple prefix, out-of-order, yes-tree-select (|L|:4 |I|:1 |D|:1 MLEN:1)",
input: "0000000034e8e968e94020800d",
output: "a3",
inIdx: 13,
outIdx: 1,
}, {
desc: "simple prefix, max-sym-ok (|L|:1 |I|:2 |D|:1 MLEN:1)",
input: "00000000c46821f06b0006",
output: "a3",
inIdx: 11,
outIdx: 1,
}, {
desc: "simple prefix, max-sym-bad (|L|:1 |I|:2 |D|:1 MLEN:1)",
input: "00000000c46821006c0006",
output: "",
inIdx: 9,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, terminate-clens-codes (|L|:1 |I|:2 |D|:1 MLEN:1)",
input: "0000000070472010c001",
output: "01",
inIdx: 10,
outIdx: 1,
}, {
desc: "complex prefix, skip-zero, terminate-clens-codes (|L|:1 |I|:2 |D|:1 MLEN:1)",
input: "0000000070c01d080470",
output: "01",
inIdx: 10,
outIdx: 1,
}, {
desc: "complex prefix, skip-zero, terminate-clens-codes (|L|:1 |I|:2 |D|:1 MLEN:2)",
input: "1000000070c01d1004d0",
output: "0100",
inIdx: 10,
outIdx: 2,
}, {
desc: "complex prefix, skip-zero, terminate-codes (|L|:1 |I|:4 |D|:1 MLEN:3)",
input: "20000000b0c100000056151804700e",
output: "030201",
inIdx: 15,
outIdx: 3,
}, {
desc: "complex prefix, skip-zero, under-subscribed (|L|:1 |I|:4 |D|:1 MLEN:3)",
input: "20000000b0c1000000ae2a3008e01c",
output: "",
inIdx: 10,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, over-subscribed (|L|:1 |I|:4 |D|:1 MLEN:3)",
input: "20000000b0c1000000ac0a0c023807",
output: "",
inIdx: 10,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, single clens (|L|:1 |I|:256 |D|:1 MLEN:4)",
input: "30000000000000020001420000a5ff5503",
output: "00a5ffaa",
inIdx: 17,
outIdx: 4,
}, {
desc: "complex prefix, skip-zero, single clens (|L|:1 |I|:32 |D|:1 MLEN:4)",
input: "3000000000c001000004080100faf7",
output: "00051f1b",
inIdx: 15,
outIdx: 4,
}, {
desc: "complex prefix, skip-zero, single clens, zero clen (|L|:1 |I|:? |D|:1 MLEN:4)",
input: "30000000007000000004080100faf7",
output: "",
inIdx: 10,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, empty clens (|L|:1 |I|:? |D|:1 MLEN:4)",
input: "30000000000000000001420080fe3d",
output: "",
inIdx: 9,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, single clens, rep-last clen (|L|:1 |I|:256 |D|:1 MLEN:4)",
input: "3000000000002000006a014200aa33cc5503",
output: "55cc33aa",
inIdx: 18,
outIdx: 4,
}, {
desc: "complex prefix, skip-zero, single clens, rep-last clen, over-subscribed (|L|:1 |I|:257 |D|:1 MLEN:4)",
input: "300000000000200000aa014200aa33cc5503",
output: "",
inIdx: 10,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-zero, single clens, rep-last clen, integer overflow (|L|:1 |I|:1018 |D|:1 MLEN:4)",
input: "3000000000002000002a070801a8ce30570d",
output: "",
inIdx: 11,
outIdx: 0,
err: ErrCorrupt,
}, {
desc: "complex prefix, skip-two, single clens, rep-last clen (|L|:1 |I|:256 |D|:1 MLEN:4)",
input: "3000000008000f00805a801080ea0c73d5",
output: "55cc33aa",
inIdx: 17,
outIdx: 4,
}, {
desc: "complex prefix, skip-three, single clens, rep-last clen (|L|:1 |I|:256 |D|:1 MLEN:4)",
input: "300000000cc00300a0162004a03ac35c35",
output: "55cc33aa",
inIdx: 17,
outIdx: 4,
}, {
desc: "complex prefix, skip-zero, linear clens (|L|:1 |I|:16 |D|:1 MLEN:16)",
input: "f000000050555555ffff8bd5169058d43cb2fadcf77f201480dabdeff7f7efbf" +
"fffddffffbfffe7fffff01",
output: "6162636465666768696a6b6c6d6e6f70",
inIdx: 43,
outIdx: 16,
}, {
desc: "complex prefix, skip-zero, mixed clens (|L|:1 |I|:192 |D|:1 MLEN:16)",
input: "f000000050555555ffffe37a310f369a4d4b80756cc779b0619a02a1002c29ab" +
"ec066084eee99dfd67d8ac18",
output: "000240525356575e717a8abcbdbed7d9",
inIdx: 44,
outIdx: 16,
}, {
desc: "compressed string: \"Hello, world! Hello, world!\"",
input: "1b1a00008c946ed6540dc2825426d942de6a9668ea996c961e00",
output: "48656c6c6f2c20776f726c64212048656c6c6f2c20776f726c6421",
inIdx: 26,
outIdx: 27,
}, {
desc: "compressed string (padding is non-zero): \"Hello, world! Hello, world!\"",
input: "1b1a00008c946ed6540dc2825426d942de6a9668ea996c961e80",
output: "48656c6c6f2c20776f726c64212048656c6c6f2c20776f726c6421",
inIdx: 26,
outIdx: 27,
err: ErrCorrupt,
}}
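	// Each vector gives a hex-encoded input (and the expected hex output where
	// decoding succeeds), the expected input/output offsets once the reader
	// stops, and the expected terminal error, if any.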
for i, v := range vectors {
input, _ := hex.DecodeString(v.input)
rd, err := NewReader(bytes.NewReader(input), nil)
if err != nil {
t.Errorf("test %d, unexpected NewReader error: %v", i, err)
}
data, err := ioutil.ReadAll(rd)
output := hex.EncodeToString(data)
if err != v.err {
t.Errorf("test %d, %s\nerror mismatch: got %v, want %v", i, v.desc, err, v.err)
}
if output != v.output {
t.Errorf("test %d, %s\noutput mismatch:\ngot %v\nwant %v", i, v.desc, output, v.output)
}
if rd.InputOffset != v.inIdx {
t.Errorf("test %d, %s\ninput offset mismatch: got %d, want %d", i, v.desc, rd.InputOffset, v.inIdx)
}
if rd.OutputOffset != v.outIdx {
t.Errorf("test %d, %s\noutput offset mismatch: got %d, want %d", i, v.desc, rd.OutputOffset, v.outIdx)
}
}
}
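// TestReaderGolden decompresses each brotli file in testdata and compares
// the result against the corresponding uncompressed golden file.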
func TestReaderGolden(t *testing.T) {
var vectors = []struct {
input string // Input filename
output string // Output filename
}{
{"empty.br", "empty"},
{"empty.00.br", "empty"},
{"empty.01.br", "empty"},
{"empty.02.br", "empty"},
{"empty.03.br", "empty"},
{"empty.04.br", "empty"},
{"empty.05.br", "empty"},
{"empty.06.br", "empty"},
{"empty.07.br", "empty"},
{"empty.08.br", "empty"},
{"empty.09.br", "empty"},
{"empty.10.br", "empty"},
{"empty.11.br", "empty"},
{"empty.12.br", "empty"},
{"empty.13.br", "empty"},
{"empty.14.br", "empty"},
{"empty.15.br", "empty"},
{"empty.16.br", "empty"},
{"empty.17.br", "empty"},
{"empty.18.br", "empty"},
{"zeros.br", "zeros"},
{"x.br", "x"},
{"x.00.br", "x"},
{"x.01.br", "x"},
{"x.02.br", "x"},
{"x.03.br", "x"},
{"xyzzy.br", "xyzzy"},
{"10x10y.br", "10x10y"},
{"64x.br", "64x"},
{"backward65536.br", "backward65536"},
{"quickfox.br", "quickfox"},
{"quickfox_repeated.br", "quickfox_repeated"},
{"ukkonooa.br", "ukkonooa"},
{"monkey.br", "monkey"},
{"random_org_10k.bin.br", "random_org_10k.bin"},
{"asyoulik.txt.br", "asyoulik.txt"},
{"compressed_file.br", "compressed_file"},
{"compressed_repeated.br", "compressed_repeated"},
{"alice29.txt.br", "alice29.txt"},
{"lcet10.txt.br", "lcet10.txt"},
{"mapsdatazrh.br", "mapsdatazrh"},
{"plrabn12.txt.br", "plrabn12.txt"},
}
for i, v := range vectors {
input, err := ioutil.ReadFile("testdata/" + v.input)
if err != nil {
t.Errorf("test %d: %s\n%v", i, v.input, err)
continue
}
output, err := ioutil.ReadFile("testdata/" + v.output)
if err != nil {
t.Errorf("test %d: %s\n%v", i, v.output, err)
continue
}
rd, err := NewReader(bytes.NewReader(input), nil)
if err != nil {
t.Errorf("test %d, unexpected NewReader error: %v", i, err)
}
data, err := ioutil.ReadAll(rd)
if err != nil {
t.Errorf("test %d, %s\nerror mismatch: got %v, want nil", i, v.input, err)
}
if string(data) != string(output) {
t.Errorf("test %d, %s\noutput mismatch:\ngot %q\nwant %q", i, v.input, string(data), string(output))
}
}
}
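// benchmarkDecode decompresses testfile once up front to learn the
// decompressed size for SetBytes, then times repeated full decompressions.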
func benchmarkDecode(b *testing.B, testfile string) {
b.StopTimer()
b.ReportAllocs()
input, err := ioutil.ReadFile("testdata/" + testfile)
if err != nil {
b.Fatal(err)
}
r, err := NewReader(bytes.NewReader(input), nil)
if err != nil {
b.Fatal(err)
}
output, err := ioutil.ReadAll(r)
if err != nil {
b.Fatal(err)
}
nb := int64(len(output))
output = nil
runtime.GC()
b.SetBytes(nb)
b.StartTimer()
for i := 0; i < b.N; i++ {
r, err := NewReader(bufio.NewReader(bytes.NewReader(input)), nil)
if err != nil {
b.Fatal(err)
}
cnt, err := io.Copy(ioutil.Discard, r)
if err != nil {
b.Fatalf("unexpected error: %v", err)
}
if cnt != nb {
b.Fatalf("unexpected count: got %d, want %d", cnt, nb)
}
}
}
func BenchmarkDecodeDigitsSpeed1e4(b *testing.B) { benchmarkDecode(b, "digits-speed-1e4.br") }
func BenchmarkDecodeDigitsSpeed1e5(b *testing.B) { benchmarkDecode(b, "digits-speed-1e5.br") }
func BenchmarkDecodeDigitsSpeed1e6(b *testing.B) { benchmarkDecode(b, "digits-speed-1e6.br") }
func BenchmarkDecodeDigitsDefault1e4(b *testing.B) { benchmarkDecode(b, "digits-default-1e4.br") }
func BenchmarkDecodeDigitsDefault1e5(b *testing.B) { benchmarkDecode(b, "digits-default-1e5.br") }
func BenchmarkDecodeDigitsDefault1e6(b *testing.B) { benchmarkDecode(b, "digits-default-1e6.br") }
func BenchmarkDecodeDigitsCompress1e4(b *testing.B) { benchmarkDecode(b, "digits-best-1e4.br") }
func BenchmarkDecodeDigitsCompress1e5(b *testing.B) { benchmarkDecode(b, "digits-best-1e5.br") }
func BenchmarkDecodeDigitsCompress1e6(b *testing.B) { benchmarkDecode(b, "digits-best-1e6.br") }
func BenchmarkDecodeTwainSpeed1e4(b *testing.B) { benchmarkDecode(b, "twain-speed-1e4.br") }
func BenchmarkDecodeTwainSpeed1e5(b *testing.B) { benchmarkDecode(b, "twain-speed-1e5.br") }
func BenchmarkDecodeTwainSpeed1e6(b *testing.B) { benchmarkDecode(b, "twain-speed-1e6.br") }
func BenchmarkDecodeTwainDefault1e4(b *testing.B) { benchmarkDecode(b, "twain-default-1e4.br") }
func BenchmarkDecodeTwainDefault1e5(b *testing.B) { benchmarkDecode(b, "twain-default-1e5.br") }
func BenchmarkDecodeTwainDefault1e6(b *testing.B) { benchmarkDecode(b, "twain-default-1e6.br") }
func BenchmarkDecodeTwainCompress1e4(b *testing.B) { benchmarkDecode(b, "twain-best-1e4.br") }
func BenchmarkDecodeTwainCompress1e5(b *testing.B) { benchmarkDecode(b, "twain-best-1e5.br") }
func BenchmarkDecodeTwainCompress1e6(b *testing.B) { benchmarkDecode(b, "twain-best-1e6.br") }

11
vendor/github.com/dsnet/compress/brotli/release.go generated vendored Normal file

@ -0,0 +1,11 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.

// +build !debug

package brotli
const debug = false
func printLUTs() {}
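The !debug tag above implies a counterpart file compiled only under -tags debug. A minimal sketch of what that counterpart presumably looks like (the actual vendored file is not shown in this diff and may differ):

// +build debug

package brotli

const debug = true

// printLUTs would dump the decoder's lookup tables; elided here since the
// real implementation is not part of this diff.
func printLUTs() { /* debug-only LUT dump */ }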

227
vendor/github.com/dsnet/compress/brotli/transform.go generated vendored Normal file

@ -0,0 +1,227 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
// RFC section 8.
// Maximum buffer size needed to store a word after a transformation.
const maxWordSize = maxDictLen + 13 + 1
// These constants are defined in Appendix B of the RFC.
const (
transformIdentity = iota
transformUppercaseFirst
transformUppercaseAll
transformOmitFirst1
transformOmitFirst2
transformOmitFirst3
transformOmitFirst4
transformOmitFirst5
transformOmitFirst6
transformOmitFirst7
transformOmitFirst8
transformOmitFirst9
transformOmitLast1
transformOmitLast2
transformOmitLast3
transformOmitLast4
transformOmitLast5
transformOmitLast6
transformOmitLast7
transformOmitLast8
transformOmitLast9
)
// This table is defined in Appendix B of the RFC.
var transformLUT = []struct {
prefix string
transform int
suffix string
}{
{"", transformIdentity, ""}, // 0
{"", transformIdentity, " "},
{" ", transformIdentity, " "},
{"", transformOmitFirst1, ""},
{"", transformUppercaseFirst, " "},
{"", transformIdentity, " the "},
{" ", transformIdentity, ""},
{"s ", transformIdentity, " "},
{"", transformIdentity, " of "},
{"", transformUppercaseFirst, ""},
{"", transformIdentity, " and "}, // 10
{"", transformOmitFirst2, ""},
{"", transformOmitLast1, ""},
{", ", transformIdentity, " "},
{"", transformIdentity, ", "},
{" ", transformUppercaseFirst, " "},
{"", transformIdentity, " in "},
{"", transformIdentity, " to "},
{"e ", transformIdentity, " "},
{"", transformIdentity, "\""},
{"", transformIdentity, "."}, // 20
{"", transformIdentity, "\">"},
{"", transformIdentity, "\n"},
{"", transformOmitLast3, ""},
{"", transformIdentity, "]"},
{"", transformIdentity, " for "},
{"", transformOmitFirst3, ""},
{"", transformOmitLast2, ""},
{"", transformIdentity, " a "},
{"", transformIdentity, " that "},
{" ", transformUppercaseFirst, ""}, // 30
{"", transformIdentity, ". "},
{".", transformIdentity, ""},
{" ", transformIdentity, ", "},
{"", transformOmitFirst4, ""},
{"", transformIdentity, " with "},
{"", transformIdentity, "'"},
{"", transformIdentity, " from "},
{"", transformIdentity, " by "},
{"", transformOmitFirst5, ""},
{"", transformOmitFirst6, ""}, // 40
{" the ", transformIdentity, ""},
{"", transformOmitLast4, ""},
{"", transformIdentity, ". The "},
{"", transformUppercaseAll, ""},
{"", transformIdentity, " on "},
{"", transformIdentity, " as "},
{"", transformIdentity, " is "},
{"", transformOmitLast7, ""},
{"", transformOmitLast1, "ing "},
{"", transformIdentity, "\n\t"}, // 50
{"", transformIdentity, ":"},
{" ", transformIdentity, ". "},
{"", transformIdentity, "ed "},
{"", transformOmitFirst9, ""},
{"", transformOmitFirst7, ""},
{"", transformOmitLast6, ""},
{"", transformIdentity, "("},
{"", transformUppercaseFirst, ", "},
{"", transformOmitLast8, ""},
{"", transformIdentity, " at "}, // 60
{"", transformIdentity, "ly "},
{" the ", transformIdentity, " of "},
{"", transformOmitLast5, ""},
{"", transformOmitLast9, ""},
{" ", transformUppercaseFirst, ", "},
{"", transformUppercaseFirst, "\""},
{".", transformIdentity, "("},
{"", transformUppercaseAll, " "},
{"", transformUppercaseFirst, "\">"},
{"", transformIdentity, "=\""}, // 70
{" ", transformIdentity, "."},
{".com/", transformIdentity, ""},
{" the ", transformIdentity, " of the "},
{"", transformUppercaseFirst, "'"},
{"", transformIdentity, ". This "},
{"", transformIdentity, ","},
{".", transformIdentity, " "},
{"", transformUppercaseFirst, "("},
{"", transformUppercaseFirst, "."},
{"", transformIdentity, " not "}, // 80
{" ", transformIdentity, "=\""},
{"", transformIdentity, "er "},
{" ", transformUppercaseAll, " "},
{"", transformIdentity, "al "},
{" ", transformUppercaseAll, ""},
{"", transformIdentity, "='"},
{"", transformUppercaseAll, "\""},
{"", transformUppercaseFirst, ". "},
{" ", transformIdentity, "("},
{"", transformIdentity, "ful "}, // 90
{" ", transformUppercaseFirst, ". "},
{"", transformIdentity, "ive "},
{"", transformIdentity, "less "},
{"", transformUppercaseAll, "'"},
{"", transformIdentity, "est "},
{" ", transformUppercaseFirst, "."},
{"", transformUppercaseAll, "\">"},
{" ", transformIdentity, "='"},
{"", transformUppercaseFirst, ","},
{"", transformIdentity, "ize "}, // 100
{"", transformUppercaseAll, "."},
{"\xc2\xa0", transformIdentity, ""},
{" ", transformIdentity, ","},
{"", transformUppercaseFirst, "=\""},
{"", transformUppercaseAll, "=\""},
{"", transformIdentity, "ous "},
{"", transformUppercaseAll, ", "},
{"", transformUppercaseFirst, "='"},
{" ", transformUppercaseFirst, ","},
{" ", transformUppercaseAll, "=\""}, // 110
{" ", transformUppercaseAll, ", "},
{"", transformUppercaseAll, ","},
{"", transformUppercaseAll, "("},
{"", transformUppercaseAll, ". "},
{" ", transformUppercaseAll, "."},
{"", transformUppercaseAll, "='"},
{" ", transformUppercaseAll, ". "},
{" ", transformUppercaseFirst, "=\""},
{" ", transformUppercaseAll, "='"},
{" ", transformUppercaseFirst, "='"}, // 120
}
// transformWord transforms the input word and places the result in buf according
// to the transform primitives defined in RFC section 8.
//
// The following invariants must be kept:
// 0 <= id < len(transformLUT)
// len(word) <= maxDictLen
// len(buf) >= maxWordSize
func transformWord(buf, word []byte, id int) (cnt int) {
transform := transformLUT[id]
tid := transform.transform
cnt = copy(buf, transform.prefix)
switch {
case tid == transformIdentity:
cnt += copy(buf[cnt:], word)
case tid == transformUppercaseFirst:
buf2 := buf[cnt:]
cnt += copy(buf2, word)
transformUppercase(buf2[:len(word)], true)
case tid == transformUppercaseAll:
buf2 := buf[cnt:]
cnt += copy(buf2, word)
transformUppercase(buf2[:len(word)], false)
case tid <= transformOmitFirst9:
cut := tid - transformOmitFirst1 + 1 // 1..9
if len(word) > cut {
cnt += copy(buf[cnt:], word[cut:])
}
case tid <= transformOmitLast9:
cut := tid - transformOmitLast1 + 1 // 1..9
if len(word) > cut {
cnt += copy(buf[cnt:], word[:len(word)-cut])
}
}
cnt += copy(buf[cnt:], transform.suffix)
return cnt
}
// transformUppercase transforms the word to uppercase using the algorithm
// presented in RFC section 8. If once is set, then the loop executes only once.
func transformUppercase(word []byte, once bool) {
for i := 0; i < len(word); {
c := word[i]
if c < 192 {
if c >= 97 && c <= 122 {
word[i] ^= 32
}
i += 1
} else if c < 224 {
if i+1 < len(word) {
word[i+1] ^= 32
}
i += 2
} else {
if i+2 < len(word) {
word[i+2] ^= 5
}
i += 3
}
if once {
return
}
}
}
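To make the mechanics concrete, here is a hypothetical in-package snippet applying LUT entry 15 (prefix " ", uppercase-first, suffix " "), matching one of the vectors in transform_test.go below:

var buf [maxWordSize]byte
n := transformWord(buf[:], []byte("meow"), 15)
// buf[:n] now holds " Meow ": the prefix is copied, the word is copied with
// its first byte uppercased, and the suffix is appended.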

61
vendor/github.com/dsnet/compress/brotli/transform_test.go generated vendored Normal file

@ -0,0 +1,61 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "testing"
func TestTransform(t *testing.T) {
var vectors = []struct {
id int
input string
output string
}{
{id: 0, input: "Hello, world!", output: "Hello, world!"},
{id: 23, input: "groups of", output: "groups"},
{id: 42, input: "s for the ", output: "s for "},
{id: 48, input: "presentation", output: "prese"},
{id: 56, input: "maintenance", output: "maint"},
{id: 23, input: "Alexandria", output: "Alexand"},
{id: 23, input: "archives", output: "archi"},
{id: 49, input: "fighty", output: "fighting "},
{id: 49, input: "12", output: "1ing "},
{id: 49, input: "1", output: "ing "},
{id: 49, input: "", output: "ing "},
{id: 64, input: "123456789a", output: "1"},
{id: 64, input: "123456789", output: ""},
{id: 64, input: "1", output: ""},
{id: 64, input: "", output: ""},
{id: 3, input: "afloat", output: "float"},
{id: 3, input: "12", output: "2"},
{id: 3, input: "1", output: ""},
{id: 3, input: "", output: ""},
{id: 54, input: "123456789a", output: "a"},
{id: 54, input: "123456789", output: ""},
{id: 54, input: "1", output: ""},
{id: 54, input: "", output: ""},
{id: 73, input: "", output: " the of the "},
{id: 73, input: "dichlorodifluoromethanes", output: " the dichlorodifluoromethanes of the "},
{id: 15, input: "", output: " "},
{id: 15, input: "meow", output: " Meow "},
{id: 15, input: "-scale", output: " -scale "},
{id: 15, input: "почти", output: " Почти "},
{id: 15, input: "互联网", output: " 亗联网 "},
{id: 119, input: "", output: " ='"},
{id: 119, input: "meow", output: " MEOW='"},
{id: 119, input: "-scale", output: " -SCALE='"},
{id: 119, input: "почти", output: " ПОѧѢИ='"},
{id: 119, input: "互联网", output: " 亗聑罔='"},
}
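	// Note: per RFC section 8 the uppercase transforms operate on raw bytes
	// with no Unicode case-mapping, which is why some of the multi-byte
	// Cyrillic and CJK vectors above come out mangled rather than case-mapped.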
var buf [maxWordSize]byte
for i, v := range vectors {
cnt := transformWord(buf[:], []byte(v.input), v.id)
output := string(buf[:cnt])
if output != v.output {
t.Errorf("test %d, output mismatch: got %q, want %q", i, output, v.output)
}
}
}

35
vendor/github.com/dsnet/compress/brotli/writer.go generated vendored Normal file

@ -0,0 +1,35 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli
import "io"
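// NOTE: the encoder is unimplemented at this vendored revision; the type and
// methods below appear to be compile-time placeholders mirroring the
// reader's offset-tracking API.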
type writer struct {
InputOffset int64 // Total number of bytes issued to Write
OutputOffset int64 // Total number of bytes written to underlying io.Writer
wr bitWriter // Output destination
err error // Persistent error
}
type writerConfig struct {
_ struct{} // Blank field to prevent unkeyed struct literals
}
func newWriter(w io.Writer, conf *writerConfig) (*writer, error) {
return nil, nil
}
func (bw *writer) Write(buf []byte) (int, error) {
return 0, nil
}
func (bw *writer) Close() error {
return nil
}
func (bw *writer) Reset(w io.Writer) {
}


@ -0,0 +1,5 @@
// Copyright 2015, Joe Tsai. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE.md file.
package brotli

7
vendor/manifest vendored

@ -57,6 +57,13 @@
"revision": "f2193411bd642f7db03249fd79d5292c9b34916a",
"branch": "master"
},
{
"importpath": "github.com/dsnet/compress/brotli",
"repository": "https://github.com/dsnet/compress",
"revision": "b9aab3c6a04eef14c56384b4ad065e7b73438862",
"branch": "master",
"path": "/brotli"
},
{
"importpath": "github.com/golang/protobuf/proto",
"repository": "https://github.com/golang/protobuf",