Update all dependencies
parent 628dc66685
commit 47bc78b80a
365 changed files with 37935 additions and 16866 deletions
vendor/github.com/dsnet/compress/brotli/bit_reader.go (generated, vendored): 26 changed lines

@@ -4,8 +4,12 @@
 
 package brotli
 
-import "io"
-import "bufio"
+import (
+    "bufio"
+    "io"
+
+    "github.com/dsnet/compress/internal/errors"
+)
 
 // The bitReader preserves the property that it will never read more bytes than
 // is necessary. However, this feature dramatically hurts performance because
@@ -101,7 +105,7 @@ func (br *bitReader) FeedBits(nb uint) {
             if err == io.EOF {
                 err = io.ErrUnexpectedEOF
             }
-            panic(err)
+            errors.Panic(err)
         }
     }
     cnt := int(64-br.numBits) / 8
@@ -125,7 +129,7 @@ func (br *bitReader) FeedBits(nb uint) {
             if err == io.EOF {
                 err = io.ErrUnexpectedEOF
             }
-            panic(err)
+            errors.Panic(err)
         }
         br.bufBits |= uint64(c) << br.numBits
         br.numBits += 8
@@ -137,7 +141,7 @@ func (br *bitReader) FeedBits(nb uint) {
 // Read reads up to len(buf) bytes into buf.
 func (br *bitReader) Read(buf []byte) (cnt int, err error) {
     if br.numBits%8 != 0 {
-        return 0, Error("non-aligned bit buffer")
+        return 0, errorf(errors.Invalid, "non-aligned bit buffer")
     }
     if br.numBits > 0 {
         for cnt = 0; len(buf) > cnt && br.numBits > 0; cnt++ {
@@ -206,7 +210,7 @@ func (br *bitReader) TryReadSymbol(pd *prefixDecoder) (uint, bool) {
 // ReadSymbol reads the next prefix symbol using the provided prefixDecoder.
 func (br *bitReader) ReadSymbol(pd *prefixDecoder) uint {
     if len(pd.chunks) == 0 {
-        panic(ErrCorrupt) // Decode with empty tree
+        errors.Panic(errInvalid) // Decode with empty tree
     }
 
     nb := uint(pd.minBits)
@@ -256,12 +260,12 @@ func (br *bitReader) readSimplePrefixCode(pd *prefixDecoder, maxSyms uint) {
         codes[i].sym = uint32(br.ReadBits(clen))
     }
 
-    var copyLens = func(lens []uint) {
+    copyLens := func(lens []uint) {
         for i := 0; i < nsym; i++ {
             codes[i].len = uint32(lens[i])
         }
     }
-    var compareSwap = func(i, j int) {
+    compareSwap := func(i, j int) {
         if codes[i].sym > codes[j].sym {
             codes[i], codes[j] = codes[j], codes[i]
         }
@@ -291,7 +295,7 @@ func (br *bitReader) readSimplePrefixCode(pd *prefixDecoder, maxSyms uint) {
         compareSwap(1, 2)
     }
     if uint(codes[nsym-1].sym) >= maxSyms {
-        panic(ErrCorrupt) // Symbol goes beyond range of alphabet
+        errors.Panic(errCorrupted) // Symbol goes beyond range of alphabet
     }
     pd.Init(codes[:nsym], true) // Must have 1..4 symbols
 }
@@ -317,7 +321,7 @@ func (br *bitReader) readComplexPrefixCode(pd *prefixDecoder, maxSyms, hskip uin
         }
     }
     if len(codeCLens) < 1 {
-        panic(ErrCorrupt)
+        errors.Panic(errCorrupted)
     }
     br.prefix.Init(codeCLens, true) // Must have 1..len(complexLens) symbols
 
@@ -367,7 +371,7 @@ func (br *bitReader) readComplexPrefixCode(pd *prefixDecoder, maxSyms, hskip uin
         }
     }
     if len(codes) < 2 || sym > maxSyms {
-        panic(ErrCorrupt)
+        errors.Panic(errCorrupted)
     }
     pd.Init(codes, true) // Must have 2..maxSyms symbols
 }
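The FeedBits hunks above top up a 64-bit accumulator one byte at a time (br.bufBits |= uint64(c) << br.numBits) and now report failures through errors.Panic instead of a bare panic. The standalone sketch below illustrates the same LSB-first bit-buffer pattern with plain error returns; it is an illustration only, and the bitBuffer type and its methods are invented names, not the vendored API.

package main

import (
    "bufio"
    "fmt"
    "io"
    "strings"
)

// bitBuffer is a minimal LSB-first bit accumulator in the same spirit as the
// vendored bitReader: bytes are shifted into a uint64 at position numBits.
type bitBuffer struct {
    rd      *bufio.Reader
    bufBits uint64
    numBits uint
}

// feedBits fills the buffer until at least nb bits are available.
func (b *bitBuffer) feedBits(nb uint) error {
    for b.numBits < nb {
        c, err := b.rd.ReadByte()
        if err != nil {
            if err == io.EOF {
                err = io.ErrUnexpectedEOF
            }
            return err
        }
        b.bufBits |= uint64(c) << b.numBits
        b.numBits += 8
    }
    return nil
}

// readBits consumes and returns the bottom nb bits of the buffer.
func (b *bitBuffer) readBits(nb uint) (uint, error) {
    if err := b.feedBits(nb); err != nil {
        return 0, err
    }
    v := uint(b.bufBits & (1<<nb - 1))
    b.bufBits >>= nb
    b.numBits -= nb
    return v, nil
}

func main() {
    b := &bitBuffer{rd: bufio.NewReader(strings.NewReader("\x2b"))} // 0b00101011
    lo, _ := b.readBits(4)                                          // low nibble first
    hi, _ := b.readBits(4)
    fmt.Println(lo, hi) // 11 2
}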
vendor/github.com/dsnet/compress/brotli/common.go (generated, vendored): 104 changed lines

@@ -2,41 +2,49 @@
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE.md file.
 
-// Package brotli implements the Brotli compressed data format.
+// Package brotli implements the Brotli compressed data format,
+// described in RFC 7932.
 package brotli
 
-import "runtime"
-
-// Error is the wrapper type for errors specific to this library.
-type Error string
-
-func (e Error) Error() string { return "brotli: " + string(e) }
-
-var (
-    ErrCorrupt error = Error("stream is corrupted")
-)
-
-func errRecover(err *error) {
-    switch ex := recover().(type) {
-    case nil:
-        // Do nothing.
-    case runtime.Error:
-        panic(ex)
-    case error:
-        *err = ex
-    default:
-        panic(ex)
-    }
+import (
+    "fmt"
+
+    "github.com/dsnet/compress/internal/errors"
+)
+
+func errorf(c int, f string, a ...interface{}) error {
+    return errors.Error{Code: c, Pkg: "brotli", Msg: fmt.Sprintf(f, a...)}
 }
 
+// errWrap converts a lower-level errors.Error to be one from this package.
+// The replaceCode passed in will be used to replace the code for any errors
+// with the errors.Invalid code.
+//
+// For the Reader, set this to errors.Corrupted.
+// For the Writer, set this to errors.Internal.
+func errWrap(err error, replaceCode int) error {
+    if cerr, ok := err.(errors.Error); ok {
+        if errors.IsInvalid(cerr) {
+            cerr.Code = replaceCode
+        }
+        err = errorf(cerr.Code, "%s", cerr.Msg)
+    }
+    return err
+}
+
+var (
+    errClosed    = errorf(errors.Closed, "")
+    errCorrupted = errorf(errors.Corrupted, "")
+    errInvalid   = errorf(errors.Invalid, "")
+    errUnaligned = errorf(errors.Invalid, "non-aligned bit buffer")
+)
+
 var (
     reverseLUT [256]uint8
-    mtfLUT     [256]uint8
 )
 
 func init() {
     initLUTs()
-    printLUTs() // Only occurs in debug mode
 }
 
 func initLUTs() {
@@ -47,9 +55,6 @@ func initLUTs() {
 }
 
 func initCommonLUTs() {
-    for i := range mtfLUT {
-        mtfLUT[i] = uint8(i)
-    }
     for i := range reverseLUT {
         b := uint8(i)
         b = (b&0xaa)>>1 | (b&0x55)<<1
@@ -61,7 +66,7 @@ func initCommonLUTs() {
 
 // neededBits computes the minimum number of bits needed to encode n elements.
 func neededBits(n uint32) (nb uint) {
-    for n -= 1; n > 0; n >>= 1 {
+    for n--; n > 0; n >>= 1 {
         nb++
     }
     return
@@ -81,55 +86,6 @@ func reverseBits(v uint32, n uint) uint32 {
     return reverseUint32(v << (32 - n))
 }
 
-// moveToFront is a data structure that allows for more efficient move-to-front
-// transformations (described in RFC section 7.3). Since most transformations
-// only involve a fairly low number of symbols, it can be quite expensive
-// filling out the dict with values 0..255 for every call. Instead, we remember
-// what part of the dict was altered and make sure we reset it at the beginning
-// of every encode and decode operation.
-type moveToFront struct {
-    dict [256]uint8 // Mapping from indexes to values
-    tail int        // Number of tail bytes that are already ordered
-}
-
-func (m *moveToFront) Encode(vals []uint8) {
-    // Reset dict to be identical to mtfLUT.
-    copy(m.dict[:], mtfLUT[:256-m.tail])
-
-    var max int
-    for i, val := range vals {
-        var idx uint8 // Reverse lookup idx in dict
-        for di, dv := range m.dict {
-            if dv == val {
-                idx = uint8(di)
-                break
-            }
-        }
-        vals[i] = idx
-
-        max |= int(idx)
-        copy(m.dict[1:], m.dict[:idx])
-        m.dict[0] = val
-    }
-    m.tail = 256 - max - 1
-}
-
-func (m *moveToFront) Decode(idxs []uint8) {
-    // Reset dict to be identical to mtfLUT.
-    copy(m.dict[:], mtfLUT[:256-m.tail])
-
-    var max int
-    for i, idx := range idxs {
-        val := m.dict[idx] // Forward lookup val in dict
-        idxs[i] = val
-
-        max |= int(idx)
-        copy(m.dict[1:], m.dict[:idx])
-        m.dict[0] = val
-    }
-    m.tail = 256 - max - 1
-}
-
 func allocUint8s(s []uint8, n int) []uint8 {
     if cap(s) >= n {
         return s[:n]
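Most hunks in this commit swap the package-local Error string type and errRecover helper for the shared github.com/dsnet/compress/internal/errors helpers: errorf builds a coded, package-tagged error, errors.Panic aborts the current decode step, and errors.Recover converts the panic back into an ordinary error at the API boundary. The sketch below shows that panic/recover error-flow pattern in isolation; the codedError type and the code constants are stand-ins invented for the example, not the internal package's actual definitions.

package main

import "fmt"

// codedError is a stand-in for the internal errors.Error type that the diff
// migrates to; only the fields visible in the hunks above (Code, Pkg, Msg)
// are modelled, and the code values are invented for this example.
type codedError struct {
    Code int
    Pkg  string
    Msg  string
}

func (e codedError) Error() string { return e.Pkg + ": " + e.Msg }

const (
    codeInvalid = iota
    codeCorrupted
)

// errorf mirrors the helper added to common.go: build a package-tagged error.
func errorf(code int, format string, args ...interface{}) error {
    return codedError{Code: code, Pkg: "brotli", Msg: fmt.Sprintf(format, args...)}
}

// panicOnCorrupt plays the role of errors.Panic: abort the current decode step.
func panicOnCorrupt() {
    panic(errorf(codeCorrupted, "stream is corrupted"))
}

// recoverError plays the role of errors.Recover: convert a panicked error back
// into a normal return value at the public API boundary.
func recoverError(err *error) {
    switch ex := recover().(type) {
    case nil:
        // No panic; nothing to do.
    case codedError:
        *err = ex
    default:
        panic(ex) // Not one of ours; re-panic.
    }
}

// decodeStep stands in for Reader.step: it signals failure by panicking.
func decodeStep(corrupt bool) {
    if corrupt {
        panicOnCorrupt()
    }
}

func read(corrupt bool) (err error) {
    defer recoverError(&err)
    decodeStep(corrupt)
    return nil
}

func main() {
    fmt.Println(read(false)) // <nil>
    fmt.Println(read(true))  // brotli: stream is corrupted
}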
vendor/github.com/dsnet/compress/brotli/debug.go (generated, vendored): 241 changed lines (file deleted)

@@ -1,241 +0,0 @@
-// Copyright 2015, Joe Tsai. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-// +build debug
-
-package brotli
-
-import "os"
-import "fmt"
-import "strings"
-
-const debug = true
-
-func printLUTs() {
-    var output = os.Stderr
-
-    printVar := func(name string, obj interface{}) {
-        var body string
-        if bs, ok := obj.([]uint8); ok && len(bs) >= 256 {
-            // Special case handling for large []uint8 to form 16x16 blocks.
-            var ss []string
-            ss = append(ss, "{")
-            var s string
-            for i, b := range bs {
-                s += fmt.Sprintf("%02x ", b)
-                if i%16 == 15 || i+1 == len(bs) {
-                    ss = append(ss, "\t"+s+"")
-                    s = ""
-                }
-                if i%256 == 255 && (i+1 != len(bs)) {
-                    ss = append(ss, "")
-                }
-            }
-            ss = append(ss, "}")
-            body = strings.Join(ss, "\n")
-        } else {
-            body = fmt.Sprintf("%v", obj)
-        }
-        fmt.Fprintf(output, "var %s %T = %v\n", name, obj, body)
-    }
-
-    // Common LUTs.
-    printVar("reverseLUT", reverseLUT[:])
-    printVar("mtfLUT", mtfLUT[:])
-    fmt.Fprintln(output)
-
-    // Context LUTs.
-    printVar("contextP1LUT", contextP1LUT[:])
-    printVar("contextP2LUT", contextP2LUT[:])
-    fmt.Fprintln(output)
-
-    // Static dictionary LUTs.
-    printVar("dictBitSizes", dictBitSizes)
-    printVar("dictSizes", dictSizes)
-    printVar("dictOffsets", dictOffsets)
-    fmt.Fprintln(output)
-
-    // Prefix LUTs.
-    printVar("simpleLens1", simpleLens1)
-    printVar("simpleLens2", simpleLens2)
-    printVar("simpleLens3", simpleLens3)
-    printVar("simpleLens4a", simpleLens4a)
-    printVar("simpleLens4b", simpleLens4b)
-    printVar("complexLens", complexLens)
-    fmt.Fprintln(output)
-
-    printVar("insLenRanges", rangeCodes(insLenRanges))
-    printVar("cpyLenRanges", rangeCodes(cpyLenRanges))
-    printVar("blkLenRanges", rangeCodes(blkLenRanges))
-    printVar("maxRLERanges", rangeCodes(maxRLERanges))
-    fmt.Fprintln(output)
-
-    printVar("codeCLens", prefixCodes(codeCLens))
-    printVar("decCLens", decCLens)
-    printVar("encCLens", encCLens)
-    fmt.Fprintln(output)
-
-    printVar("codeMaxRLE", prefixCodes(codeMaxRLE))
-    printVar("decMaxRLE", decMaxRLE)
-    printVar("encMaxRLE", encMaxRLE)
-    fmt.Fprintln(output)
-
-    printVar("codeWinBits", prefixCodes(codeWinBits))
-    printVar("decWinBits", decWinBits)
-    printVar("encWinBits", encWinBits)
-    fmt.Fprintln(output)
-
-    printVar("codeCounts", prefixCodes(codeCounts))
-    printVar("decCounts", decCounts)
-    printVar("encCounts", encCounts)
-    fmt.Fprintln(output)
-
-    printVar("iacLUT", typeIaCLUT(iacLUT))
-    printVar("distShortLUT", typeDistShortLUT(distShortLUT))
-    printVar("distLongLUT", typeDistLongLUT(distLongLUT))
-    fmt.Fprintln(output)
-}
-
-func tabs(s string, n int) string {
-    tabs := strings.Repeat("\t", n)
-    return strings.Join(strings.Split(s, "\n"), "\n"+tabs)
-}
-
-type rangeCodes []rangeCode
-
-func (rc rangeCodes) String() (s string) {
-    var maxBits, maxBase int
-    for _, c := range rc {
-        if maxBits < int(c.bits) {
-            maxBits = int(c.bits)
-        }
-        if maxBase < int(c.base) {
-            maxBase = int(c.base)
-        }
-    }
-
-    var ss []string
-    ss = append(ss, "{")
-    maxSymDig := len(fmt.Sprintf("%d", len(rc)-1))
-    maxBitsDig := len(fmt.Sprintf("%d", maxBits))
-    maxBaseDig := len(fmt.Sprintf("%d", maxBase))
-    for i, c := range rc {
-        base := fmt.Sprintf(fmt.Sprintf("%%%dd", maxBaseDig), c.base)
-        if c.bits > 0 {
-            base += fmt.Sprintf("-%d", c.base+1<<c.bits-1)
-        }
-        ss = append(ss, fmt.Sprintf(
-            fmt.Sprintf("\t%%%dd: {bits: %%%dd, base: %%s},",
-                maxSymDig, maxBitsDig),
-            i, c.bits, base,
-        ))
-    }
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
-
-type prefixCodes []prefixCode
-
-func (pc prefixCodes) String() (s string) {
-    var maxSym, maxLen int
-    for _, c := range pc {
-        if maxSym < int(c.sym) {
-            maxSym = int(c.sym)
-        }
-        if maxLen < int(c.len) {
-            maxLen = int(c.len)
-        }
-    }
-
-    var ss []string
-    ss = append(ss, "{")
-    maxSymDig := len(fmt.Sprintf("%d", maxSym))
-    for _, c := range pc {
-        ss = append(ss, fmt.Sprintf(
-            fmt.Sprintf("\t%%%dd:%s%%0%db,",
-                maxSymDig, strings.Repeat(" ", 2+maxLen-int(c.len)), c.len),
-            c.sym, c.val,
-        ))
-    }
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
-
-func (pd prefixDecoder) String() string {
-    var ss []string
-    ss = append(ss, "{")
-    if len(pd.chunks) > 0 {
-        ss = append(ss, "\tchunks: {")
-        for i, c := range pd.chunks {
-            l := "sym"
-            if uint(c&prefixCountMask) > uint(pd.chunkBits) {
-                l = "idx"
-            }
-            ss = append(ss, fmt.Sprintf(
-                fmt.Sprintf("\t\t%%0%db: {%%s: %%3d, len: %%2d},", pd.chunkBits),
-                i, l, c>>prefixCountBits, c&prefixCountMask,
-            ))
-        }
-        ss = append(ss, "\t},")
-
-        for j, links := range pd.links {
-            ss = append(ss, fmt.Sprintf("\tlinks[%d]: {", j))
-            linkBits := len(fmt.Sprintf("%b", pd.linkMask))
-            for i, c := range links {
-                ss = append(ss, fmt.Sprintf(
-                    fmt.Sprintf("\t\t%%0%db: {sym: %%3d, len: %%2d},", linkBits),
-                    i, c>>prefixCountBits, c&prefixCountMask,
-                ))
-            }
-            ss = append(ss, "\t},")
-        }
-        ss = append(ss, fmt.Sprintf("\tchunkMask: %b,", pd.chunkMask))
-        ss = append(ss, fmt.Sprintf("\tlinkMask: %b,", pd.linkMask))
-        ss = append(ss, fmt.Sprintf("\tchunkBits: %d,", pd.chunkBits))
-        ss = append(ss, fmt.Sprintf("\tminBits: %d,", pd.minBits))
-        ss = append(ss, fmt.Sprintf("\tnumSyms: %d,", pd.numSyms))
-    }
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
-
-type typeIaCLUT [numIaCSyms]struct{ ins, cpy rangeCode }
-
-func (t typeIaCLUT) String() string {
-    var ss []string
-    var ins, cpy rangeCodes
-    for _, rec := range t {
-        ins = append(ins, rec.ins)
-        cpy = append(cpy, rec.cpy)
-    }
-    ss = append(ss, "{")
-    ss = append(ss, "\tins: "+tabs(ins.String(), 1)+",")
-    ss = append(ss, "\tcpy: "+tabs(cpy.String(), 1)+",")
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
-
-type typeDistShortLUT [16]struct{ index, delta int }
-
-func (t typeDistShortLUT) String() string {
-    var ss []string
-    ss = append(ss, "{")
-    for i, rec := range t {
-        ss = append(ss, fmt.Sprintf("\t%2d: {index: %d, delta: %+2d},", i, rec.index, rec.delta))
-    }
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
-
-type typeDistLongLUT [4][]rangeCode
-
-func (t typeDistLongLUT) String() string {
-    var ss []string
-    ss = append(ss, "{")
-    for i, rc := range t {
-        ss = append(ss, fmt.Sprintf("\t%d: %s,", i, tabs(rangeCodes(rc).String(), 1)))
-    }
-    ss = append(ss, "}")
-    return strings.Join(ss, "\n")
-}
vendor/github.com/dsnet/compress/brotli/prefix.go (generated, vendored): 14 changed lines

@@ -132,7 +132,7 @@ func initPrefixLUTs() {
 }
 
 func initPrefixRangeLUTs() {
-    var makeRanges = func(base uint, bits []uint) (rc []rangeCode) {
+    makeRanges := func(base uint, bits []uint) (rc []rangeCode) {
         for _, nb := range bits {
             rc = append(rc, rangeCode{base: uint32(base), bits: uint32(nb)})
             base += 1 << nb
@@ -158,7 +158,7 @@ func initPrefixCodeLUTs() {
     // Prefix code for reading code lengths in RFC section 3.5.
     codeCLens = nil
     for sym, clen := range []uint{2, 4, 3, 2, 2, 4} {
-        var code = prefixCode{sym: uint32(sym), len: uint32(clen)}
+        code := prefixCode{sym: uint32(sym), len: uint32(clen)}
         codeCLens = append(codeCLens, code)
     }
     decCLens.Init(codeCLens, true)
@@ -167,7 +167,7 @@ func initPrefixCodeLUTs() {
     // Prefix code for reading RLEMAX in RFC section 7.3.
     codeMaxRLE = []prefixCode{{sym: 0, val: 0, len: 1}}
     for i := uint32(0); i < 16; i++ {
-        var code = prefixCode{sym: i + 1, val: i<<1 | 1, len: 5}
+        code := prefixCode{sym: i + 1, val: i<<1 | 1, len: 5}
         codeMaxRLE = append(codeMaxRLE, code)
     }
     decMaxRLE.Init(codeMaxRLE, false)
@@ -196,12 +196,12 @@ func initPrefixCodeLUTs() {
     // Prefix code for reading counts in RFC section 9.2.
     // This is used for: NBLTYPESL, NBLTYPESI, NBLTYPESD, NTREESL, and NTREESD.
     codeCounts = []prefixCode{{sym: 1, val: 0, len: 1}}
-    var code = codeCounts[len(codeCounts)-1]
+    code := codeCounts[len(codeCounts)-1]
     for i := uint32(0); i < 8; i++ {
         for j := uint32(0); j < 1<<i; j++ {
             code.sym = code.sym + 1
             code.val = j<<4 | i<<1 | 1
-            code.len = uint32(i + 4)
+            code.len = i + 4
             codeCounts = append(codeCounts, code)
         }
     }
@@ -254,9 +254,9 @@ func initLengthLUTs() {
         case distSym < 4:
             index, delta = distSym, 0
         case distSym < 10:
-            index, delta = 0, int(distSym/2-1)
+            index, delta = 0, distSym/2-1
         case distSym < 16:
-            index, delta = 1, int(distSym/2-4)
+            index, delta = 1, distSym/2-4
         }
         if distSym%2 == 0 {
             delta *= -1
vendor/github.com/dsnet/compress/brotli/prefix_decoder.go (generated, vendored): 36 changed lines

@@ -4,6 +4,10 @@
 
 package brotli
 
+import (
+    "github.com/dsnet/compress/internal/errors"
+)
+
 // The algorithm used to decode variable length codes is based on the lookup
 // method in zlib. If the code is less-than-or-equal to prefixMaxChunkBits,
 // then the symbol can be decoded using a single lookup into the chunks table.
@@ -80,7 +84,7 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
     minBits, maxBits, symLast := c0.len, c0.len, int(c0.sym)
     for _, c := range codes[1:] {
         if int(c.sym) <= symLast {
-            panic(ErrCorrupt) // Non-unique or non-monotonically increasing
+            errors.Panic(errCorrupted) // Non-unique or non-monotonically increasing
         }
         if minBits > c.len {
             minBits = c.len
@@ -92,10 +96,10 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
         symLast = int(c.sym) // Keep track of last symbol
     }
     if maxBits >= 1<<prefixCountBits || minBits == 0 {
-        panic(ErrCorrupt) // Bit-width is too long or too short
+        errors.Panic(errCorrupted) // Bit-width is too long or too short
     }
     if symLast >= 1<<prefixSymbolBits {
-        panic(ErrCorrupt) // Alphabet cardinality too large
+        errors.Panic(errCorrupted) // Alphabet cardinality too large
     }
 
     // Compute the next code for a symbol of a given bit length.
@@ -107,7 +111,7 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
         code += bitCnts[i]
     }
     if code != 1<<maxBits {
-        panic(ErrCorrupt) // Tree is under or over subscribed
+        errors.Panic(errCorrupted) // Tree is under or over subscribed
     }
 
     // Allocate chunks table if necessary.
@@ -134,7 +138,7 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
         for linkIdx := range pd.links {
             code := reverseBits(uint32(baseCode)+uint32(linkIdx), uint(pd.chunkBits))
             pd.links[linkIdx] = allocUint32s(pd.links[linkIdx], numLinks)
-            pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | uint32(pd.chunkBits+1)
+            pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | (pd.chunkBits + 1)
         }
     } else {
         for i := range pd.chunks {
@@ -151,14 +155,14 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
                 linkIdx := len(pd.links)
                 pd.links = extendSliceUints32s(pd.links, len(pd.links)+1)
                 pd.links[linkIdx] = allocUint32s(pd.links[linkIdx], numLinks)
-                pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | uint32(pd.chunkBits+1)
+                pd.chunks[code] = uint32(linkIdx<<prefixCountBits) | (pd.chunkBits + 1)
             }
         }
     }
 
     // Fill out chunks and links tables with values.
     for i, c := range codes {
-        chunk := c.sym<<prefixCountBits | uint32(c.len)
+        chunk := c.sym<<prefixCountBits | c.len
         if assignCodes {
             codes[i].val = reverseBits(uint32(nextCodes[c.len]), uint(c.len))
             nextCodes[c.len]++
@@ -179,22 +183,4 @@ func (pd *prefixDecoder) Init(codes []prefixCode, assignCodes bool) {
             }
         }
     }
-
-    if debug && !checkPrefixes(codes) {
-        panic(ErrCorrupt) // The codes do not form a valid prefix tree.
-    }
 }
-
-// checkPrefixes reports whether any codes have overlapping prefixes.
-// This check is expensive and runs in O(n^2) time!
-func checkPrefixes(codes []prefixCode) bool {
-    for i, c1 := range codes {
-        for j, c2 := range codes {
-            mask := uint32(1)<<c1.len - 1
-            if i != j && c1.len <= c2.len && c1.val&mask == c2.val&mask {
-                return false
-            }
-        }
-    }
-    return true
-}
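prefixDecoder.Init above counts codes per bit length, derives the first canonical code of each length ("Compute the next code for a symbol of a given bit length"), and treats code != 1<<maxBits as a corrupted, under- or over-subscribed tree. The following independent sketch reproduces just that bookkeeping for clarity; it omits the bit reversal the real decoder applies for LSB-first reads, and canonicalCodes is an invented helper, not part of the vendored package.

package main

import "fmt"

// canonicalCodes assigns canonical prefix codes to a set of bit lengths and
// reports whether the lengths describe a complete (neither under- nor
// over-subscribed) prefix tree, mirroring the bitCnts/nextCodes bookkeeping
// hinted at in the hunks above.
func canonicalCodes(lens []uint) (codes []uint32, ok bool) {
    if len(lens) == 0 {
        return nil, false
    }
    maxBits := uint(0)
    bitCnts := map[uint]uint32{}
    for _, n := range lens {
        if n == 0 {
            return nil, false
        }
        bitCnts[n]++
        if n > maxBits {
            maxBits = n
        }
    }

    // The first code of each length follows the codes of the previous length.
    nextCodes := map[uint]uint32{}
    code := uint32(0)
    for nb := uint(1); nb <= maxBits; nb++ {
        code <<= 1
        nextCodes[nb] = code
        code += bitCnts[nb]
    }
    if code != 1<<maxBits {
        return nil, false // Tree is under- or over-subscribed.
    }

    codes = make([]uint32, len(lens))
    for i, n := range lens {
        codes[i] = nextCodes[n]
        nextCodes[n]++
    }
    return codes, true
}

func main() {
    // The RFC 7932 code-length code {2, 4, 3, 2, 2, 4} forms a complete tree.
    codes, ok := canonicalCodes([]uint{2, 4, 3, 2, 2, 4})
    fmt.Println(ok, codes) // true [0 14 6 1 2 15]
}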
vendor/github.com/dsnet/compress/brotli/reader.go (generated, vendored): 94 changed lines

@@ -4,8 +4,13 @@
 
 package brotli
 
-import "io"
-import "io/ioutil"
+import (
+    "io"
+    "io/ioutil"
+
+    "github.com/dsnet/compress/internal"
+    "github.com/dsnet/compress/internal/errors"
+)
 
 type Reader struct {
     InputOffset int64 // Total number of bytes read from underlying io.Reader
@@ -22,11 +27,11 @@ type Reader struct {
     step      func(*Reader) // Single step of decompression work (can panic)
     stepState int           // The sub-step state for certain steps
 
-    mtf     moveToFront  // Local move-to-front decoder
-    dict    dictDecoder  // Dynamic sliding dictionary
-    iacBlk  blockDecoder // Insert-and-copy block decoder
-    litBlk  blockDecoder // Literal block decoder
-    distBlk blockDecoder // Distance block decoder
+    mtf     internal.MoveToFront // Local move-to-front decoder
+    dict    dictDecoder          // Dynamic sliding dictionary
+    iacBlk  blockDecoder         // Insert-and-copy block decoder
+    litBlk  blockDecoder         // Literal block decoder
+    distBlk blockDecoder         // Distance block decoder
 
     // Literal decoding state fields.
     litMapType []uint8 // The current literal context map for the current block type
@@ -87,7 +92,7 @@ func (br *Reader) Read(buf []byte) (int, error) {
         // Perform next step in decompression process.
         br.rd.offset = br.InputOffset
         func() {
-            defer errRecover(&br.err)
+            defer errors.Recover(&br.err)
             br.step(br)
         }()
         br.InputOffset = br.rd.FlushOffset()
@@ -131,9 +136,9 @@ func (br *Reader) Reset(r io.Reader) error {
 
 // readStreamHeader reads the Brotli stream header according to RFC section 9.1.
 func (br *Reader) readStreamHeader() {
-    wbits := uint(br.rd.ReadSymbol(&decWinBits))
+    wbits := br.rd.ReadSymbol(&decWinBits)
     if wbits == 0 {
-        panic(ErrCorrupt) // Reserved value used
+        errors.Panic(errCorrupted) // Reserved value used
     }
     size := int(1<<wbits) - 16
     br.dict.Init(size)
@@ -144,9 +149,9 @@
 func (br *Reader) readBlockHeader() {
     if br.last {
         if br.rd.ReadPads() > 0 {
-            panic(ErrCorrupt)
+            errors.Panic(errCorrupted)
         }
-        panic(io.EOF)
+        errors.Panic(io.EOF)
     }
 
     // Read ISLAST and ISLASTEMPTY.
@@ -159,40 +164,39 @@ func (br *Reader) readBlockHeader() {
 
     // Read MLEN and MNIBBLES and process meta data.
     var blkLen int // 1..1<<24
-    if nibbles := br.rd.ReadBits(2) + 4; nibbles == 7 {
+    nibbles := br.rd.ReadBits(2) + 4
+    if nibbles == 7 {
         if reserved := br.rd.ReadBits(1) == 1; reserved {
-            panic(ErrCorrupt)
+            errors.Panic(errCorrupted)
         }
 
         var skipLen int // 0..1<<24
         if skipBytes := br.rd.ReadBits(2); skipBytes > 0 {
             skipLen = int(br.rd.ReadBits(skipBytes * 8))
             if skipBytes > 1 && skipLen>>((skipBytes-1)*8) == 0 {
-                panic(ErrCorrupt) // Shortest representation not used
+                errors.Panic(errCorrupted) // Shortest representation not used
             }
             skipLen++
         }
 
         if br.rd.ReadPads() > 0 {
-            panic(ErrCorrupt)
+            errors.Panic(errCorrupted)
         }
-        br.blkLen = skipLen // Use blkLen to track meta data number of bytes
+        br.blkLen = skipLen // Use blkLen to track metadata number of bytes
         br.readMetaData()
         return
-    } else {
-        blkLen = int(br.rd.ReadBits(nibbles * 4))
-        if nibbles > 4 && blkLen>>((nibbles-1)*4) == 0 {
-            panic(ErrCorrupt) // Shortest representation not used
-        }
-        blkLen++
     }
-    br.blkLen = blkLen
+    blkLen = int(br.rd.ReadBits(nibbles * 4))
+    if nibbles > 4 && blkLen>>((nibbles-1)*4) == 0 {
+        errors.Panic(errCorrupted) // Shortest representation not used
+    }
+    br.blkLen = blkLen + 1
 
     // Read ISUNCOMPRESSED and process uncompressed data.
     if !br.last {
         if uncompressed := br.rd.ReadBits(1) == 1; uncompressed {
             if br.rd.ReadPads() > 0 {
-                panic(ErrCorrupt)
+                errors.Panic(errCorrupted)
             }
             br.readRawData()
             return
@@ -209,9 +213,9 @@ func (br *Reader) readMetaData() {
         br.metaBuf = make([]byte, 4096) // Lazy allocate
     }
     if cnt, err := io.CopyBuffer(br.metaWr, &br.metaRd, br.metaBuf); err != nil {
-        panic(err) // Will never panic with io.EOF
+        errors.Panic(err) // Will never panic with io.EOF
     } else if cnt < int64(br.blkLen) {
-        panic(io.ErrUnexpectedEOF)
+        errors.Panic(io.ErrUnexpectedEOF)
     }
     br.step = (*Reader).readBlockHeader
 }
@@ -230,7 +234,7 @@ func (br *Reader) readRawData() {
         if err == io.EOF {
             err = io.ErrUnexpectedEOF
         }
-        panic(err)
+        errors.Panic(err)
     }
 
     if br.blkLen > 0 {
@@ -402,9 +406,8 @@ startCommand:
         br.distZero = iacSym < 128
         if br.insLen > 0 {
             goto readLiterals
-        } else {
-            goto readDistance
         }
+        goto readDistance
     }
 
readLiterals:
@@ -443,11 +446,11 @@ readLiterals:
            br.step = (*Reader).readCommands
            br.stepState = stateLiterals // Need to continue work here
            return
-       } else if br.blkLen > 0 {
-           goto readDistance
-       } else {
-           goto finishCommand
        }
+       if br.blkLen > 0 {
+           goto readDistance
+       }
+       goto finishCommand
    }
 
readDistance:
@@ -484,7 +487,7 @@ readDistance:
        }
        br.distZero = bool(distSym == 0)
        if br.dist <= 0 {
-           panic(ErrCorrupt)
+           errors.Panic(errCorrupted)
        }
    }
 
@@ -496,9 +499,8 @@ readDistance:
            br.dists[0] = br.dist
        }
        goto copyDynamicDict
-   } else {
-       goto copyStaticDict
    }
+   goto copyStaticDict
 }
 
copyDynamicDict:
@@ -513,9 +515,8 @@ copyDynamicDict:
        br.step = (*Reader).readCommands
        br.stepState = stateDynamicDict // Need to continue work here
        return
-   } else {
-       goto finishCommand
    }
+   goto finishCommand
 }
 
copyStaticDict:
@@ -523,7 +524,7 @@ copyStaticDict:
    {
        if len(br.word) == 0 {
            if br.cpyLen < minDictLen || br.cpyLen > maxDictLen {
-               panic(ErrCorrupt)
+               errors.Panic(errCorrupted)
            }
            wordIdx := br.dist - (br.dict.HistSize() + 1)
            index := wordIdx % dictSizes[br.cpyLen]
@@ -531,7 +532,7 @@ copyStaticDict:
            baseWord := dictLUT[offset : offset+br.cpyLen]
            transformIdx := wordIdx >> uint(dictBitSizes[br.cpyLen])
            if transformIdx >= len(transformLUT) {
-               panic(ErrCorrupt)
+               errors.Panic(errCorrupted)
            }
            cnt := transformWord(br.wordBuf[:], baseWord, transformIdx)
            br.word = br.wordBuf[:cnt]
@@ -548,16 +549,16 @@ copyStaticDict:
        br.step = (*Reader).readCommands
        br.stepState = stateStaticDict // Need to continue work here
        return
-   } else {
-       goto finishCommand
    }
+   goto finishCommand
 }
 
finishCommand:
    // Finish off this command and check if we need to loop again.
    if br.blkLen < 0 {
-       panic(ErrCorrupt)
-   } else if br.blkLen > 0 {
+       errors.Panic(errCorrupted)
+   }
+   if br.blkLen > 0 {
        goto startCommand // More commands in this block
    }
 
@@ -565,7 +566,6 @@ finishCommand:
    br.toRead = br.dict.ReadFlush()
    br.step = (*Reader).readBlockHeader
    br.stepState = stateInit // Next call to readCommands must start here
-   return
 }
 
 // readContextMap reads the context map according to RFC section 7.3.
@@ -590,7 +590,7 @@ func (br *Reader) readContextMap(cm []uint8, numTrees uint) {
            // Repeated zeros.
            n := int(br.rd.ReadOffset(sym-1, maxRLERanges))
            if i+n > len(cm) {
-               panic(ErrCorrupt)
+               errors.Panic(errCorrupted)
            }
            for j := i + n; i < j; i++ {
                cm[i] = 0
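From a caller's perspective the Reader remains a plain io.Reader: Read drives the step functions, and errors.Recover turns any panicked corruption error into a normal return value. A minimal usage sketch follows, assuming the package's NewReader(r io.Reader, conf *ReaderConfig) constructor and that a nil config is accepted, as in the other dsnet/compress packages; the input bytes are a placeholder, so running it as-is would report an unexpected EOF.

package main

import (
    "bytes"
    "fmt"
    "io/ioutil"
    "log"

    "github.com/dsnet/compress/brotli"
)

func main() {
    // compressed would normally come from a file or network stream; an empty
    // placeholder is used here since this sketch only illustrates the wiring.
    compressed := bytes.NewReader([]byte{ /* brotli-encoded data */ })

    zr, err := brotli.NewReader(compressed, nil)
    if err != nil {
        log.Fatal(err)
    }
    defer zr.Close()

    // Any corruption detected by the step functions above surfaces here as a
    // normal error return rather than a panic, thanks to errors.Recover.
    data, err := ioutil.ReadAll(zr)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("%d bytes decompressed\n", len(data))
}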
vendor/github.com/dsnet/compress/brotli/release.go (generated, vendored): 11 changed lines (file deleted)

@@ -1,11 +0,0 @@
-// Copyright 2015, Joe Tsai. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE.md file.
-
-// +build !debug
-
-package brotli
-
-const debug = false
-
-func printLUTs() {}
vendor/github.com/dsnet/compress/brotli/writer.go (generated, vendored): 4 changed lines

@@ -30,6 +30,6 @@ func (bw *writer) Close() error {
     return nil
 }
 
-func (bw *writer) Reset(w io.Writer) {
-    return
+func (bw *writer) Reset(w io.Writer) error {
+    return nil
 }