Vendor Update Go Libs (#13166)
Vendor Update Go Libs (#13166)

* update github.com/alecthomas/chroma v0.8.0 -> v0.8.1
* github.com/blevesearch/bleve v1.0.10 -> v1.0.12
* editorconfig-core-go v2.1.1 -> v2.3.7
* github.com/gliderlabs/ssh v0.2.2 -> v0.3.1
* migrate editorconfig.ParseBytes to Parse
* github.com/shurcooL/vfsgen to 0d455de96546
* github.com/go-git/go-git/v5 v5.1.0 -> v5.2.0
* github.com/google/uuid v1.1.1 -> v1.1.2
* github.com/huandu/xstrings v1.3.0 -> v1.3.2
* github.com/klauspost/compress v1.10.11 -> v1.11.1
* github.com/markbates/goth v1.61.2 -> v1.65.0
* github.com/mattn/go-sqlite3 v1.14.0 -> v1.14.4
* github.com/mholt/archiver v3.3.0 -> v3.3.2
* github.com/microcosm-cc/bluemonday 4f7140c49acb -> v1.0.4
* github.com/minio/minio-go v7.0.4 -> v7.0.5
* github.com/olivere/elastic v7.0.9 -> v7.0.20
* github.com/urfave/cli v1.20.0 -> v1.22.4
* github.com/prometheus/client_golang v1.1.0 -> v1.8.0
* github.com/xanzy/go-gitlab v0.37.0 -> v0.38.1
* mvdan.cc/xurls v2.1.0 -> v2.2.0

Co-authored-by: Lauris BH <lauris@nix.lv>
This commit is contained in:
parent 91f2afdb54
commit 12a1f914f4
656 changed files with 52967 additions and 25229 deletions
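One entry in the commit message is an API migration rather than a plain version bump: the editorconfig.ParseBytes call was migrated to the io.Reader-based Parse when editorconfig-core-go moved from v2.1.1 to v2.3.7. Below is a minimal sketch of what that migration looks like, assuming the v2 module's Parse(io.Reader) signature; parseConfig is a hypothetical helper for illustration, not Gitea's actual call site.

package main

import (
    "bytes"
    "fmt"

    editorconfig "github.com/editorconfig/editorconfig-core-go/v2"
)

// parseConfig is a hypothetical wrapper showing the migration named
// in the commit message.
func parseConfig(data []byte) (*editorconfig.Editorconfig, error) {
    // Before (v2.1.1): return editorconfig.ParseBytes(data)
    // After (v2.3.7):  wrap the bytes in a reader and call Parse.
    return editorconfig.Parse(bytes.NewReader(data))
}

func main() {
    def, err := parseConfig([]byte("root = true\n"))
    fmt.Println(def, err)
}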
vendor/github.com/klauspost/compress/zstd/blockenc.go (generated, vendored): 67 changes
--- a/vendor/github.com/klauspost/compress/zstd/blockenc.go
+++ b/vendor/github.com/klauspost/compress/zstd/blockenc.go
@@ -14,12 +14,13 @@ import (
 )
 
 type blockEnc struct {
-    size      int
-    literals  []byte
-    sequences []seq
-    coders    seqCoders
-    litEnc    *huff0.Scratch
-    wr        bitWriter
+    size       int
+    literals   []byte
+    sequences  []seq
+    coders     seqCoders
+    litEnc     *huff0.Scratch
+    dictLitEnc *huff0.Scratch
+    wr         bitWriter
 
     extraLits int
     last      bool
@@ -314,19 +315,19 @@ func (b *blockEnc) encodeRawTo(dst, src []byte) []byte {
 }
 
 // encodeLits can be used if the block is only litLen.
-func (b *blockEnc) encodeLits(raw bool) error {
+func (b *blockEnc) encodeLits(lits []byte, raw bool) error {
     var bh blockHeader
     bh.setLast(b.last)
-    bh.setSize(uint32(len(b.literals)))
+    bh.setSize(uint32(len(lits)))
 
     // Don't compress extremely small blocks
-    if len(b.literals) < 32 || raw {
+    if len(lits) < 8 || (len(lits) < 32 && b.dictLitEnc == nil) || raw {
         if debug {
-            println("Adding RAW block, length", len(b.literals), "last:", b.last)
+            println("Adding RAW block, length", len(lits), "last:", b.last)
         }
         bh.setType(blockTypeRaw)
         b.output = bh.appendTo(b.output)
-        b.output = append(b.output, b.literals...)
+        b.output = append(b.output, lits...)
         return nil
     }
 
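The hunk above does more than thread lits through encodeLits: the raw-block cutoff changes. A hedged restatement of the new condition with hypothetical names, to make the logic easier to read:

// storeRaw mirrors the new test in encodeLits: blocks under 8 bytes
// are always emitted raw, and blocks under 32 bytes are emitted raw
// only when no dictionary Huffman table (b.dictLitEnc) is available
// to make entropy coding pay off.
func storeRaw(lits []byte, haveDictTable, raw bool) bool {
    return len(lits) < 8 || (len(lits) < 32 && !haveDictTable) || raw
}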
@@ -335,13 +336,18 @@ func (b *blockEnc) encodeLits(raw bool) error {
         reUsed, single bool
         err            error
     )
-    if len(b.literals) >= 1024 {
+    if b.dictLitEnc != nil {
+        b.litEnc.TransferCTable(b.dictLitEnc)
+        b.litEnc.Reuse = huff0.ReusePolicyAllow
+        b.dictLitEnc = nil
+    }
+    if len(lits) >= 1024 {
         // Use 4 Streams.
-        out, reUsed, err = huff0.Compress4X(b.literals, b.litEnc)
-    } else if len(b.literals) > 32 {
+        out, reUsed, err = huff0.Compress4X(lits, b.litEnc)
+    } else if len(lits) > 32 {
         // Use 1 stream
         single = true
-        out, reUsed, err = huff0.Compress1X(b.literals, b.litEnc)
+        out, reUsed, err = huff0.Compress1X(lits, b.litEnc)
     } else {
         err = huff0.ErrIncompressible
     }
@@ -349,19 +355,19 @@ func (b *blockEnc) encodeLits(raw bool) error {
     switch err {
     case huff0.ErrIncompressible:
         if debug {
-            println("Adding RAW block, length", len(b.literals), "last:", b.last)
+            println("Adding RAW block, length", len(lits), "last:", b.last)
         }
         bh.setType(blockTypeRaw)
         b.output = bh.appendTo(b.output)
-        b.output = append(b.output, b.literals...)
+        b.output = append(b.output, lits...)
         return nil
     case huff0.ErrUseRLE:
         if debug {
-            println("Adding RLE block, length", len(b.literals))
+            println("Adding RLE block, length", len(lits))
         }
         bh.setType(blockTypeRLE)
         b.output = bh.appendTo(b.output)
-        b.output = append(b.output, b.literals[0])
+        b.output = append(b.output, lits[0])
         return nil
     default:
         return err
@@ -384,7 +390,7 @@ func (b *blockEnc) encodeLits(raw bool) error {
         lh.setType(literalsBlockCompressed)
     }
     // Set sizes
-    lh.setSizes(len(out), len(b.literals), single)
+    lh.setSizes(len(out), len(lits), single)
     bh.setSize(uint32(len(out) + lh.size() + 1))
 
     // Write block headers.
@@ -444,13 +450,19 @@ func fuzzFseEncoder(data []byte) int {
 }
 
 // encode will encode the block and append the output in b.output.
-func (b *blockEnc) encode(raw, rawAllLits bool) error {
+// Previous offset codes must be pushed if more blocks are expected.
+func (b *blockEnc) encode(org []byte, raw, rawAllLits bool) error {
     if len(b.sequences) == 0 {
-        return b.encodeLits(rawAllLits)
+        return b.encodeLits(b.literals, rawAllLits)
     }
-    // We want some difference
-    if len(b.literals) > (b.size - (b.size >> 5)) {
-        return errIncompressible
+    // We want some difference to at least account for the headers.
+    saved := b.size - len(b.literals) - (b.size >> 5)
+    if saved < 16 {
+        if org == nil {
+            return errIncompressible
+        }
+        b.popOffsets()
+        return b.encodeLits(org, rawAllLits)
     }
 
     var bh blockHeader
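The encode hunk above replaces a hard errIncompressible with a fallback: when sequences do not save enough bytes over the raw input, the encoder now emits a literals-only block from the original input (org) instead of failing. A small sketch of the new threshold arithmetic; worthSequences and the sample numbers are illustrative, not part of the library:

package main

import "fmt"

// worthSequences mirrors the saved-bytes test in encode: matches must
// shave off at least size>>5 (about 3.1% of the block) plus 16 bytes
// before sequence encoding is preferred over a literals-only block.
func worthSequences(blockSize, literalBytes int) bool {
    saved := blockSize - literalBytes - (blockSize >> 5)
    return saved >= 16
}

func main() {
    // For a full 128 KiB block, size>>5 is 4096, so matches must
    // remove more than ~4 KiB of literals to count as compressible.
    fmt.Println(worthSequences(128<<10, 128<<10-4000)) // false
    fmt.Println(worthSequences(128<<10, 128<<10-5000)) // true
}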
@@ -466,6 +478,11 @@ func (b *blockEnc) encode(raw, rawAllLits bool) error {
         reUsed, single bool
         err            error
     )
+    if b.dictLitEnc != nil {
+        b.litEnc.TransferCTable(b.dictLitEnc)
+        b.litEnc.Reuse = huff0.ReusePolicyAllow
+        b.dictLitEnc = nil
+    }
     if len(b.literals) >= 1024 && !raw {
         // Use 4 Streams.
         out, reUsed, err = huff0.Compress4X(b.literals, b.litEnc)
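Taken together, the new dictLitEnc field and the TransferCTable/ReusePolicyAllow handoff are encoder-side plumbing for zstd dictionary support: a Huffman table supplied by a dictionary is transferred into the literal encoder once, after which normal table reuse takes over. A minimal round-trip sketch of the public API this serves, assuming the option names of klauspost/compress v1.11.1 (WithEncoderDict, WithDecoderDicts); dict must be a real zstd dictionary, for example one trained with zstd --train, or NewWriter will return an error:

package main

import "github.com/klauspost/compress/zstd"

// compressWithDict round-trips data through a dictionary-backed
// encoder/decoder pair. Passing nil writers/readers is fine here
// because only EncodeAll/DecodeAll are used.
func compressWithDict(data, dict []byte) ([]byte, error) {
    enc, err := zstd.NewWriter(nil, zstd.WithEncoderDict(dict))
    if err != nil {
        return nil, err // e.g. dict is not a valid zstd dictionary
    }
    defer enc.Close()
    compressed := enc.EncodeAll(data, nil)

    dec, err := zstd.NewReader(nil, zstd.WithDecoderDicts(dict))
    if err != nil {
        return nil, err
    }
    defer dec.Close()
    return dec.DecodeAll(compressed, nil)
}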