Mirror of https://github.com/coredns/coredns.git (synced 2025-12-21 01:25:11 -05:00)

Dep ensure -update (#1912)

* dep ensure -update
  Signed-off-by: Miek Gieben <miek@miek.nl>
* Add new files
  Signed-off-by: Miek Gieben <miek@miek.nl>
2  vendor/github.com/pierrec/lz4/.gitignore generated vendored
@@ -29,3 +29,5 @@ Temporary Items
.apdisk

# End of https://www.gitignore.io/api/macos

lz4c/lz4c
14  vendor/github.com/pierrec/lz4/.travis.yml generated vendored
@@ -1,8 +1,18 @@
language: go

go:
- 1.x
- 1.8.x
- 1.9.x
- 1.10.x
- master

matrix:
  fast_finish: true
  allow_failures:
    - go: master

sudo: false

script:
- go test -v -cpu=2
- go test -v -cpu=2 -race
- go test -v -cpu=2 -race
17  vendor/github.com/pierrec/lz4/README.md generated vendored
@@ -1,8 +1,7 @@
[](https://godoc.org/github.com/pierrec/lz4)
[](https://travis-ci.org/pierrec/lz4)

# lz4
LZ4 compression and decompression in pure Go
LZ4 compression and decompression in pure Go.

## Usage

@@ -11,21 +10,13 @@ import "github.com/pierrec/lz4"
```

## Description

Package lz4 implements reading and writing lz4 compressed data (a frame),
as specified in http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html,
using an io.Reader (decompression) and io.Writer (compression).
It is designed to minimize memory usage while maximizing throughput by being able to
[de]compress data concurrently.

The Reader and the Writer support concurrent processing provided the supplied buffers are
large enough (in multiples of BlockMaxSize) and there is no block dependency.
Reader.WriteTo and Writer.ReadFrom do leverage the concurrency transparently.
The runtime.GOMAXPROCS() value is used to apply concurrency or not.

as specified in http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html.
Although the block level compression and decompression functions are exposed and are fully compatible
with the lz4 block format definition, they are low level and should not be used directly.

For a complete description of an lz4 compressed block, see:
http://fastcompression.blogspot.fr/2011/05/lz4-explained.html

See https://github.com/Cyan4973/lz4 for the reference C implementation.
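Editorial note: the README text above describes the frame-level API (an io.Writer for compression, an io.Reader for decompression). For orientation only, and not part of this commit, a minimal round-trip sketch using the NewWriter/NewReader calls that also appear in bench_test.go below could look like this (the sample payload is made up):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"

	"github.com/pierrec/lz4"
)

func main() {
	src := []byte("hello hello hello hello hello") // sample payload, assumption

	// Compress: the Writer wraps any io.Writer and emits an lz4 frame.
	var compressed bytes.Buffer
	zw := lz4.NewWriter(&compressed)
	if _, err := io.Copy(zw, bytes.NewReader(src)); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil { // flush and finish the frame
		panic(err)
	}

	// Decompress: the Reader wraps any io.Reader and yields the original bytes.
	zr := lz4.NewReader(&compressed)
	out, err := ioutil.ReadAll(zr)
	if err != nil {
		panic(err)
	}
	fmt.Println(bytes.Equal(src, out)) // true
}
```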
119  vendor/github.com/pierrec/lz4/bench_test.go generated vendored Normal file
@@ -0,0 +1,119 @@
package lz4_test

import (
	"bytes"
	"io"
	"io/ioutil"
	"testing"

	"github.com/pierrec/lz4"
)

func BenchmarkCompress(b *testing.B) {
	var hashTable [1 << 16]int
	buf := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		lz4.CompressBlock(pg1661, buf, hashTable[:])
	}
}

func BenchmarkCompressHC(b *testing.B) {
	buf := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		lz4.CompressBlockHC(pg1661, buf, 16)
	}
}

func BenchmarkUncompress(b *testing.B) {
	buf := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		lz4.UncompressBlock(pg1661LZ4, buf)
	}
}

func mustLoadFile(f string) []byte {
	b, err := ioutil.ReadFile(f)
	if err != nil {
		panic(err)
	}
	return b
}

var (
	pg1661    = mustLoadFile("testdata/pg1661.txt")
	digits    = mustLoadFile("testdata/e.txt")
	twain     = mustLoadFile("testdata/Mark.Twain-Tom.Sawyer.txt")
	random    = mustLoadFile("testdata/random.data")
	pg1661LZ4 = mustLoadFile("testdata/pg1661.txt.lz4")
	digitsLZ4 = mustLoadFile("testdata/e.txt.lz4")
	twainLZ4  = mustLoadFile("testdata/Mark.Twain-Tom.Sawyer.txt.lz4")
	randomLZ4 = mustLoadFile("testdata/random.data.lz4")
)

func benchmarkUncompress(b *testing.B, compressed []byte) {
	r := bytes.NewReader(compressed)
	zr := lz4.NewReader(r)

	// Determine the uncompressed size of testfile.
	uncompressedSize, err := io.Copy(ioutil.Discard, zr)
	if err != nil {
		b.Fatal(err)
	}

	b.SetBytes(uncompressedSize)
	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		r.Reset(compressed)
		zr.Reset(r)
		io.Copy(ioutil.Discard, zr)
	}
}

func BenchmarkUncompressPg1661(b *testing.B) { benchmarkUncompress(b, pg1661LZ4) }
func BenchmarkUncompressDigits(b *testing.B) { benchmarkUncompress(b, digitsLZ4) }
func BenchmarkUncompressTwain(b *testing.B)  { benchmarkUncompress(b, twainLZ4) }
func BenchmarkUncompressRand(b *testing.B)   { benchmarkUncompress(b, randomLZ4) }

func benchmarkCompress(b *testing.B, uncompressed []byte) {
	w := bytes.NewBuffer(nil)
	zw := lz4.NewWriter(w)
	r := bytes.NewReader(uncompressed)

	// Determine the compressed size of testfile.
	compressedSize, err := io.Copy(zw, r)
	if err != nil {
		b.Fatal(err)
	}
	if err := zw.Close(); err != nil {
		b.Fatal(err)
	}

	b.SetBytes(compressedSize)
	b.ReportAllocs()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		r.Reset(uncompressed)
		zw.Reset(w)
		io.Copy(zw, r)
	}
}

func BenchmarkCompressPg1661(b *testing.B) { benchmarkCompress(b, pg1661) }
func BenchmarkCompressDigits(b *testing.B) { benchmarkCompress(b, digits) }
func BenchmarkCompressTwain(b *testing.B)  { benchmarkCompress(b, twain) }
func BenchmarkCompressRand(b *testing.B)   { benchmarkCompress(b, random) }
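Editorial note: the benchmarks above exercise the block-level entry points (CompressBlock with a caller-supplied hash table, CompressBlockHC with a search depth, UncompressBlock). As an aside that is not part of the diff, a minimal round-trip over those same calls, sizing the destination with CompressBlockBound, might look like this:

```go
package main

import (
	"fmt"

	"github.com/pierrec/lz4"
)

func main() {
	src := []byte("Repeated text compresses well. Repeated text compresses well.")

	// Destination sized with CompressBlockBound; the hash table can be reused
	// across calls, matching the usage in bench_test.go above.
	var hashTable [1 << 16]int
	dst := make([]byte, lz4.CompressBlockBound(len(src)))
	n, err := lz4.CompressBlock(src, dst, hashTable[:])
	if err != nil {
		panic(err)
	}
	if n == 0 {
		fmt.Println("data was not compressible")
		return
	}

	// Round-trip back into a buffer that must be at least len(src) bytes.
	out := make([]byte, len(src))
	m, err := lz4.UncompressBlock(dst[:n], out)
	if err != nil {
		panic(err)
	}
	fmt.Printf("compressed %d -> %d bytes, round-trip ok: %v\n", len(src), n, string(out[:m]) == string(src))
}
```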
451  vendor/github.com/pierrec/lz4/block.go generated vendored
@@ -5,188 +5,164 @@ import (
|
||||
"errors"
|
||||
)
|
||||
|
||||
// block represents a frame data block.
|
||||
// Used when compressing or decompressing frame blocks concurrently.
|
||||
type block struct {
|
||||
compressed bool
|
||||
zdata []byte // compressed data
|
||||
data []byte // decompressed data
|
||||
offset int // offset within the data as with block dependency the 64Kb window is prepended to it
|
||||
checksum uint32 // compressed data checksum
|
||||
err error // error while [de]compressing
|
||||
}
|
||||
|
||||
var (
|
||||
// ErrInvalidSource is returned by UncompressBlock when a compressed block is corrupted.
|
||||
ErrInvalidSource = errors.New("lz4: invalid source")
|
||||
// ErrShortBuffer is returned by UncompressBlock, CompressBlock or CompressBlockHC when
|
||||
// the supplied buffer for [de]compression is too small.
|
||||
ErrShortBuffer = errors.New("lz4: short buffer")
|
||||
// ErrInvalidSourceShortBuffer is returned by UncompressBlock or CompressBLock when a compressed
|
||||
// block is corrupted or the destination buffer is not large enough for the uncompressed data.
|
||||
ErrInvalidSourceShortBuffer = errors.New("lz4: invalid source or destination buffer too short")
|
||||
// ErrInvalid is returned when reading an invalid LZ4 archive.
|
||||
ErrInvalid = errors.New("lz4: bad magic number")
|
||||
)
|
||||
|
||||
// blockHash hashes 4 bytes into a value < winSize.
|
||||
func blockHash(x uint32) uint32 {
|
||||
const hasher uint32 = 2654435761 // Knuth multiplicative hash.
|
||||
return x * hasher >> hashShift
|
||||
}
|
||||
|
||||
// CompressBlockBound returns the maximum size of a given buffer of size n, when not compressible.
|
||||
func CompressBlockBound(n int) int {
|
||||
return n + n/255 + 16
|
||||
}
|
||||
|
||||
// UncompressBlock decompresses the source buffer into the destination one,
|
||||
// starting at the di index and returning the decompressed size.
|
||||
// UncompressBlock uncompresses the source buffer into the destination one,
|
||||
// and returns the uncompressed size.
|
||||
//
|
||||
// The destination buffer must be sized appropriately.
|
||||
//
|
||||
// An error is returned if the source data is invalid or the destination buffer is too small.
|
||||
func UncompressBlock(src, dst []byte, di int) (int, error) {
|
||||
si, sn, di0 := 0, len(src), di
|
||||
func UncompressBlock(src, dst []byte) (si int, err error) {
|
||||
defer func() {
|
||||
// It is now faster to let the runtime panic and recover on out of bound slice access
|
||||
// than checking indices as we go along.
|
||||
if recover() != nil {
|
||||
err = ErrInvalidSourceShortBuffer
|
||||
}
|
||||
}()
|
||||
sn := len(src)
|
||||
if sn == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
var di int
|
||||
|
||||
for {
|
||||
// literals and match lengths (token)
|
||||
lLen := int(src[si] >> 4)
|
||||
mLen := int(src[si] & 0xF)
|
||||
if si++; si == sn {
|
||||
return di, ErrInvalidSource
|
||||
}
|
||||
// Literals and match lengths (token).
|
||||
b := int(src[si])
|
||||
si++
|
||||
|
||||
// literals
|
||||
if lLen > 0 {
|
||||
// Literals.
|
||||
if lLen := b >> 4; lLen > 0 {
|
||||
if lLen == 0xF {
|
||||
for src[si] == 0xFF {
|
||||
lLen += 0xFF
|
||||
if si++; si == sn {
|
||||
return di - di0, ErrInvalidSource
|
||||
}
|
||||
si++
|
||||
}
|
||||
lLen += int(src[si])
|
||||
if si++; si == sn {
|
||||
return di - di0, ErrInvalidSource
|
||||
}
|
||||
si++
|
||||
}
|
||||
if len(dst)-di < lLen || si+lLen > sn {
|
||||
return di - di0, ErrShortBuffer
|
||||
}
|
||||
di += copy(dst[di:], src[si:si+lLen])
|
||||
i := si
|
||||
si += lLen
|
||||
di += copy(dst[di:], src[i:si])
|
||||
|
||||
if si += lLen; si >= sn {
|
||||
return di - di0, nil
|
||||
if si >= sn {
|
||||
return di, nil
|
||||
}
|
||||
}
|
||||
|
||||
if si += 2; si >= sn {
|
||||
return di, ErrInvalidSource
|
||||
}
|
||||
offset := int(src[si-2]) | int(src[si-1])<<8
|
||||
if di-offset < 0 || offset == 0 {
|
||||
return di - di0, ErrInvalidSource
|
||||
}
|
||||
si++
|
||||
_ = src[si] // Bound check elimination.
|
||||
offset := int(src[si-1]) | int(src[si])<<8
|
||||
si++
|
||||
|
||||
// match
|
||||
// Match.
|
||||
mLen := b & 0xF
|
||||
if mLen == 0xF {
|
||||
for src[si] == 0xFF {
|
||||
mLen += 0xFF
|
||||
if si++; si == sn {
|
||||
return di - di0, ErrInvalidSource
|
||||
}
|
||||
si++
|
||||
}
|
||||
mLen += int(src[si])
|
||||
if si++; si == sn {
|
||||
return di - di0, ErrInvalidSource
|
||||
}
|
||||
}
|
||||
// minimum match length is 4
|
||||
mLen += 4
|
||||
if len(dst)-di <= mLen {
|
||||
return di - di0, ErrShortBuffer
|
||||
si++
|
||||
}
|
||||
mLen += minMatch
|
||||
|
||||
// copy the match (NB. match is at least 4 bytes long)
|
||||
if mLen >= offset {
|
||||
// Copy the match.
|
||||
i := di - offset
|
||||
if offset > 0 && mLen >= offset {
|
||||
// Efficiently copy the match dst[di-offset:di] into the dst slice.
|
||||
bytesToCopy := offset * (mLen / offset)
|
||||
// Efficiently copy the match dst[di-offset:di] into the slice
|
||||
// dst[di:di+bytesToCopy]
|
||||
expanded := dst[di-offset : di+bytesToCopy]
|
||||
n := offset
|
||||
for n <= bytesToCopy+offset {
|
||||
expanded := dst[i:]
|
||||
for n := offset; n <= bytesToCopy+offset; n *= 2 {
|
||||
copy(expanded[n:], expanded[:n])
|
||||
n *= 2
|
||||
}
|
||||
di += bytesToCopy
|
||||
mLen -= bytesToCopy
|
||||
}
|
||||
|
||||
di += copy(dst[di:], dst[di-offset:di-offset+mLen])
|
||||
di += copy(dst[di:], dst[i:i+mLen])
|
||||
}
|
||||
}
|
||||
|
||||
// CompressBlock compresses the source buffer starting at soffet into the destination one.
|
||||
// CompressBlock compresses the source buffer into the destination one.
|
||||
// This is the fast version of LZ4 compression and also the default one.
|
||||
// The size of hashTable must be at least 64Kb.
|
||||
//
|
||||
// The size of the compressed data is returned. If it is 0 and no error, then the data is incompressible.
|
||||
//
|
||||
// An error is returned if the destination buffer is too small.
|
||||
func CompressBlock(src, dst []byte, soffset int) (int, error) {
|
||||
func CompressBlock(src, dst []byte, hashTable []int) (di int, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
err = ErrInvalidSourceShortBuffer
|
||||
}
|
||||
}()
|
||||
|
||||
sn, dn := len(src)-mfLimit, len(dst)
|
||||
if sn <= 0 || dn == 0 || soffset >= sn {
|
||||
if sn <= 0 || dn == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
var si, di int
|
||||
var si int
|
||||
|
||||
// fast scan strategy:
|
||||
// we only need a hash table to store the last sequences (4 bytes)
|
||||
var hashTable [1 << hashLog]int
|
||||
var hashShift = uint((minMatch * 8) - hashLog)
|
||||
// Fast scan strategy: the hash table only stores the last 4 bytes sequences.
|
||||
// const accInit = 1 << skipStrength
|
||||
|
||||
// Initialise the hash table with the first 64Kb of the input buffer
|
||||
// (used when compressing dependent blocks)
|
||||
for si < soffset {
|
||||
h := binary.LittleEndian.Uint32(src[si:]) * hasher >> hashShift
|
||||
si++
|
||||
anchor := si // Position of the current literals.
|
||||
// acc := accInit // Variable step: improves performance on non-compressible data.
|
||||
|
||||
for si < sn {
|
||||
// Hash the next 4 bytes (sequence)...
|
||||
match := binary.LittleEndian.Uint32(src[si:])
|
||||
h := blockHash(match)
|
||||
|
||||
ref := hashTable[h]
|
||||
hashTable[h] = si
|
||||
}
|
||||
|
||||
anchor := si
|
||||
fma := 1 << skipStrength
|
||||
for si < sn-minMatch {
|
||||
// hash the next 4 bytes (sequence)...
|
||||
h := binary.LittleEndian.Uint32(src[si:]) * hasher >> hashShift
|
||||
// -1 to separate existing entries from new ones
|
||||
ref := hashTable[h] - 1
|
||||
// ...and store the position of the hash in the hash table (+1 to compensate the -1 upon saving)
|
||||
hashTable[h] = si + 1
|
||||
// no need to check the last 3 bytes in the first literal 4 bytes as
|
||||
// this guarantees that the next match, if any, is compressed with
|
||||
// a lower size, since to have some compression we must have:
|
||||
// ll+ml-overlap > 1 + (ll-15)/255 + (ml-4-15)/255 + 2 (uncompressed size>compressed size)
|
||||
// => ll+ml>3+2*overlap => ll+ml>= 4+2*overlap
|
||||
// and by definition we do have:
|
||||
// ll >= 1, ml >= 4
|
||||
// => ll+ml >= 5
|
||||
// => so overlap must be 0
|
||||
|
||||
// the sequence is new, out of bound (64kb) or not valid: try next sequence
|
||||
if ref < 0 || fma&(1<<skipStrength-1) < 4 ||
|
||||
(si-ref)>>winSizeLog > 0 ||
|
||||
src[ref] != src[si] ||
|
||||
src[ref+1] != src[si+1] ||
|
||||
src[ref+2] != src[si+2] ||
|
||||
src[ref+3] != src[si+3] {
|
||||
// variable step: improves performance on non-compressible data
|
||||
si += fma >> skipStrength
|
||||
fma++
|
||||
if ref >= sn { // Invalid reference (dirty hashtable).
|
||||
si++
|
||||
continue
|
||||
}
|
||||
// match found
|
||||
fma = 1 << skipStrength
|
||||
lLen := si - anchor
|
||||
offset := si - ref
|
||||
if offset <= 0 || offset >= winSize || // Out of window.
|
||||
match != binary.LittleEndian.Uint32(src[ref:]) { // Hash collision on different matches.
|
||||
// si += acc >> skipStrength
|
||||
// acc++
|
||||
si++
|
||||
continue
|
||||
}
|
||||
|
||||
// encode match length part 1
|
||||
// Match found.
|
||||
// acc = accInit
|
||||
lLen := si - anchor // Literal length.
|
||||
|
||||
// Encode match length part 1.
|
||||
si += minMatch
|
||||
mLen := si // match length has minMatch already
|
||||
for si <= sn && src[si] == src[si-offset] {
|
||||
mLen := si // Match length has minMatch already.
|
||||
// Find the longest match, first looking by batches of 8 bytes.
|
||||
for si < sn && binary.LittleEndian.Uint64(src[si:]) == binary.LittleEndian.Uint64(src[si-offset:]) {
|
||||
si += 8
|
||||
}
|
||||
// Then byte by byte.
|
||||
for si < sn && src[si] == src[si-offset] {
|
||||
si++
|
||||
}
|
||||
|
||||
mLen = si - mLen
|
||||
if mLen < 0xF {
|
||||
dst[di] = byte(mLen)
|
||||
@@ -194,169 +170,160 @@ func CompressBlock(src, dst []byte, soffset int) (int, error) {
|
||||
dst[di] = 0xF
|
||||
}
|
||||
|
||||
// encode literals length
|
||||
// Encode literals length.
|
||||
if lLen < 0xF {
|
||||
dst[di] |= byte(lLen << 4)
|
||||
} else {
|
||||
dst[di] |= 0xF0
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
l := lLen - 0xF
|
||||
for ; l >= 0xFF; l -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(l)
|
||||
}
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
|
||||
// literals
|
||||
if di+lLen >= dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di += copy(dst[di:], src[anchor:anchor+lLen])
|
||||
// Literals.
|
||||
copy(dst[di:], src[anchor:anchor+lLen])
|
||||
di += lLen + 2
|
||||
anchor = si
|
||||
|
||||
// encode offset
|
||||
if di += 2; di >= dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
// Encode offset.
|
||||
_ = dst[di] // Bound check elimination.
|
||||
dst[di-2], dst[di-1] = byte(offset), byte(offset>>8)
|
||||
|
||||
// encode match length part 2
|
||||
// Encode match length part 2.
|
||||
if mLen >= 0xF {
|
||||
for mLen -= 0xF; mLen >= 0xFF; mLen -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(mLen)
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
}
|
||||
|
||||
if anchor == 0 {
|
||||
// incompressible
|
||||
// Incompressible.
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// last literals
|
||||
// Last literals.
|
||||
lLen := len(src) - anchor
|
||||
if lLen < 0xF {
|
||||
dst[di] = byte(lLen << 4)
|
||||
} else {
|
||||
dst[di] = 0xF0
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
lLen -= 0xF
|
||||
for ; lLen >= 0xFF; lLen -= 0xFF {
|
||||
di++
|
||||
for lLen -= 0xF; lLen >= 0xFF; lLen -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(lLen)
|
||||
}
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
|
||||
// write literals
|
||||
src = src[anchor:]
|
||||
switch n := di + len(src); {
|
||||
case n > dn:
|
||||
return di, ErrShortBuffer
|
||||
case n >= sn:
|
||||
// incompressible
|
||||
// Write the last literals.
|
||||
if di >= anchor {
|
||||
// Incompressible.
|
||||
return 0, nil
|
||||
}
|
||||
di += copy(dst[di:], src)
|
||||
di += copy(dst[di:], src[anchor:])
|
||||
return di, nil
|
||||
}
|
||||
|
||||
// CompressBlockHC compresses the source buffer starting at soffet into the destination one.
|
||||
// CompressBlockHC compresses the source buffer src into the destination dst
|
||||
// with max search depth (use 0 or negative value for no max).
|
||||
//
|
||||
// CompressBlockHC compression ratio is better than CompressBlock but it is also slower.
|
||||
//
|
||||
// The size of the compressed data is returned. If it is 0 and no error, then the data is not compressible.
|
||||
//
|
||||
// An error is returned if the destination buffer is too small.
|
||||
func CompressBlockHC(src, dst []byte, soffset int) (int, error) {
|
||||
func CompressBlockHC(src, dst []byte, depth int) (di int, err error) {
|
||||
defer func() {
|
||||
if recover() != nil {
|
||||
err = ErrInvalidSourceShortBuffer
|
||||
}
|
||||
}()
|
||||
|
||||
sn, dn := len(src)-mfLimit, len(dst)
|
||||
if sn <= 0 || dn == 0 || soffset >= sn {
|
||||
if sn <= 0 || dn == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
var si, di int
|
||||
var si int
|
||||
|
||||
// Hash Chain strategy:
|
||||
// we need a hash table and a chain table
|
||||
// the chain table cannot contain more entries than the window size (64Kb entries)
|
||||
var hashTable [1 << hashLog]int
|
||||
var chainTable [winSize]int
|
||||
var hashShift = uint((minMatch * 8) - hashLog)
|
||||
// hashTable: stores the last position found for a given hash
|
||||
// chaingTable: stores previous positions for a given hash
|
||||
var hashTable, chainTable [winSize]int
|
||||
|
||||
// Initialise the hash table with the first 64Kb of the input buffer
|
||||
// (used when compressing dependent blocks)
|
||||
for si < soffset {
|
||||
h := binary.LittleEndian.Uint32(src[si:]) * hasher >> hashShift
|
||||
chainTable[si&winMask] = hashTable[h]
|
||||
si++
|
||||
hashTable[h] = si
|
||||
if depth <= 0 {
|
||||
depth = winSize
|
||||
}
|
||||
|
||||
anchor := si
|
||||
for si < sn-minMatch {
|
||||
// hash the next 4 bytes (sequence)...
|
||||
h := binary.LittleEndian.Uint32(src[si:]) * hasher >> hashShift
|
||||
for si < sn {
|
||||
// Hash the next 4 bytes (sequence).
|
||||
match := binary.LittleEndian.Uint32(src[si:])
|
||||
h := blockHash(match)
|
||||
|
||||
// follow the chain until out of window and give the longest match
|
||||
// Follow the chain until out of window and give the longest match.
|
||||
mLen := 0
|
||||
offset := 0
|
||||
for next := hashTable[h] - 1; next > 0 && next > si-winSize; next = chainTable[next&winMask] - 1 {
|
||||
// the first (mLen==0) or next byte (mLen>=minMatch) at current match length must match to improve on the match length
|
||||
if src[next+mLen] == src[si+mLen] {
|
||||
for ml := 0; ; ml++ {
|
||||
if src[next+ml] != src[si+ml] || si+ml > sn {
|
||||
// found a longer match, keep its position and length
|
||||
if mLen < ml && ml >= minMatch {
|
||||
mLen = ml
|
||||
offset = si - next
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
for next, try := hashTable[h], depth; try > 0 && next > 0 && si-next < winSize; next = chainTable[next&winMask] {
|
||||
// The first (mLen==0) or next byte (mLen>=minMatch) at current match length
|
||||
// must match to improve on the match length.
|
||||
if src[next+mLen] != src[si+mLen] {
|
||||
continue
|
||||
}
|
||||
ml := 0
|
||||
// Compare the current position with a previous with the same hash.
|
||||
for ml < sn-si && binary.LittleEndian.Uint64(src[next+ml:]) == binary.LittleEndian.Uint64(src[si+ml:]) {
|
||||
ml += 8
|
||||
}
|
||||
for ml < sn-si && src[next+ml] == src[si+ml] {
|
||||
ml++
|
||||
}
|
||||
if ml+1 < minMatch || ml <= mLen {
|
||||
// Match too small (<minMath) or smaller than the current match.
|
||||
continue
|
||||
}
|
||||
// Found a longer match, keep its position and length.
|
||||
mLen = ml
|
||||
offset = si - next
|
||||
// Try another previous position with the same hash.
|
||||
try--
|
||||
}
|
||||
chainTable[si&winMask] = hashTable[h]
|
||||
hashTable[h] = si + 1
|
||||
hashTable[h] = si
|
||||
|
||||
// no match found
|
||||
// No match found.
|
||||
if mLen == 0 {
|
||||
si++
|
||||
continue
|
||||
}
|
||||
|
||||
// match found
|
||||
// update hash/chain tables with overlaping bytes:
|
||||
// si already hashed, add everything from si+1 up to the match length
|
||||
for si, ml := si+1, si+mLen; si < ml; {
|
||||
h := binary.LittleEndian.Uint32(src[si:]) * hasher >> hashShift
|
||||
// Match found.
|
||||
// Update hash/chain tables with overlapping bytes:
|
||||
// si already hashed, add everything from si+1 up to the match length.
|
||||
winStart := si + 1
|
||||
if ws := si + mLen - winSize; ws > winStart {
|
||||
winStart = ws
|
||||
}
|
||||
for si, ml := winStart, si+mLen; si < ml; {
|
||||
match >>= 8
|
||||
match |= uint32(src[si+3]) << 24
|
||||
h := blockHash(match)
|
||||
chainTable[si&winMask] = hashTable[h]
|
||||
si++
|
||||
hashTable[h] = si
|
||||
si++
|
||||
}
|
||||
|
||||
lLen := si - anchor
|
||||
si += mLen
|
||||
mLen -= minMatch // match length does not include minMatch
|
||||
mLen -= minMatch // Match length does not include minMatch.
|
||||
|
||||
if mLen < 0xF {
|
||||
dst[di] = byte(mLen)
|
||||
@@ -364,91 +331,67 @@ func CompressBlockHC(src, dst []byte, soffset int) (int, error) {
|
||||
dst[di] = 0xF
|
||||
}
|
||||
|
||||
// encode literals length
|
||||
// Encode literals length.
|
||||
if lLen < 0xF {
|
||||
dst[di] |= byte(lLen << 4)
|
||||
} else {
|
||||
dst[di] |= 0xF0
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
l := lLen - 0xF
|
||||
for ; l >= 0xFF; l -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(l)
|
||||
}
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
|
||||
// literals
|
||||
if di+lLen >= dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di += copy(dst[di:], src[anchor:anchor+lLen])
|
||||
// Literals.
|
||||
copy(dst[di:], src[anchor:anchor+lLen])
|
||||
di += lLen
|
||||
anchor = si
|
||||
|
||||
// encode offset
|
||||
if di += 2; di >= dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
// Encode offset.
|
||||
di += 2
|
||||
dst[di-2], dst[di-1] = byte(offset), byte(offset>>8)
|
||||
|
||||
// encode match length part 2
|
||||
// Encode match length part 2.
|
||||
if mLen >= 0xF {
|
||||
for mLen -= 0xF; mLen >= 0xFF; mLen -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(mLen)
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
}
|
||||
|
||||
if anchor == 0 {
|
||||
// incompressible
|
||||
// Incompressible.
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// last literals
|
||||
// Last literals.
|
||||
lLen := len(src) - anchor
|
||||
if lLen < 0xF {
|
||||
dst[di] = byte(lLen << 4)
|
||||
} else {
|
||||
dst[di] = 0xF0
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
lLen -= 0xF
|
||||
for ; lLen >= 0xFF; lLen -= 0xFF {
|
||||
dst[di] = 0xFF
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
}
|
||||
dst[di] = byte(lLen)
|
||||
}
|
||||
if di++; di == dn {
|
||||
return di, ErrShortBuffer
|
||||
}
|
||||
di++
|
||||
|
||||
// write literals
|
||||
src = src[anchor:]
|
||||
switch n := di + len(src); {
|
||||
case n > dn:
|
||||
return di, ErrShortBuffer
|
||||
case n >= sn:
|
||||
// incompressible
|
||||
// Write the last literals.
|
||||
if di >= anchor {
|
||||
// Incompressible.
|
||||
return 0, nil
|
||||
}
|
||||
di += copy(dst[di:], src)
|
||||
di += copy(dst[di:], src[anchor:])
|
||||
return di, nil
|
||||
}
|
||||
|
||||
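Editorial note: the block.go changes above keep the LZ4 sequence layout intact: a token byte whose high nibble is the literal length and low nibble the match length (0xF meaning "extended by following bytes"), the literals themselves, a 2-byte little-endian offset, then the match-length extension, with minMatch (4) always added back. Purely as an illustrative sketch of that layout, with hypothetical names and not code from the library:

```go
package main

import "fmt"

// decodeSequenceHeader is a hypothetical helper that walks one LZ4 sequence
// header the same way UncompressBlock above does; it is for illustration only.
func decodeSequenceHeader(src []byte) (lLen, mLen, offset, n int) {
	token := src[0]
	n = 1

	// Literal length: high nibble; 0xF means keep adding the following bytes,
	// each 0xFF contributing 255, until a byte below 0xFF ends the count.
	lLen = int(token >> 4)
	if lLen == 0xF {
		for src[n] == 0xFF {
			lLen += 0xFF
			n++
		}
		lLen += int(src[n])
		n++
	}
	n += lLen // skip over the literal bytes

	// Offset: 2 bytes, little endian, pointing back into already-decoded data.
	offset = int(src[n]) | int(src[n+1])<<8
	n += 2

	// Match length: low nibble, extended the same way, plus minMatch (4).
	mLen = int(token & 0xF)
	if mLen == 0xF {
		for src[n] == 0xFF {
			mLen += 0xFF
			n++
		}
		mLen += int(src[n])
		n++
	}
	mLen += 4
	return lLen, mLen, offset, n
}

func main() {
	// Hand-built sequence: token 0x54 (5 literals, match nibble 4), the five
	// literal bytes, then offset 5 -> literal length 5, match length 8.
	seq := []byte{0x54, 'h', 'e', 'l', 'l', 'o', 0x05, 0x00}
	lLen, mLen, offset, n := decodeSequenceHeader(seq)
	fmt.Println(lLen, mLen, offset, n) // 5 8 5 8
}
```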
98  vendor/github.com/pierrec/lz4/block_test.go generated vendored Normal file
@@ -0,0 +1,98 @@
//+build go1.9

package lz4_test

import (
	"fmt"
	"io/ioutil"
	"reflect"
	"testing"

	"github.com/pierrec/lz4"
)

type testcase struct {
	file         string
	compressible bool
	src          []byte
}

var rawFiles = []testcase{
	// {"testdata/207326ba-36f8-11e7-954a-aca46ba8ca73.png", true, nil},
	{"testdata/e.txt", true, nil},
	{"testdata/gettysburg.txt", true, nil},
	{"testdata/Mark.Twain-Tom.Sawyer.txt", true, nil},
	{"testdata/pg1661.txt", true, nil},
	{"testdata/pi.txt", true, nil},
	{"testdata/random.data", false, nil},
	{"testdata/repeat.txt", true, nil},
}

func TestCompressUncompressBlock(t *testing.T) {
	type compressor func(s, d []byte) (int, error)

	run := func(tc testcase, compress compressor) int {
		t.Helper()
		src := tc.src

		// Compress the data.
		zbuf := make([]byte, lz4.CompressBlockBound(len(src)))
		n, err := compress(src, zbuf)
		if err != nil {
			t.Error(err)
			return 0
		}
		zbuf = zbuf[:n]

		// Make sure that it was actually compressed unless not compressible.
		if !tc.compressible {
			return 0
		}

		if n == 0 || n >= len(src) {
			t.Errorf("data not compressed: %d/%d", n, len(src))
			return 0
		}

		// Uncompress the data.
		buf := make([]byte, len(src))
		n, err = lz4.UncompressBlock(zbuf, buf)
		if err != nil {
			t.Fatal(err)
		}
		buf = buf[:n]
		if !reflect.DeepEqual(src, buf) {
			t.Error("uncompressed compressed data not matching initial input")
			return 0
		}

		return len(zbuf)
	}

	for _, tc := range rawFiles {
		src, err := ioutil.ReadFile(tc.file)
		if err != nil {
			t.Fatal(err)
		}
		tc.src = src

		var n, nhc int
		t.Run("", func(t *testing.T) {
			tc := tc
			t.Run(tc.file, func(t *testing.T) {
				t.Parallel()
				n = run(tc, func(src, dst []byte) (int, error) {
					var ht [1 << 16]int
					return lz4.CompressBlock(src, dst, ht[:])
				})
			})
			t.Run(fmt.Sprintf("%s HC", tc.file), func(t *testing.T) {
				t.Parallel()
				nhc = run(tc, func(src, dst []byte) (int, error) {
					return lz4.CompressBlockHC(src, dst, -1)
				})
			})
		})
		fmt.Printf("%-40s: %8d / %8d / %8d\n", tc.file, n, nhc, len(src))
	}
}
21  vendor/github.com/pierrec/lz4/debug.go generated vendored Normal file
@@ -0,0 +1,21 @@
// +build lz4debug

package lz4

import (
	"fmt"
	"os"
	"path/filepath"
	"runtime"
)

func debug(args ...interface{}) {
	_, file, line, _ := runtime.Caller(1)
	file = filepath.Base(file)

	f := fmt.Sprintf("LZ4: %s:%d %s", file, line, args[0])
	if f[len(f)-1] != '\n' {
		f += "\n"
	}
	fmt.Fprintf(os.Stderr, f, args[1:]...)
}
5  vendor/github.com/pierrec/lz4/debug_stub.go generated vendored Normal file
@@ -0,0 +1,5 @@
// +build !lz4debug

package lz4

func debug(args ...interface{}) {}
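Editorial note: the two files above use Go build tags so that debug tracing exists only when the package is built with `-tags lz4debug` and compiles to a no-op otherwise. The same pattern, reduced to a self-contained sketch with hypothetical names (not code from the library):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"runtime"
)

// debugf mimics the debug helper above: prefix the message with the caller's
// file:line and write it to stderr. In the library this body lives in a file
// guarded by "+build lz4debug", with an empty stub built otherwise.
func debugf(format string, args ...interface{}) {
	_, file, line, _ := runtime.Caller(1)
	fmt.Fprintf(os.Stderr, "LZ4: %s:%d %s\n", filepath.Base(file), line, fmt.Sprintf(format, args...))
}

func main() {
	debugf("decoded %d bytes", 42)
}
```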
The following generated, vendored binary fuzz corpus files were added (new files; binary contents not shown):
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/0519d86e62cc577b98e9a4836b071ba1692c7674-30
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/0608f9eba5e6fd4d70241a81a6950ca51d78eb64-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/17871030a73ac4d12ada652948135cb4639d679c-34
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/1971e6ed6c6f6069fc2a9ed3038101e89bbcc381-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/1a58f02dc83ac8315a85babdea6d757cbff2bb03-30
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/1a5a08b67764facaad851b9f1cbc5cfb31b7fb56-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/1c944d5065b1a2b30e412604a14aa52565a5765b-35
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/2065ba3177c7dc5047742faa7158b3faeaac1f3c-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/21c8be1bb9eeea5b141500dee4987ab7fbd40d4a-23
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/31c6c22708d346ed9e936fa7e77c8d9ab6da8d1e-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/344d38ec2ec90cb617e809439938b4cbf3b11f02-10
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/352631eab692c4a2c378b231fb3407ebcc0c3039-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/396146e06d3a4b2468d080f89ab5862348073424-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/3b6fd6da48bb34284390a75e22940e7234dbbd28-34
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/4114fd99aaa4dc95365dc4bbcb3c9a8a03434a5a-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/4131f155339a3476898088b065b8588a2b28278e-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/42544ff3318fe86dd466e9a05068e752a1057fcc-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/4a14a3883f5c8819405319e8fb96234f5746a0ef-22
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/51075c34f23d161fb97edcf6f1b73ee6005009a0-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/517d39f406222f0a0121b7a1961953204674c251-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/5e19e298d051aac48b7683dc24577b46268b630c-35
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/5f946423d1138924933334c6e5d3eb13e1020e9c-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/633df0cd78621cd45067a58d23c6ed67bb1b60cb-31
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/66c34847568ac9cb3ccbb8be26f494988a3e0628-7
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/67534dbd68040fb9a8867e6af384d33ea323758b-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/68612136c2017f9caf87122155f82a25f57c2d2a-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/6981397d97c481e39d563d43916377fb3c74c60e-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/69c2accb74456005e2a9bbef15ccad3d076f2124-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/69fcd886042d5c3ebe89afd561782ac25619e35b-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/6b72fdd9989971ecc3b50c34ee420f56a03e1026-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/72c738d7492d3055c6fe7391198422984b9e4702-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/764571571e4d46f4397ed534d0160718ce578da4-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/78e59daada9b9be755d1b508dd392fa9fc6fa9c2-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/78ef686662a059f053f80c1c63c2921deff073fb-31
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/7a0fc8dacceae32a59589711dce63800085c22c7-23
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/7b919213d591e6ce4355c635dc1ecc0d8e78befe-30
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/7f8c3b163798c8d5e1b65e03f411b56b6c9384bb-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/82a499521f34b6a9aff3b71d5f8bfd358933a4b2-36
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/82c627991d65c5c4e88c9ccac39be082cca40765-24
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/8435aa58e67c4de798375b44c11bffa5b680f615-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/87caf7737ebb025ec2d908224818ceb2bc76b658-28
1  vendor/github.com/pierrec/lz4/fuzz/corpus/8d70b7de160bbef22ab46f798d687a69dbda772c-5 generated vendored Normal file
@@ -0,0 +1 @@
"MbT
Further generated, vendored binary fuzz corpus files added (new files; binary contents not shown):
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/8f4788d30edd22ebcfef0e52bbf9e8c3d1e8d7e9-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/92d41e4fca52311e848fac274144b6293d9260f7-34
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/955c823909722e2693dd7cea3eadc17833dddf86-24
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/95ca8da5556065f33b46c2c8186c2f1cebb1b5da-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/995d50f1cb750cbf038246d6cb0cf8db11d7e60e-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/9a5ab6c72a445b3b27129004d2a1a417cd4d8440-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/9e160ae007fc11092a3fd877ebe706c4d841db49-19
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/a97d9bf241e8ec73f99205b32c24fcd64194f0b9-8
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/b53101ec4348e9c329c13e22790ffde246743030-35
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/b58429fd1107617191026029cf327b2ebed963bb-18
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/b92c70d3f12e67c69ba5db9ad491b7a4e075ece8-7
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/bc3ac4aae07cba8d7f657a8739d1774e44bde613-31
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/bdc123d9da19a7ae0ff87ca0741002fbd8bb2cca-34
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/c1972d0c898848e6188b69bcdbb7d14fcc780ee5-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/c42ae63ab9584753959f4692cef9fd8513b54691-30
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/c8b01a7ea9c1b84e4ee5eb68121c64f183e7ea10-9
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/cb1314cc880a1a389cedf5c16cc4b8ad505b4506-23
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/ceb22e7f581d85ed876e3d61da7df65da8954bf2-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/d8873ec9a0344ea23f70d1ffd78c2fd0435b9885-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/da3418e70658be491531ef6524f6ef7984ff9e96-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/daffc68f738bd5945de9c7babd4e01cc4438fae8-31
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/df5bd5044e9b74c648b5f5fcb4dbdf953175f9f9-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/e22a5ac115e8bfd3468c9e6ad73ea11b8743798a-30
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/e544de8de59a005934dd4b7fd465c5bb0046482e-26
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/e7f55f4c85203100c3cd819cdc87abb0e9e86374-32
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/ea83e3b78398628e8a85e2e618fa956c0ffbd733-35
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/eb967d9cb0407c2328bbdbf98b5602274452d900-23
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/ec93fb54ce508e132c89b6637913f84c3c78bafd-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/efd3db86b12d209db7f0b24281a2cccebff526cd-33
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/f31dcf6e3044e050f2396b601ebe420e89749c07-27
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/f3f49f3016c41052be090544cf110c322bc7ef63-24
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/f4003ca01b90a4ee1be5701a5dd7d5f04e00c8f8-28
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/f5ecb47dfd92bb0564588beefd03ffcb0bbdae54-29
BIN  vendor/github.com/pierrec/lz4/fuzz/corpus/f9bcd3660c355799a865fedd15cb27a18591f244-33
BIN  vendor/github.com/pierrec/lz4/fuzz/crashers/0b8f7fcd1f53d5bd839e5728ba92db050f5e0968 generated vendored (binary file not shown)
@@ -1,51 +0,0 @@
|
||||
program hanged (timeout 10 seconds)
|
||||
|
||||
SIGABRT: abort
|
||||
PC=0x5e9e2 m=0
|
||||
|
||||
goroutine 1 [running]:
|
||||
github.com/pierrec/lz4.UncompressBlock(0x820282830, 0x6, 0x6, 0x82032e000, 0x10000, 0x10000, 0x0, 0x6, 0x0, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/block.go:104 +0xec2 fp=0x8202b59d8 sp=0x8202b5900
|
||||
github.com/pierrec/lz4.(*Reader).decompressBlock(0x8203085a0, 0x820290240, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/reader.go:271 +0x189 fp=0x8202b5a48 sp=0x8202b59d8
|
||||
github.com/pierrec/lz4.(*Reader).Read(0x8203085a0, 0x82030b400, 0x200, 0x200, 0x0, 0x0, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/reader.go:188 +0x1156 fp=0x8202b5c38 sp=0x8202b5a48
|
||||
bytes.(*Buffer).ReadFrom(0x8202b5d68, 0x882042d260, 0x8203085a0, 0x0, 0x0, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/bytes/buffer.go:173 +0x3db fp=0x8202b5ce8 sp=0x8202b5c38
|
||||
io/ioutil.readAll(0x882042d260, 0x8203085a0, 0x200, 0x0, 0x0, 0x0, 0x0, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/io/ioutil/ioutil.go:33 +0x1ed fp=0x8202b5de0 sp=0x8202b5ce8
|
||||
io/ioutil.ReadAll(0x882042d260, 0x8203085a0, 0x0, 0x0, 0x0, 0x0, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/io/ioutil/ioutil.go:42 +0x80 fp=0x8202b5e28 sp=0x8202b5de0
|
||||
github.com/pierrec/lz4/fuzz.Fuzz(0x8820479000, 0x1b, 0x200000, 0x3)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/fuzz/lz4.go:11 +0x15f fp=0x8202b5ea0 sp=0x8202b5e28
|
||||
github.com/dvyukov/go-fuzz/go-fuzz-dep.Main(0x1a7f18)
|
||||
/Users/pierrecurto/sandbox/src/github.com/dvyukov/go-fuzz/go-fuzz-dep/main.go:47 +0x14c fp=0x8202b5f40 sp=0x8202b5ea0
|
||||
main.main()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/go-fuzz-main/main.go:10 +0x23 fp=0x8202b5f50 sp=0x8202b5f40
|
||||
runtime.main()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/runtime/proc.go:111 +0x2b0 fp=0x8202b5fa0 sp=0x8202b5f50
|
||||
runtime.goexit()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/runtime/asm_amd64.s:1696 +0x1 fp=0x8202b5fa8 sp=0x8202b5fa0
|
||||
|
||||
rax 0x0
|
||||
rbx 0x0
|
||||
rcx 0x0
|
||||
rdx 0x82032e000
|
||||
rdi 0x82032e000
|
||||
rsi 0x82032e000
|
||||
rbp 0x0
|
||||
rsp 0x8202b5900
|
||||
r8 0x10000
|
||||
r9 0x82032e000
|
||||
r10 0x10000
|
||||
r11 0x82032e000
|
||||
r12 0x10000
|
||||
r13 0x10000
|
||||
r14 0x1
|
||||
r15 0x8
|
||||
rip 0x5e9e2
|
||||
rflags 0x206
|
||||
cs 0x2b
|
||||
fs 0x0
|
||||
gs 0x0
|
||||
exit status 2
|
||||
@@ -1,2 +0,0 @@
"\x04\"M\x18M@\x00\x00B*M\f\x00'\x01\x06\x00\x00\x00\x00" +
"\x00\x00\x16\xe3\x00\x10\x1e"
BIN  vendor/github.com/pierrec/lz4/fuzz/crashers/169b44c5a64fec4d8e969d25d3e4764c9c3b604b generated vendored (binary file not shown)
@@ -1,54 +0,0 @@
|
||||
program hanged (timeout 10 seconds)
|
||||
|
||||
SIGABRT: abort
|
||||
PC=0x5669b m=0
|
||||
|
||||
goroutine 0 [idle]:
|
||||
runtime.mach_semaphore_wait(0x703, 0x7fff5fbff9a8, 0x8202fe401, 0x0, 0x1, 0x238cc0, 0x49b09, 0xffffffffffffffff, 0x600, 0x7fff5fbff90c, ...)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/sys_darwin_amd64.s:407 +0xb
|
||||
runtime.semasleep1(0xffffffffffffffff, 0x600)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/os1_darwin.go:385 +0xe5
|
||||
runtime.semasleep.func1()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/os1_darwin.go:401 +0x29
|
||||
runtime.systemstack(0x7fff5fbff910)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/asm_amd64.s:278 +0xab
|
||||
runtime.semasleep(0xffffffffffffffff, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/os1_darwin.go:402 +0x36
|
||||
runtime.notesleep(0x239110)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/lock_sema.go:169 +0x100
|
||||
runtime.stopm()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/proc1.go:1128 +0x112
|
||||
runtime.findrunnable(0x8202a4000, 0x0)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/proc1.go:1530 +0x69e
|
||||
runtime.schedule()
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/proc1.go:1639 +0x267
|
||||
runtime.park_m(0x82028af00)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/proc1.go:1698 +0x18b
|
||||
runtime.mcall(0x7fff5fbffa90)
|
||||
/var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build890014890/src/runtime/asm_amd64.s:204 +0x5b
|
||||
|
||||
goroutine 1 [running]:
|
||||
goroutine running on other thread; stack unavailable
|
||||
|
||||
rax 0xe
|
||||
rbx 0x703
|
||||
rcx 0x7fff5fbff898
|
||||
rdx 0x7fff5fbff910
|
||||
rdi 0x703
|
||||
rsi 0x238cc0
|
||||
rbp 0x239000
|
||||
rsp 0x7fff5fbff898
|
||||
r8 0x239000
|
||||
r9 0x8820290330
|
||||
r10 0x25ee08c1e
|
||||
r11 0x286
|
||||
r12 0x0
|
||||
r13 0x6d9e8a1cfd40
|
||||
r14 0x13fde99489843000
|
||||
r15 0x238960
|
||||
rip 0x5669b
|
||||
rflags 0x286
|
||||
cs 0x7
|
||||
fs 0x0
|
||||
gs 0x0
|
||||
exit status 2
|
||||
@@ -1,4 +0,0 @@
"\x04\"M\x18na\x84Ƚ\xbf\xef]\x00\x01\x00\x02\x00\x00\x00\x18" +
"N\x02funcn\x02\x00\x00\x00\x18n\x02\x00\x00\x00\x18\x00\x02" +
"\x00\x00\x00\x18n\x02\x00\x00\x00\x80|\x18n\x00\x18n\x02\x00\x00\x00" +
"\x18n\x02\x00\x18n"
BIN vendor/github.com/pierrec/lz4/fuzz/crashers/ea0a00651ba4143c05fe7b5c85f69fe16a29a458 generated vendored
Binary file not shown.
@@ -1,23 +0,0 @@
panic: runtime error: slice bounds out of range [recovered]
panic: runtime error: slice bounds out of range

goroutine 1 [running]:
io/ioutil.readAll.func1(0x8202b1e10)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/io/ioutil/ioutil.go:30 +0x228
github.com/pierrec/lz4.(*Reader).readBlock(0x820312000, 0x820316000, 0x10000, 0x10000, 0x8202900c0, 0x0, 0x0)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/reader.go:241 +0xc62
github.com/pierrec/lz4.(*Reader).Read(0x820312000, 0x820314000, 0x200, 0x200, 0x0, 0x0, 0x0)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/reader.go:178 +0x7a6
bytes.(*Buffer).ReadFrom(0x8202b1d68, 0x88204290f0, 0x820312000, 0x0, 0x0, 0x0)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/bytes/buffer.go:173 +0x3db
io/ioutil.readAll(0x88204290f0, 0x820312000, 0x200, 0x0, 0x0, 0x0, 0x0, 0x0)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/io/ioutil/ioutil.go:33 +0x1ed
io/ioutil.ReadAll(0x88204290f0, 0x820312000, 0x0, 0x0, 0x0, 0x0, 0x0)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/io/ioutil/ioutil.go:42 +0x80
github.com/pierrec/lz4/fuzz.Fuzz(0x8820479000, 0x13, 0x200000, 0x8202900b8)
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/github.com/pierrec/lz4/fuzz/lz4.go:11 +0x15f
github.com/dvyukov/go-fuzz/go-fuzz-dep.Main(0x1a7f18)
    /Users/pierrecurto/sandbox/src/github.com/dvyukov/go-fuzz/go-fuzz-dep/main.go:47 +0x14c
main.main()
    /var/folders/bw/wf4p9qr50pg23qb4py4028140000gp/T/go-fuzz-build320605510/src/go-fuzz-main/main.go:10 +0x23
exit status 2
@@ -1 +0,0 @@
"\x04\"M\x18M@\x00\x00B*M\f\x00'\x01000\xe4"
BIN vendor/github.com/pierrec/lz4/fuzz/lz4-fuzz.zip generated vendored
Binary file not shown.
@@ -1 +0,0 @@
SIGABRT: abort
10 vendor/github.com/pierrec/lz4/fuzz/suppressions/d159e91cdd6fcbee9e37460f96c597b70c590886 generated vendored
@@ -1,10 +0,0 @@
panic: runtime error: slice bounds out of range [recovered]
io/ioutil.readAll.func1
github.com/pierrec/lz4.(*Reader).readBlock
github.com/pierrec/lz4.(*Reader).Read
bytes.(*Buffer).ReadFrom
io/ioutil.readAll
io/ioutil.ReadAll
github.com/pierrec/lz4/fuzz.Fuzz
github.com/dvyukov/go-fuzz/go-fuzz-dep.Main
main.main
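The crasher and suppression files removed above were produced by go-fuzz exercising the frame decoder; every stack passes through github.com/pierrec/lz4/fuzz.Fuzz before reaching lz4.(*Reader).Read. For orientation, a minimal harness of that shape is sketched below. The vendored fuzz/lz4.go itself is not shown in this diff, so the body here is an assumption based on the traces and the usual go-fuzz conventions.

```go
// Sketch of a go-fuzz harness consistent with the stack traces above.
// Assumed reconstruction: the actual vendored fuzz/lz4.go is not shown here.
package fuzz

import (
    "bytes"
    "io/ioutil"

    "github.com/pierrec/lz4"
)

// Fuzz decompresses arbitrary bytes through the LZ4 frame Reader.
// Per the go-fuzz convention, it returns 1 for inputs worth keeping, 0 otherwise.
func Fuzz(data []byte) int {
    r := lz4.NewReader(bytes.NewReader(data))
    if _, err := ioutil.ReadAll(r); err != nil {
        return 0
    }
    return 1
}
```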
222 vendor/github.com/pierrec/lz4/internal/xxh32/xxh32zero.go generated vendored Normal file
@@ -0,0 +1,222 @@
// Package xxh32 implements the very fast XXH hashing algorithm (32 bits version).
// (https://github.com/Cyan4973/XXH/)
package xxh32

import (
    "encoding/binary"
)

const (
    prime32_1 uint32 = 2654435761
    prime32_2 uint32 = 2246822519
    prime32_3 uint32 = 3266489917
    prime32_4 uint32 = 668265263
    prime32_5 uint32 = 374761393

    prime32_1plus2 uint32 = 606290984
    prime32_minus1 uint32 = 1640531535
)

// XXHZero represents an xxhash32 object with seed 0.
type XXHZero struct {
    v1       uint32
    v2       uint32
    v3       uint32
    v4       uint32
    totalLen uint64
    buf      [16]byte
    bufused  int
}

// Sum appends the current hash to b and returns the resulting slice.
// It does not change the underlying hash state.
func (xxh XXHZero) Sum(b []byte) []byte {
    h32 := xxh.Sum32()
    return append(b, byte(h32), byte(h32>>8), byte(h32>>16), byte(h32>>24))
}

// Reset resets the Hash to its initial state.
func (xxh *XXHZero) Reset() {
    xxh.v1 = prime32_1plus2
    xxh.v2 = prime32_2
    xxh.v3 = 0
    xxh.v4 = prime32_minus1
    xxh.totalLen = 0
    xxh.bufused = 0
}

// Size returns the number of bytes returned by Sum().
func (xxh *XXHZero) Size() int {
    return 4
}

// BlockSize gives the minimum number of bytes accepted by Write().
func (xxh *XXHZero) BlockSize() int {
    return 1
}

// Write adds input bytes to the Hash.
// It never returns an error.
func (xxh *XXHZero) Write(input []byte) (int, error) {
    if xxh.totalLen == 0 {
        xxh.Reset()
    }
    n := len(input)
    m := xxh.bufused

    xxh.totalLen += uint64(n)

    r := len(xxh.buf) - m
    if n < r {
        copy(xxh.buf[m:], input)
        xxh.bufused += len(input)
        return n, nil
    }

    p := 0
    // Causes compiler to work directly from registers instead of stack:
    v1, v2, v3, v4 := xxh.v1, xxh.v2, xxh.v3, xxh.v4
    if m > 0 {
        // some data left from previous update
        copy(xxh.buf[xxh.bufused:], input[:r])
        xxh.bufused += len(input) - r

        // fast rotl(13)
        buf := xxh.buf[:16] // BCE hint.
        v1 = rol13(v1+binary.LittleEndian.Uint32(buf[:])*prime32_2) * prime32_1
        v2 = rol13(v2+binary.LittleEndian.Uint32(buf[4:])*prime32_2) * prime32_1
        v3 = rol13(v3+binary.LittleEndian.Uint32(buf[8:])*prime32_2) * prime32_1
        v4 = rol13(v4+binary.LittleEndian.Uint32(buf[12:])*prime32_2) * prime32_1
        p = r
        xxh.bufused = 0
    }

    for n := n - 16; p <= n; p += 16 {
        sub := input[p:][:16] //BCE hint for compiler
        v1 = rol13(v1+binary.LittleEndian.Uint32(sub[:])*prime32_2) * prime32_1
        v2 = rol13(v2+binary.LittleEndian.Uint32(sub[4:])*prime32_2) * prime32_1
        v3 = rol13(v3+binary.LittleEndian.Uint32(sub[8:])*prime32_2) * prime32_1
        v4 = rol13(v4+binary.LittleEndian.Uint32(sub[12:])*prime32_2) * prime32_1
    }
    xxh.v1, xxh.v2, xxh.v3, xxh.v4 = v1, v2, v3, v4

    copy(xxh.buf[xxh.bufused:], input[p:])
    xxh.bufused += len(input) - p

    return n, nil
}

// Sum32 returns the 32 bits Hash value.
func (xxh *XXHZero) Sum32() uint32 {
    h32 := uint32(xxh.totalLen)
    if h32 >= 16 {
        h32 += rol1(xxh.v1) + rol7(xxh.v2) + rol12(xxh.v3) + rol18(xxh.v4)
    } else {
        h32 += prime32_5
    }

    p := 0
    n := xxh.bufused
    buf := xxh.buf
    for n := n - 4; p <= n; p += 4 {
        h32 += binary.LittleEndian.Uint32(buf[p:p+4]) * prime32_3
        h32 = rol17(h32) * prime32_4
    }
    for ; p < n; p++ {
        h32 += uint32(buf[p]) * prime32_5
        h32 = rol11(h32) * prime32_1
    }

    h32 ^= h32 >> 15
    h32 *= prime32_2
    h32 ^= h32 >> 13
    h32 *= prime32_3
    h32 ^= h32 >> 16

    return h32
}

// ChecksumZero returns the 32bits Hash value.
func ChecksumZero(input []byte) uint32 {
    n := len(input)
    h32 := uint32(n)

    if n < 16 {
        h32 += prime32_5
    } else {
        v1 := prime32_1plus2
        v2 := prime32_2
        v3 := uint32(0)
        v4 := prime32_minus1
        p := 0
        for n := n - 16; p <= n; p += 16 {
            sub := input[p:][:16] //BCE hint for compiler
            v1 = rol13(v1+binary.LittleEndian.Uint32(sub[:])*prime32_2) * prime32_1
            v2 = rol13(v2+binary.LittleEndian.Uint32(sub[4:])*prime32_2) * prime32_1
            v3 = rol13(v3+binary.LittleEndian.Uint32(sub[8:])*prime32_2) * prime32_1
            v4 = rol13(v4+binary.LittleEndian.Uint32(sub[12:])*prime32_2) * prime32_1
        }
        input = input[p:]
        n -= p
        h32 += rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
    }

    p := 0
    for n := n - 4; p <= n; p += 4 {
        h32 += binary.LittleEndian.Uint32(input[p:p+4]) * prime32_3
        h32 = rol17(h32) * prime32_4
    }
    for p < n {
        h32 += uint32(input[p]) * prime32_5
        h32 = rol11(h32) * prime32_1
        p++
    }

    h32 ^= h32 >> 15
    h32 *= prime32_2
    h32 ^= h32 >> 13
    h32 *= prime32_3
    h32 ^= h32 >> 16

    return h32
}

// Uint32Zero hashes x with seed 0.
func Uint32Zero(x uint32) uint32 {
    h := prime32_5 + 4 + x*prime32_3
    h = rol17(h) * prime32_4
    h ^= h >> 15
    h *= prime32_2
    h ^= h >> 13
    h *= prime32_3
    h ^= h >> 16
    return h
}

func rol1(u uint32) uint32 {
    return u<<1 | u>>31
}

func rol7(u uint32) uint32 {
    return u<<7 | u>>25
}

func rol11(u uint32) uint32 {
    return u<<11 | u>>21
}

func rol12(u uint32) uint32 {
    return u<<12 | u>>20
}

func rol13(u uint32) uint32 {
    return u<<13 | u>>19
}

func rol17(u uint32) uint32 {
    return u<<17 | u>>15
}

func rol18(u uint32) uint32 {
    return u<<18 | u>>14
}
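For quick orientation, the new internal/xxh32 package exposes a streaming hasher (XXHZero) and one-shot helpers (ChecksumZero, Uint32Zero). A minimal sketch of both paths follows, using only identifiers defined in the file above; note that, being an internal package, it is only importable from within github.com/pierrec/lz4 itself, and the expected value quoted in the comment comes from the vendored test data.

```go
package main

import (
    "fmt"

    "github.com/pierrec/lz4/internal/xxh32"
)

func main() {
    data := []byte("abcdefghijklmnop")

    // Streaming use: Write then Sum32.
    var h xxh32.XXHZero
    h.Write(data)
    fmt.Printf("streaming: 0x%08x\n", h.Sum32())

    // One-shot use: ChecksumZero returns the same value for the same input.
    // Per the vendored test data, both calls report 0x9d2d8b62 for this input.
    fmt.Printf("one-shot:  0x%08x\n", xxh32.ChecksumZero(data))
}
```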
161 vendor/github.com/pierrec/lz4/internal/xxh32/xxh32zero_test.go generated vendored Normal file
@@ -0,0 +1,161 @@
package xxh32_test

import (
    "encoding/binary"
    "hash/crc32"
    "hash/fnv"
    "testing"

    "github.com/pierrec/lz4/internal/xxh32"
)

type test struct {
    sum             uint32
    data, printable string
}

var testdata = []test{
    // {0x02cc5d05, "", ""},
    // {0x550d7456, "a", ""},
    // {0x4999fc53, "ab", ""},
    // {0x32d153ff, "abc", ""},
    // {0xa3643705, "abcd", ""},
    // {0x9738f19b, "abcde", ""},
    // {0x8b7cd587, "abcdef", ""},
    // {0x9dd093b3, "abcdefg", ""},
    // {0x0bb3c6bb, "abcdefgh", ""},
    // {0xd03c13fd, "abcdefghi", ""},
    // {0x8b988cfe, "abcdefghij", ""},
    {0x9d2d8b62, "abcdefghijklmnop", ""},
    // {0x42ae804d, "abcdefghijklmnopqrstuvwxyz0123456789", ""},
    // {0x62b4ed00, "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.", ""},
}

func init() {
    for i := range testdata {
        d := &testdata[i]
        if len(d.data) > 20 {
            d.printable = d.data[:20]
        } else {
            d.printable = d.data
        }
    }
}

func TestZeroBlockSize(t *testing.T) {
    var xxh xxh32.XXHZero
    if s := xxh.BlockSize(); s <= 0 {
        t.Errorf("invalid BlockSize: %d", s)
    }
}

func TestZeroSize(t *testing.T) {
    var xxh xxh32.XXHZero
    if s := xxh.Size(); s != 4 {
        t.Errorf("invalid Size: got %d expected 4", s)
    }
}

func TestZeroData(t *testing.T) {
    for i, td := range testdata {
        var xxh xxh32.XXHZero
        data := []byte(td.data)
        xxh.Write(data)
        if h := xxh.Sum32(); h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
            t.FailNow()
        }
        if h := xxh32.ChecksumZero(data); h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
            t.FailNow()
        }
    }
}

func TestZeroSplitData(t *testing.T) {
    for i, td := range testdata {
        var xxh xxh32.XXHZero
        data := []byte(td.data)
        l := len(data) / 2
        xxh.Write(data[0:l])
        xxh.Write(data[l:])
        h := xxh.Sum32()
        if h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
            t.FailNow()
        }
    }
}

func TestZeroSum(t *testing.T) {
    for i, td := range testdata {
        var xxh xxh32.XXHZero
        data := []byte(td.data)
        xxh.Write(data)
        b := xxh.Sum(data)
        if h := binary.LittleEndian.Uint32(b[len(data):]); h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
            t.FailNow()
        }
    }
}

func TestZeroChecksum(t *testing.T) {
    for i, td := range testdata {
        data := []byte(td.data)
        h := xxh32.ChecksumZero(data)
        if h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
            t.FailNow()
        }
    }
}

func TestZeroReset(t *testing.T) {
    var xxh xxh32.XXHZero
    for i, td := range testdata {
        xxh.Write([]byte(td.data))
        h := xxh.Sum32()
        if h != td.sum {
            t.Errorf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.data[:40], h, td.sum)
            t.FailNow()
        }
        xxh.Reset()
    }
}

///////////////////////////////////////////////////////////////////////////////
// Benchmarks
//
var testdata1 = []byte(testdata[len(testdata)-1].data)

func Benchmark_XXH32(b *testing.B) {
    var h xxh32.XXHZero
    for n := 0; n < b.N; n++ {
        h.Write(testdata1)
        h.Sum32()
        h.Reset()
    }
}

func Benchmark_XXH32_Checksum(b *testing.B) {
    for n := 0; n < b.N; n++ {
        xxh32.ChecksumZero(testdata1)
    }
}

func Benchmark_CRC32(b *testing.B) {
    t := crc32.MakeTable(0)
    for i := 0; i < b.N; i++ {
        crc32.Checksum(testdata1, t)
    }
}

func Benchmark_Fnv32(b *testing.B) {
    h := fnv.New32()
    for i := 0; i < b.N; i++ {
        h.Write(testdata1)
        h.Sum32()
        h.Reset()
    }
}
83 vendor/github.com/pierrec/lz4/lz4.go generated vendored
@@ -1,13 +1,5 @@
// Package lz4 implements reading and writing lz4 compressed data (a frame),
// as specified in http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html,
// using an io.Reader (decompression) and io.Writer (compression).
// It is designed to minimize memory usage while maximizing throughput by being able to
// [de]compress data concurrently.
//
// The Reader and the Writer support concurrent processing provided the supplied buffers are
// large enough (in multiples of BlockMaxSize) and there is no block dependency.
// Reader.WriteTo and Writer.ReadFrom do leverage the concurrency transparently.
// The runtime.GOMAXPROCS() value is used to apply concurrency or not.
// as specified in http://fastcompression.blogspot.fr/2013/04/lz4-streaming-format-final.html.
//
// Although the block level compression and decompression functions are exposed and are fully compatible
// with the lz4 block format definition, they are low level and should not be used directly.
@@ -15,29 +7,25 @@
// http://fastcompression.blogspot.fr/2011/05/lz4-explained.html
//
// See https://github.com/Cyan4973/lz4 for the reference C implementation.
//
package lz4

import (
    "hash"
    "sync"

    "github.com/pierrec/xxHash/xxHash32"
)

const (
    // Extension is the LZ4 frame file name extension
    Extension = ".lz4"
    // Version is the LZ4 frame format version
    Version = 1

    frameMagic = uint32(0x184D2204)
    frameSkipMagic = uint32(0x184D2A50)
    frameMagic uint32 = 0x184D2204
    frameSkipMagic uint32 = 0x184D2A50

    // The following constants are used to setup the compression algorithm.
    minMatch = 4 // the minimum size of the match sequence size (4 bytes)
    winSizeLog = 16 // LZ4 64Kb window size limit
    winSize = 1 << winSizeLog
    winMask = winSize - 1 // 64Kb window of previous data for dependent blocks
    minMatch = 4 // the minimum size of the match sequence size (4 bytes)
    winSizeLog = 16 // LZ4 64Kb window size limit
    winSize = 1 << winSizeLog
    winMask = winSize - 1 // 64Kb window of previous data for dependent blocks
    compressedBlockFlag = 1 << 31
    compressedBlockMask = compressedBlockFlag - 1

    // hashLog determines the size of the hash table used to quickly find a previous match position.
    // Its value influences the compression speed and memory usage, the lower the faster,
@@ -49,13 +37,13 @@ const (

    mfLimit = 8 + minMatch // The last match cannot start within the last 12 bytes.
    skipStrength = 6 // variable step for fast scan

    hasher = uint32(2654435761) // prime number used to hash minMatch
)

// map the block max size id with its value in bytes: 64Kb, 256Kb, 1Mb and 4Mb.
var bsMapID = map[byte]int{4: 64 << 10, 5: 256 << 10, 6: 1 << 20, 7: 4 << 20}
var bsMapValue = map[int]byte{}
var (
    bsMapID = map[byte]int{4: 64 << 10, 5: 256 << 10, 6: 1 << 20, 7: 4 << 20}
    bsMapValue = make(map[int]byte, len(bsMapID))
)

// Reversed.
func init() {
@@ -65,41 +53,16 @@ func init() {
}

// Header describes the various flags that can be set on a Writer or obtained from a Reader.
// The default values match those of the LZ4 frame format definition (http://fastcompression.blogspot.com/2013/04/lz4-streaming-format-final.html).
// The default values match those of the LZ4 frame format definition
// (http://fastcompression.blogspot.com/2013/04/lz4-streaming-format-final.html).
//
// NB. in a Reader, in case of concatenated frames, the Header values may change between Read() calls.
// It is the caller responsibility to check them if necessary (typically when using the Reader concurrency).
// It is the caller responsibility to check them if necessary.
type Header struct {
    BlockDependency bool // compressed blocks are dependent (one block depends on the last 64Kb of the previous one)
    BlockChecksum bool // compressed blocks are checksumed
    NoChecksum bool // frame checksum
    BlockMaxSize int // the size of the decompressed data block (one of [64KB, 256KB, 1MB, 4MB]). Default=4MB.
    Size uint64 // the frame total size. It is _not_ computed by the Writer.
    HighCompression bool // use high compression (only for the Writer)
    done bool // whether the descriptor was processed (Read or Write and checked)
    // Removed as not supported
    // Dict bool // a dictionary id is to be used
    // DictID uint32 // the dictionary id read from the frame, if any.
}

// xxhPool wraps the standard pool for xxHash items.
// Putting items back in the pool automatically resets them.
type xxhPool struct {
    sync.Pool
}

func (p *xxhPool) Get() hash.Hash32 {
    return p.Pool.Get().(hash.Hash32)
}

func (p *xxhPool) Put(h hash.Hash32) {
    h.Reset()
    p.Pool.Put(h)
}

// hashPool is used by readers and writers and contains xxHash items.
var hashPool = xxhPool{
    Pool: sync.Pool{
        New: func() interface{} { return xxHash32.New(0) },
    },
    BlockChecksum bool // Compressed blocks checksum flag.
    NoChecksum bool // Frame checksum flag.
    BlockMaxSize int // Size of the uncompressed data block (one of [64KB, 256KB, 1MB, 4MB]). Default=4MB.
    Size uint64 // Frame total size. It is _not_ computed by the Writer.
    CompressionLevel int // Compression level (higher is better, use 0 for fastest compression).
    done bool // Header processed flag (Read or Write and checked).
}

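The Header changes above drop BlockDependency, HighCompression and the xxHash pool in favour of a CompressionLevel field and the vendored internal/xxh32 package. A hedged sketch of how these fields are typically set on the package's Writer follows; it assumes the NewWriter constructor and the embedded Header, which belong to the package's public API but are not shown in this diff, and the values chosen are illustrative only.

```go
package main

import (
    "io"
    "os"

    "github.com/pierrec/lz4"
)

func main() {
    // Compress stdin to stdout as an LZ4 frame.
    zw := lz4.NewWriter(os.Stdout)
    defer zw.Close()

    // Fields from the Header shown above; values here are examples.
    zw.Header.BlockMaxSize = 256 << 10 // one of 64KB, 256KB, 1MB, 4MB
    zw.Header.CompressionLevel = 0     // 0 selects the fastest compression

    if _, err := io.Copy(zw, os.Stdin); err != nil {
        panic(err)
    }
}
```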
29 vendor/github.com/pierrec/lz4/lz4_go1.10.go generated vendored Normal file
@@ -0,0 +1,29 @@
//+build go1.10

package lz4

import (
    "fmt"
    "strings"
)

func (h Header) String() string {
    var s strings.Builder

    s.WriteString(fmt.Sprintf("%T{", h))
    if h.BlockChecksum {
        s.WriteString("BlockChecksum: true ")
    }
    if h.NoChecksum {
        s.WriteString("NoChecksum: true ")
    }
    if bs := h.BlockMaxSize; bs != 0 && bs != 4<<20 {
        s.WriteString(fmt.Sprintf("BlockMaxSize: %d ", bs))
    }
    if l := h.CompressionLevel; l != 0 {
        s.WriteString(fmt.Sprintf("CompressionLevel: %d ", l))
    }
    s.WriteByte('}')

    return s.String()
}
Some files were not shown because too many files have changed in this diff.