Dep helper (#2151)

* Add dep task to update go dependencies

* Update go dependencies
This commit is contained in:
Manuel Alejandro de Brito Fontes
2018-09-29 19:47:07 -03:00
committed by Miek Gieben
parent 8f8b81f56b
commit 0e8977761d
764 changed files with 172 additions and 267451 deletions

View File

@@ -1,119 +0,0 @@
package lz4_test
import (
"bytes"
"io"
"io/ioutil"
"testing"
"github.com/pierrec/lz4"
)
// BenchmarkCompress measures fast block compression of the pg1661 corpus,
// reusing a preallocated hash table and destination buffer.
func BenchmarkCompress(b *testing.B) {
	var table [1 << 16]int
	dst := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		lz4.CompressBlock(pg1661, dst, table[:])
	}
}
// BenchmarkCompressHC measures high-compression block encoding of the
// pg1661 corpus at compression level 16.
func BenchmarkCompressHC(b *testing.B) {
	dst := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		lz4.CompressBlockHC(pg1661, dst, 16)
	}
}
// BenchmarkUncompress measures block decompression of the pre-compressed
// pg1661 corpus into a preallocated buffer.
func BenchmarkUncompress(b *testing.B) {
	dst := make([]byte, len(pg1661))

	b.ReportAllocs()
	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		lz4.UncompressBlock(pg1661LZ4, dst)
	}
}
// mustLoadFile returns the contents of file f, panicking on any read
// error. It is used to load benchmark fixtures at package init time.
func mustLoadFile(f string) []byte {
	data, err := ioutil.ReadFile(f)
	if err != nil {
		panic(err)
	}
	return data
}
// Benchmark fixtures, loaded once at package initialization.

// Raw reference files.
var (
	pg1661 = mustLoadFile("testdata/pg1661.txt")
	digits = mustLoadFile("testdata/e.txt")
	twain  = mustLoadFile("testdata/Mark.Twain-Tom.Sawyer.txt")
	random = mustLoadFile("testdata/random.data")
)

// LZ4-compressed counterparts of the raw files above.
var (
	pg1661LZ4 = mustLoadFile("testdata/pg1661.txt.lz4")
	digitsLZ4 = mustLoadFile("testdata/e.txt.lz4")
	twainLZ4  = mustLoadFile("testdata/Mark.Twain-Tom.Sawyer.txt.lz4")
	randomLZ4 = mustLoadFile("testdata/random.data.lz4")
)
// benchmarkUncompress reports streaming decompression throughput for the
// given lz4-framed payload, reusing one reader pair across iterations.
func benchmarkUncompress(b *testing.B, compressed []byte) {
	src := bytes.NewReader(compressed)
	zr := lz4.NewReader(src)

	// One full pass up front yields the uncompressed size for SetBytes.
	size, err := io.Copy(ioutil.Discard, zr)
	if err != nil {
		b.Fatal(err)
	}
	b.SetBytes(size)
	b.ReportAllocs()
	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		src.Reset(compressed)
		zr.Reset(src)
		io.Copy(ioutil.Discard, zr)
	}
}
// Streaming decompression benchmarks over the standard corpora.
func BenchmarkUncompressPg1661(b *testing.B) { benchmarkUncompress(b, pg1661LZ4) }
func BenchmarkUncompressDigits(b *testing.B) { benchmarkUncompress(b, digitsLZ4) }
func BenchmarkUncompressTwain(b *testing.B) { benchmarkUncompress(b, twainLZ4) }
func BenchmarkUncompressRand(b *testing.B) { benchmarkUncompress(b, randomLZ4) }
// benchmarkCompress reports streaming compression throughput for the given
// raw payload, reusing one writer/reader pair across iterations.
func benchmarkCompress(b *testing.B, uncompressed []byte) {
	var sink bytes.Buffer
	zw := lz4.NewWriter(&sink)
	src := bytes.NewReader(uncompressed)

	// One full pass up front yields the compressed size for SetBytes.
	size, err := io.Copy(zw, src)
	if err != nil {
		b.Fatal(err)
	}
	if err = zw.Close(); err != nil {
		b.Fatal(err)
	}
	b.SetBytes(size)
	b.ReportAllocs()
	b.ResetTimer()

	for n := 0; n < b.N; n++ {
		src.Reset(uncompressed)
		zw.Reset(&sink)
		io.Copy(zw, src)
	}
}
// Streaming compression benchmarks over the standard corpora.
func BenchmarkCompressPg1661(b *testing.B) { benchmarkCompress(b, pg1661) }
func BenchmarkCompressDigits(b *testing.B) { benchmarkCompress(b, digits) }
func BenchmarkCompressTwain(b *testing.B) { benchmarkCompress(b, twain) }
func BenchmarkCompressRand(b *testing.B) { benchmarkCompress(b, random) }

View File

@@ -1,98 +0,0 @@
//+build go1.9
package lz4_test
import (
"fmt"
"io/ioutil"
"reflect"
"testing"
"github.com/pierrec/lz4"
)
// testcase describes one fixture file for the block round-trip test.
type testcase struct {
// file is the path of the fixture under testdata.
file string
// compressible indicates whether compression is expected to shrink the data.
compressible bool
// src holds the file contents, populated at test time.
src []byte
}
// rawFiles lists the fixture files exercised by TestCompressUncompressBlock.
// random.data is flagged as incompressible; src is filled in by the test.
var rawFiles = []testcase{
// {"testdata/207326ba-36f8-11e7-954a-aca46ba8ca73.png", true, nil},
{"testdata/e.txt", true, nil},
{"testdata/gettysburg.txt", true, nil},
{"testdata/Mark.Twain-Tom.Sawyer.txt", true, nil},
{"testdata/pg1661.txt", true, nil},
{"testdata/pi.txt", true, nil},
{"testdata/random.data", false, nil},
{"testdata/repeat.txt", true, nil},
}
// TestCompressUncompressBlock round-trips every fixture in rawFiles through
// block compression (both the fast and the HC variants) and verifies that
// uncompressing the result reproduces the input. It prints a per-file
// summary of the compressed sizes.
func TestCompressUncompressBlock(t *testing.T) {
// compressor abstracts over lz4.CompressBlock and lz4.CompressBlockHC.
type compressor func(s, d []byte) (int, error)
// run compresses tc.src with compress, verifies the round trip and
// returns the compressed size (0 when skipped or on failure).
run := func(tc testcase, compress compressor) int {
t.Helper()
src := tc.src
// Compress the data.
zbuf := make([]byte, lz4.CompressBlockBound(len(src)))
n, err := compress(src, zbuf)
if err != nil {
t.Error(err)
return 0
}
zbuf = zbuf[:n]
// Make sure that it was actually compressed unless not compressible.
if !tc.compressible {
return 0
}
if n == 0 || n >= len(src) {
t.Errorf("data not compressed: %d/%d", n, len(src))
return 0
}
// Uncompress the data.
buf := make([]byte, len(src))
n, err = lz4.UncompressBlock(zbuf, buf)
if err != nil {
t.Fatal(err)
}
buf = buf[:n]
if !reflect.DeepEqual(src, buf) {
t.Error("uncompressed compressed data not matching initial input")
return 0
}
return len(zbuf)
}
for _, tc := range rawFiles {
src, err := ioutil.ReadFile(tc.file)
if err != nil {
t.Fatal(err)
}
tc.src = src
var n, nhc int
// The anonymous outer subtest does not return until its parallel
// children have finished, so n and nhc are safe to read afterwards.
t.Run("", func(t *testing.T) {
tc := tc
t.Run(tc.file, func(t *testing.T) {
t.Parallel()
n = run(tc, func(src, dst []byte) (int, error) {
var ht [1 << 16]int
return lz4.CompressBlock(src, dst, ht[:])
})
})
t.Run(fmt.Sprintf("%s HC", tc.file), func(t *testing.T) {
t.Parallel()
nhc = run(tc, func(src, dst []byte) (int, error) {
return lz4.CompressBlockHC(src, dst, -1)
})
})
})
fmt.Printf("%-40s: %8d / %8d / %8d\n", tc.file, n, nhc, len(src))
}
}

View File

@@ -1,13 +0,0 @@
// Expose some internals for testing purposes
package lz4
// expose the possible block max sizes
var BlockMaxSizeItems []int
func init() {
// NOTE: map iteration order is randomized, so the order of the
// collected sizes is not deterministic across runs.
for s := range bsMapValue {
BlockMaxSizeItems = append(BlockMaxSizeItems, s)
}
}
// FrameSkipMagic re-exports the internal frame-skip magic number for tests.
var FrameSkipMagic = frameSkipMagic

View File

@@ -1,161 +0,0 @@
package xxh32_test
import (
"encoding/binary"
"hash/crc32"
"hash/fnv"
"testing"
"github.com/pierrec/lz4/internal/xxh32"
)
// test is a single xxh32 vector: the expected 32-bit sum, the input data,
// and a short printable label derived from data in init.
type test struct {
sum uint32
data, printable string
}
// testdata holds the xxh32 test vectors. All but one entry are disabled —
// presumably to keep the vendored test fast; confirm against upstream.
var testdata = []test{
// {0x02cc5d05, "", ""},
// {0x550d7456, "a", ""},
// {0x4999fc53, "ab", ""},
// {0x32d153ff, "abc", ""},
// {0xa3643705, "abcd", ""},
// {0x9738f19b, "abcde", ""},
// {0x8b7cd587, "abcdef", ""},
// {0x9dd093b3, "abcdefg", ""},
// {0x0bb3c6bb, "abcdefgh", ""},
// {0xd03c13fd, "abcdefghi", ""},
// {0x8b988cfe, "abcdefghij", ""},
{0x9d2d8b62, "abcdefghijklmnop", ""},
// {0x42ae804d, "abcdefghijklmnopqrstuvwxyz0123456789", ""},
// {0x62b4ed00, "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.", ""},
}
// init derives a short printable label (at most 20 characters) for every
// test vector; the label is used in failure messages.
func init() {
	for i := range testdata {
		td := &testdata[i]
		td.printable = td.data
		if len(td.printable) > 20 {
			td.printable = td.printable[:20]
		}
	}
}
// TestZeroBlockSize checks that XXHZero advertises a positive block size.
func TestZeroBlockSize(t *testing.T) {
	var d xxh32.XXHZero
	bs := d.BlockSize()
	if bs <= 0 {
		t.Errorf("invalid BlockSize: %d", bs)
	}
}
// TestZeroSize checks that XXHZero reports a 4-byte digest size.
func TestZeroSize(t *testing.T) {
	var d xxh32.XXHZero
	size := d.Size()
	if size != 4 {
		t.Errorf("invalid Size: got %d expected 4", size)
	}
}
// TestZeroData verifies both the streaming (Write/Sum32) and one-shot
// (ChecksumZero) digests against the known vectors in testdata.
func TestZeroData(t *testing.T) {
	for i, td := range testdata {
		var xxh xxh32.XXHZero
		data := []byte(td.data)
		xxh.Write(data)
		// Fatalf replaces the previous Errorf+FailNow pairs (same semantics).
		if h := xxh.Sum32(); h != td.sum {
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
		if h := xxh32.ChecksumZero(data); h != td.sum {
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
	}
}
// TestZeroSplitData verifies that feeding a vector in two Write calls
// yields the same digest as a single write.
func TestZeroSplitData(t *testing.T) {
	for i, td := range testdata {
		var xxh xxh32.XXHZero
		data := []byte(td.data)
		l := len(data) / 2
		xxh.Write(data[0:l])
		xxh.Write(data[l:])
		// Fatalf replaces the previous Errorf+FailNow pair (same semantics).
		if h := xxh.Sum32(); h != td.sum {
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
	}
}
// TestZeroSum verifies that Sum appends the 4 digest bytes
// (little-endian) to its argument.
func TestZeroSum(t *testing.T) {
	for i, td := range testdata {
		var xxh xxh32.XXHZero
		data := []byte(td.data)
		xxh.Write(data)
		b := xxh.Sum(data)
		// The digest occupies the bytes appended after the original data.
		// Fatalf replaces the previous Errorf+FailNow pair (same semantics).
		if h := binary.LittleEndian.Uint32(b[len(data):]); h != td.sum {
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
	}
}
// TestZeroChecksum verifies the one-shot ChecksumZero helper against the
// known vectors.
func TestZeroChecksum(t *testing.T) {
	for i, td := range testdata {
		data := []byte(td.data)
		// Fatalf replaces the previous Errorf+FailNow pair (same semantics).
		if h := xxh32.ChecksumZero(data); h != td.sum {
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
	}
}
// TestZeroReset verifies that Reset allows a single XXHZero instance to be
// reused across all test vectors.
func TestZeroReset(t *testing.T) {
	var xxh xxh32.XXHZero
	for i, td := range testdata {
		xxh.Write([]byte(td.data))
		h := xxh.Sum32()
		if h != td.sum {
			// Use the pre-truncated printable label: the previous
			// td.data[:40] would itself panic for vectors shorter than
			// 40 bytes (the only enabled vector is 16 bytes long).
			t.Fatalf("test %d: xxh32(%s)=0x%x expected 0x%x", i, td.printable, h, td.sum)
		}
		xxh.Reset()
	}
}
///////////////////////////////////////////////////////////////////////////////
// Benchmarks
//
// testdata1 is the last entry of testdata, used as the benchmark payload.
var testdata1 = []byte(testdata[len(testdata)-1].data)
// Benchmark_XXH32 measures a write/sum/reset cycle of the streaming
// xxh32 digest over the benchmark payload.
func Benchmark_XXH32(b *testing.B) {
	var d xxh32.XXHZero
	for i := 0; i < b.N; i++ {
		d.Write(testdata1)
		d.Sum32()
		d.Reset()
	}
}
// Benchmark_XXH32_Checksum measures the one-shot checksum helper over the
// benchmark payload.
func Benchmark_XXH32_Checksum(b *testing.B) {
	for i := 0; i < b.N; i++ {
		xxh32.ChecksumZero(testdata1)
	}
}
// Benchmark_CRC32 measures the stdlib CRC-32 checksum (table built from
// polynomial 0, matching the original benchmark) for comparison.
func Benchmark_CRC32(b *testing.B) {
	table := crc32.MakeTable(0)
	for n := 0; n < b.N; n++ {
		crc32.Checksum(testdata1, table)
	}
}
// Benchmark_Fnv32 measures the stdlib 32-bit FNV-1 hash for comparison.
func Benchmark_Fnv32(b *testing.B) {
	d := fnv.New32()
	for n := 0; n < b.N; n++ {
		d.Write(testdata1)
		d.Sum32()
		d.Reset()
	}
}

View File

@@ -1,59 +0,0 @@
package lz4_test
import (
"bytes"
"io"
"io/ioutil"
"os"
"reflect"
"strings"
"testing"
"github.com/pierrec/lz4"
)
// TestReader decompresses each golden .lz4 file and checks that the output
// matches the corresponding raw file, both in size and content.
func TestReader(t *testing.T) {
	goldenFiles := []string{
		"testdata/e.txt.lz4",
		"testdata/gettysburg.txt.lz4",
		"testdata/Mark.Twain-Tom.Sawyer.txt.lz4",
		"testdata/pg1661.txt.lz4",
		"testdata/pi.txt.lz4",
		"testdata/random.data.lz4",
		"testdata/repeat.txt.lz4",
	}
	for _, fname := range goldenFiles {
		// Shadow the loop variable BEFORE t.Run: the parallel subtest
		// closure must not capture a variable the loop keeps mutating.
		// (The original shadowed it inside the closure, after capture,
		// which only worked because it ran before t.Parallel.)
		fname := fname
		t.Run(fname, func(t *testing.T) {
			t.Parallel()
			f, err := os.Open(fname)
			if err != nil {
				t.Fatal(err)
			}
			defer f.Close()
			rawfile := strings.TrimSuffix(fname, ".lz4")
			raw, err := ioutil.ReadFile(rawfile)
			if err != nil {
				t.Fatal(err)
			}
			var out bytes.Buffer
			zr := lz4.NewReader(f)
			n, err := io.Copy(&out, zr)
			if err != nil {
				t.Fatal(err)
			}
			if got, want := int(n), len(raw); got != want {
				t.Errorf("invalid sizes: got %d; want %d", got, want)
			}
			if got, want := out.Bytes(), raw; !reflect.DeepEqual(got, want) {
				t.Fatal("uncompressed data does not match original")
			}
		})
	}
}

View File

@@ -1,78 +0,0 @@
package lz4_test
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"reflect"
"testing"
"github.com/pierrec/lz4"
)
// TestWriter round-trips each golden file through an lz4 Writer and Reader
// under a variety of frame headers, checking that the uncompressed output
// matches the original input.
func TestWriter(t *testing.T) {
	goldenFiles := []string{
		"testdata/e.txt",
		"testdata/gettysburg.txt",
		"testdata/Mark.Twain-Tom.Sawyer.txt",
		"testdata/pg1661.txt",
		"testdata/pi.txt",
		"testdata/random.data",
		"testdata/repeat.txt",
	}
	for _, fname := range goldenFiles {
		for _, header := range []lz4.Header{
			{}, // Default header.
			{BlockChecksum: true},
			{NoChecksum: true},
			{BlockMaxSize: 64 << 10}, // 64Kb
			{CompressionLevel: 10},
			{Size: 123},
		} {
			// Shadow the loop variables BEFORE t.Run: the parallel
			// subtest closure must not capture variables the loops keep
			// mutating. (The original shadowed them inside the closure,
			// after capture, which only worked because that code ran
			// before t.Parallel.)
			fname := fname
			header := header
			label := fmt.Sprintf("%s/%s", fname, header)
			t.Run(label, func(t *testing.T) {
				t.Parallel()
				raw, err := ioutil.ReadFile(fname)
				if err != nil {
					t.Fatal(err)
				}
				r := bytes.NewReader(raw)
				// Compress.
				var zout bytes.Buffer
				zw := lz4.NewWriter(&zout)
				zw.Header = header
				_, err = io.Copy(zw, r)
				if err != nil {
					t.Fatal(err)
				}
				err = zw.Close()
				if err != nil {
					t.Fatal(err)
				}
				// Uncompress.
				var out bytes.Buffer
				zr := lz4.NewReader(&zout)
				n, err := io.Copy(&out, zr)
				if err != nil {
					t.Fatal(err)
				}
				// The uncompressed data must be the same as the initial input.
				if got, want := int(n), len(raw); got != want {
					t.Errorf("invalid sizes: got %d; want %d", got, want)
				}
				if got, want := out.Bytes(), raw; !reflect.DeepEqual(got, want) {
					t.Fatal("uncompressed data does not match original")
				}
			})
		}
	}
}