Restore decompression benchmarks
The benchmarks were deleted in #2640, but we could use them to evaluate zstd further. Signed-off-by: Kazuyoshi Kato <katokazu@amazon.com>
This commit is contained in:
parent
42c6be80e3
commit
a42688639c
68
archive/compression/benchmark_test.go
Normal file
68
archive/compression/benchmark_test.go
Normal file
@ -0,0 +1,68 @@
|
|||||||
|
/*
|
||||||
|
Copyright The containerd Authors.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package compression
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
const benchmarkTestDataURL = "https://git.io/fADcl"
|
||||||
|
|
||||||
|
func BenchmarkDecompression(b *testing.B) {
|
||||||
|
resp, err := http.Get(benchmarkTestDataURL)
|
||||||
|
require.NoError(b, err)
|
||||||
|
|
||||||
|
data, err := ioutil.ReadAll(resp.Body)
|
||||||
|
require.NoError(b, err)
|
||||||
|
resp.Body.Close()
|
||||||
|
|
||||||
|
const mib = 1024 * 1024
|
||||||
|
sizes := []int{32, 64, 128, 256}
|
||||||
|
|
||||||
|
for _, sizeInMiB := range sizes {
|
||||||
|
size := sizeInMiB * mib
|
||||||
|
for len(data) < size {
|
||||||
|
data = append(data, data...)
|
||||||
|
}
|
||||||
|
data = data[0:size]
|
||||||
|
|
||||||
|
gz := testCompress(b, data, Gzip)
|
||||||
|
zstd := testCompress(b, data, Zstd)
|
||||||
|
|
||||||
|
b.Run(fmt.Sprintf("size=%dMiB", sizeInMiB), func(b *testing.B) {
|
||||||
|
b.Run("gzip", func(b *testing.B) {
|
||||||
|
testDecompress(b, gz)
|
||||||
|
})
|
||||||
|
b.Run("zstd", func(b *testing.B) {
|
||||||
|
testDecompress(b, zstd)
|
||||||
|
})
|
||||||
|
if unpigzPath != "" {
|
||||||
|
original := unpigzPath
|
||||||
|
unpigzPath = ""
|
||||||
|
b.Run("gzipPureGo", func(b *testing.B) {
|
||||||
|
testDecompress(b, gz)
|
||||||
|
})
|
||||||
|
unpigzPath = original
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
@ -40,7 +40,7 @@ func TestMain(m *testing.M) {
|
|||||||
// generateData generates data that composed of 2 random parts
|
// generateData generates data that composed of 2 random parts
|
||||||
// and single zero-filled part within them.
|
// and single zero-filled part within them.
|
||||||
// Typically, the compression ratio would be about 67%.
|
// Typically, the compression ratio would be about 67%.
|
||||||
func generateData(t *testing.T, size int) []byte {
|
func generateData(t testing.TB, size int) []byte {
|
||||||
part0 := size / 3 // random
|
part0 := size / 3 // random
|
||||||
part2 := size / 3 // random
|
part2 := size / 3 // random
|
||||||
part1 := size - part0 - part2 // zero-filled
|
part1 := size - part0 - part2 // zero-filled
|
||||||
@ -56,8 +56,8 @@ func generateData(t *testing.T, size int) []byte {
|
|||||||
return append(part0Data, append(part1Data, part2Data...)...)
|
return append(part0Data, append(part1Data, part2Data...)...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func testCompressDecompress(t *testing.T, size int, compression Compression) DecompressReadCloser {
|
func testCompress(t testing.TB, orig []byte, compression Compression) []byte {
|
||||||
orig := generateData(t, size)
|
size := len(orig)
|
||||||
var b bytes.Buffer
|
var b bytes.Buffer
|
||||||
compressor, err := CompressStream(&b, compression)
|
compressor, err := CompressStream(&b, compression)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -67,14 +67,11 @@ func testCompressDecompress(t *testing.T, size int, compression Compression) Dec
|
|||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
compressor.Close()
|
compressor.Close()
|
||||||
compressed := b.Bytes()
|
|
||||||
t.Logf("compressed %d bytes to %d bytes (%.2f%%)",
|
return b.Bytes()
|
||||||
len(orig), len(compressed), 100.0*float32(len(compressed))/float32(len(orig)))
|
|
||||||
if compared := bytes.Compare(orig, compressed); (compression == Uncompressed && compared != 0) ||
|
|
||||||
(compression != Uncompressed && compared == 0) {
|
|
||||||
t.Fatal("strange compressed data")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func testDecompress(t testing.TB, compressed []byte) ([]byte, DecompressReadCloser) {
|
||||||
decompressor, err := DecompressStream(bytes.NewReader(compressed))
|
decompressor, err := DecompressStream(bytes.NewReader(compressed))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
@ -83,6 +80,20 @@ func testCompressDecompress(t *testing.T, size int, compression Compression) Dec
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
t.Fatal(err)
|
t.Fatal(err)
|
||||||
}
|
}
|
||||||
|
return decompressed, decompressor
|
||||||
|
}
|
||||||
|
|
||||||
|
func testCompressDecompress(t testing.TB, size int, compression Compression) DecompressReadCloser {
|
||||||
|
orig := generateData(t, size)
|
||||||
|
compressed := testCompress(t, orig, compression)
|
||||||
|
t.Logf("compressed %d bytes to %d bytes (%.2f%%)",
|
||||||
|
len(orig), len(compressed), 100.0*float32(len(compressed))/float32(len(orig)))
|
||||||
|
if compared := bytes.Compare(orig, compressed); (compression == Uncompressed && compared != 0) ||
|
||||||
|
(compression != Uncompressed && compared == 0) {
|
||||||
|
t.Fatal("strange compressed data")
|
||||||
|
}
|
||||||
|
|
||||||
|
decompressed, decompressor := testDecompress(t, compressed)
|
||||||
if !bytes.Equal(orig, decompressed) {
|
if !bytes.Equal(orig, decompressed) {
|
||||||
t.Fatal("strange decompressed data")
|
t.Fatal("strange decompressed data")
|
||||||
}
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user