[#2184] compression: Properly calculate upper bound

If the data is not compressible, allocating `len(data)` leads to a slice
reallocation. For compressible data the results at small sizes are flaky,
and we now allocate a bit more. Either way, it seems right to use the
function the library provides if we need to pick a size at all.
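
For illustration, a minimal standalone sketch of the problem and of the fix, written directly against the `github.com/klauspost/compress/zstd` API rather than the NeoFS wrapper (the variable names are only illustrative):

```
// Sketch: for incompressible input the zstd frame is larger than the input
// itself, so a destination buffer with cap == len(data) forces EncodeAll to
// grow (reallocate) it. MaxEncodedSize, the helper this commit switches to,
// returns a capacity that is always sufficient.
package main

import (
	"crypto/rand"
	"fmt"

	"github.com/klauspost/compress/zstd"
)

func main() {
	data := make([]byte, 128)
	rand.Read(data) // random bytes are effectively incompressible

	enc, _ := zstd.NewWriter(nil) // the writer is unused when only EncodeAll is called

	// Old approach: cap == len(data) is not enough, append inside EncodeAll reallocates.
	out := enc.EncodeAll(data, make([]byte, 0, len(data)))
	fmt.Println(len(out) > len(data)) // true

	// New approach: preallocate the real upper bound, no second allocation.
	bound := enc.MaxEncodedSize(len(data))
	out = enc.EncodeAll(data, make([]byte, 0, bound))
	fmt.Println(len(out) <= bound) // true
}
```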

```
name                                                           old time/op    new time/op    delta
Compression/size=128/zeroed_slice-8                              2.23µs ±12%    2.06µs ± 6%   -7.35%  (p=0.009 n=10+10)
Compression/size=128/not_so_random_slice_(block_=_123)-8         19.0µs ±10%    15.8µs ±16%  -17.09%  (p=0.000 n=9+10)
Compression/size=128/random_slice-8                              17.6µs ±15%    16.1µs ±16%     ~     (p=0.075 n=10+10)
Compression/size=1024/zeroed_slice-8                             3.05µs ±11%    2.84µs ±10%     ~     (p=0.089 n=10+10)
Compression/size=1024/not_so_random_slice_(block_=_123)-8        18.1µs ± 6%    18.2µs ±12%     ~     (p=0.971 n=10+10)
Compression/size=1024/random_slice-8                             48.6µs ± 6%    45.6µs ± 5%   -6.07%  (p=0.006 n=10+9)
Compression/size=32768/zeroed_slice-8                            26.8µs ± 3%    28.7µs ± 8%   +7.23%  (p=0.001 n=10+10)
Compression/size=32768/not_so_random_slice_(block_=_123)-8       44.3µs ± 8%    43.7µs ±13%     ~     (p=0.762 n=8+10)
Compression/size=32768/random_slice-8                            97.3µs ±32%    68.9µs ±15%  -29.13%  (p=0.000 n=10+10)
Compression/size=33554432/zeroed_slice-8                         29.8ms ± 9%    30.3ms ±17%     ~     (p=1.000 n=9+9)
Compression/size=33554432/not_so_random_slice_(block_=_123)-8    33.1ms ±14%    30.3ms ±11%   -8.61%  (p=0.043 n=10+10)
Compression/size=33554432/random_slice-8                         41.7ms ± 3%    30.1ms ± 8%  -27.72%  (p=0.000 n=9+10)

name                                                           old alloc/op   new alloc/op   delta
Compression/size=128/zeroed_slice-8                                128B ± 0%      144B ± 0%  +12.50%  (p=0.000 n=10+10)
Compression/size=128/not_so_random_slice_(block_=_123)-8           384B ± 0%      144B ± 0%  -62.50%  (p=0.000 n=10+10)
Compression/size=128/random_slice-8                                384B ± 0%      144B ± 0%  -62.50%  (p=0.000 n=10+10)
Compression/size=1024/zeroed_slice-8                             1.02kB ± 0%    1.15kB ± 0%  +12.50%  (p=0.000 n=10+10)
Compression/size=1024/not_so_random_slice_(block_=_123)-8        1.02kB ± 0%    1.15kB ± 0%  +12.50%  (p=0.000 n=10+10)
Compression/size=1024/random_slice-8                             2.56kB ± 0%    1.15kB ± 0%  -55.00%  (p=0.000 n=10+10)
Compression/size=32768/zeroed_slice-8                            32.8kB ± 0%    41.0kB ± 0%  +25.00%  (p=0.000 n=10+10)
Compression/size=32768/not_so_random_slice_(block_=_123)-8       32.8kB ± 0%    41.0kB ± 0%  +25.00%  (p=0.000 n=10+10)
Compression/size=32768/random_slice-8                            81.9kB ± 0%    41.0kB ± 0%  -50.00%  (p=0.000 n=10+10)
Compression/size=33554432/zeroed_slice-8                         33.6MB ± 0%    33.6MB ± 0%   +0.02%  (p=0.000 n=9+9)
Compression/size=33554432/not_so_random_slice_(block_=_123)-8    33.6MB ± 0%    33.6MB ± 0%   +0.02%  (p=0.000 n=8+10)
Compression/size=33554432/random_slice-8                         75.5MB ± 0%    33.6MB ± 0%  -55.55%  (p=0.000 n=10+10)

name                                                           old allocs/op  new allocs/op  delta
Compression/size=128/zeroed_slice-8                                1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=128/not_so_random_slice_(block_=_123)-8           2.00 ± 0%      1.00 ± 0%  -50.00%  (p=0.000 n=10+10)
Compression/size=128/random_slice-8                                2.00 ± 0%      1.00 ± 0%  -50.00%  (p=0.000 n=10+10)
Compression/size=1024/zeroed_slice-8                               1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=1024/not_so_random_slice_(block_=_123)-8          1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=1024/random_slice-8                               2.00 ± 0%      1.00 ± 0%  -50.00%  (p=0.000 n=10+10)
Compression/size=32768/zeroed_slice-8                              1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=32768/not_so_random_slice_(block_=_123)-8         1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=32768/random_slice-8                              2.00 ± 0%      1.00 ± 0%  -50.00%  (p=0.000 n=10+10)
Compression/size=33554432/zeroed_slice-8                           1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=33554432/not_so_random_slice_(block_=_123)-8      1.00 ± 0%      1.00 ± 0%     ~     (all equal)
Compression/size=33554432/random_slice-8                           2.00 ± 0%      1.00 ± 0%  -50.00%  (p=0.000 n=10+10)
```
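
The alloc/op rows show the trade-off directly: for zeroed and repetitive (compressible) inputs the preallocated buffer is now slightly larger than the input (presumably the `MaxEncodedSize` bound, e.g. 144B for a 128B payload and 41.0kB for 32KiB), while for random (incompressible) inputs the second allocation and copy disappear, halving allocs/op and roughly halving allocated bytes.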

Signed-off-by: Evgenii Stratonikov <e.stratonikov@yadro.com>
Authored by Evgenii Stratonikov on 2022-12-28 15:47:58 +03:00, committed by Anton Nikiforov
parent 874be99076 · commit 0272218eb9
5 changed files with 53 additions and 2 deletions

```
@@ -9,6 +9,7 @@ Changelog for NeoFS Node
 ### Removed
 ### Updated
 - `neo-go` to `v0.100.1`
+- `github.com/klauspost/compress` to `v1.15.13`
 ### Updating from v0.35.0
```

go.mod

```
@@ -14,7 +14,7 @@ require (
 	github.com/google/go-github/v39 v39.2.0
 	github.com/google/uuid v1.3.0
 	github.com/hashicorp/golang-lru v0.5.4
-	github.com/klauspost/compress v1.15.9
+	github.com/klauspost/compress v1.15.13
 	github.com/mitchellh/go-homedir v1.1.0
 	github.com/mr-tron/base58 v1.2.0
 	github.com/multiformats/go-multiaddr v0.4.0
```

go.sum (diff not shown)

@@ -0,0 +1,49 @@
```
package compression

import (
	"crypto/rand"
	"fmt"
	"testing"

	"github.com/stretchr/testify/require"
)

func BenchmarkCompression(b *testing.B) {
	c := Config{Enabled: true}
	require.NoError(b, c.Init())

	for _, size := range []int{128, 1024, 32 * 1024, 32 * 1024 * 1024} {
		b.Run(fmt.Sprintf("size=%d", size), func(b *testing.B) {
			b.Run("zeroed slice", func(b *testing.B) {
				data := make([]byte, size)
				benchWith(b, c, data)
			})
			b.Run("not so random slice (block = 123)", func(b *testing.B) {
				data := notSoRandomSlice(size, 123)
				benchWith(b, c, data)
			})
			b.Run("random slice", func(b *testing.B) {
				data := make([]byte, size)
				rand.Read(data)
				benchWith(b, c, data)
			})
		})
	}
}

func benchWith(b *testing.B, c Config, data []byte) {
	b.ResetTimer()
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		_ = c.Compress(data)
	}
}

func notSoRandomSlice(size, blockSize int) []byte {
	data := make([]byte, size)
	rand.Read(data[:blockSize])
	for i := blockSize; i < size; i += blockSize {
		copy(data[i:], data[:blockSize])
	}
	return data
}
```
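
The comparison above is in `benchstat` format; presumably it was produced by running this benchmark several times before and after the change (for example `go test -bench=BenchmarkCompression -benchmem -count=10`) and feeding both outputs to `benchstat`.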

```
@@ -86,7 +86,8 @@ func (c *Config) Compress(data []byte) []byte {
 	if c == nil || !c.Enabled {
 		return data
 	}
-	return c.encoder.EncodeAll(data, make([]byte, 0, len(data)))
+	maxSize := c.encoder.MaxEncodedSize(len(data))
+	return c.encoder.EncodeAll(data, make([]byte, 0, maxSize))
 }

 // Close closes encoder and decoder, returns any error occurred.
```