storage: optimize (*MemCachedStore).Persist for memory-backed ps

Most of the time the cache is persisted into a MemoryStore or another
MemCachedStore. When that's the case there is no real need to go through the
Batch mechanism, which incurs multiple copies of the data; we can write into
the lower store directly.

Importing 1.5M mainnet blocks with verification turned off, before:
real    12m39,484s
user    20m48,300s
sys     2m25,022s

After:
real    11m15,053s
user    18m2,755s
sys     2m4,162s

So it's around a 10% improvement, which looks good enough.
Roman Khimov 2020-03-27 15:40:23 +03:00
parent 198fffb9b8
commit 4758de71ec
2 changed files with 49 additions and 13 deletions
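
The gist of the change, as a standalone sketch (kvStore, memStore and the flush
helpers below are illustrative stand-ins, not this package's actual Store API):
the Batch path copies every key/value pair into an intermediate batch and then
again into the target store, while a memory-backed target can be written to
directly.

// A standalone sketch, not the project's code: kvStore, memStore and the
// flush helpers are illustrative stand-ins for Store, MemoryStore and
// MemCachedStore.Persist.
package main

import "fmt"

// kvStore is a stand-in for a persistent key-value backend.
type kvStore interface {
	PutBatch(batch map[string][]byte)
}

// memStore is a stand-in for a memory-backed store.
type memStore struct {
	m map[string][]byte
}

// PutBatch copies the batch contents into the map, so data handed to it has
// already been copied once by the caller and gets copied again here.
func (s *memStore) PutBatch(batch map[string][]byte) {
	for k, v := range batch {
		s.m[k] = v
	}
}

// flushViaBatch is the generic path: build an intermediate batch (first copy),
// then let the store apply it (second copy).
func flushViaBatch(cache map[string][]byte, ps kvStore) {
	batch := make(map[string][]byte, len(cache))
	for k, v := range cache {
		batch[k] = v
	}
	ps.PutBatch(batch)
}

// flushDirect detects a memory-backed store and writes into it directly,
// skipping the batch allocation and the extra copy.
func flushDirect(cache map[string][]byte, ps kvStore) {
	if ms, ok := ps.(*memStore); ok {
		for k, v := range cache {
			ms.m[k] = v
		}
		return
	}
	flushViaBatch(cache, ps)
}

func main() {
	ps := &memStore{m: make(map[string][]byte)}
	flushDirect(map[string][]byte{"key": []byte("value")}, ps)
	fmt.Println(string(ps.m["key"])) // value
}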

@@ -93,20 +93,42 @@ func (s *MemCachedStore) Seek(key []byte, f func(k, v []byte)) {
 // Persist flushes all the MemoryStore contents into the (supposedly) persistent
 // store ps.
 func (s *MemCachedStore) Persist() (int, error) {
+	var err error
+	var keys, dkeys int
+
 	s.mut.Lock()
 	defer s.mut.Unlock()
-	batch := s.ps.Batch()
-	keys, dkeys := 0, 0
-	for k, v := range s.mem {
-		batch.Put([]byte(k), v)
-		keys++
+
+	keys = len(s.mem)
+	dkeys = len(s.del)
+	if keys == 0 && dkeys == 0 {
+		return 0, nil
 	}
-	for k := range s.del {
-		batch.Delete([]byte(k))
-		dkeys++
+
+	memStore, ok := s.ps.(*MemoryStore)
+	if !ok {
+		memCachedStore, ok := s.ps.(*MemCachedStore)
+		if ok {
+			memStore = &memCachedStore.MemoryStore
+		}
 	}
-	var err error
-	if keys != 0 || dkeys != 0 {
-		err = s.ps.PutBatch(batch)
+	if memStore != nil {
+		memStore.mut.Lock()
+		for k := range s.mem {
+			memStore.put(k, s.mem[k])
+		}
+		for k := range s.del {
+			memStore.drop(k)
+		}
+		memStore.mut.Unlock()
+	} else {
+		batch := s.ps.Batch()
+		for k := range s.mem {
+			batch.Put([]byte(k), s.mem[k])
+		}
+		for k := range s.del {
+			batch.Delete([]byte(k))
+		}
+		err = s.ps.PutBatch(batch)
 	}
 	if err == nil {
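
With this in place, a MemCachedStore layered over a MemoryStore (or over
another MemCachedStore) persists by putting and dropping keys under the lower
store's lock directly, with no Batch allocation and no PutBatch call. A rough
usage sketch, assuming it sits in the same storage package and that Put/Get
keep their usual key/value signatures on these stores:

// persistIntoMemoryBackedStore is a usage sketch, not part of the change.
func persistIntoMemoryBackedStore() error {
	ps := NewMemoryStore()      // memory-backed lower store
	ts := NewMemCachedStore(ps) // cache layer on top

	if err := ts.Put([]byte("key"), []byte("value")); err != nil {
		return err
	}
	// ps is a *MemoryStore, so Persist takes the direct put/drop path
	// above instead of building a Batch and calling PutBatch.
	if _, err := ts.Persist(); err != nil {
		return err
	}
	_, err := ps.Get([]byte("key")) // the value now lives in the lower store
	return err
}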


@@ -7,9 +7,7 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func TestMemCachedStorePersist(t *testing.T) {
-	// persistent Store
-	ps := NewMemoryStore()
+func testMemCachedStorePersist(t *testing.T, ps Store) {
 	// cached Store
 	ts := NewMemCachedStore(ps)
 	// persisting nothing should do nothing
@@ -94,6 +92,22 @@ func checkBatch(t *testing.T, ts *MemCachedStore, put []KeyValue, del []KeyValue
 	}
 }
 
+func TestMemCachedPersist(t *testing.T) {
+	t.Run("MemoryStore", func(t *testing.T) {
+		ps := NewMemoryStore()
+		testMemCachedStorePersist(t, ps)
+	})
+	t.Run("MemoryCachedStore", func(t *testing.T) {
+		ps1 := NewMemoryStore()
+		ps2 := NewMemCachedStore(ps1)
+		testMemCachedStorePersist(t, ps2)
+	})
+	t.Run("BoltDBStore", func(t *testing.T) {
+		ps := newBoltStoreForTesting(t)
+		testMemCachedStorePersist(t, ps)
+	})
+}
+
 func TestCachedGetFromPersistent(t *testing.T) {
 	key := []byte("key")
 	value := []byte("value")