package dao

import (
	"bytes"
	"context"
	"encoding/binary"
	"errors"
	"fmt"
	iocore "io"

	"github.com/nspcc-dev/neo-go/pkg/core/block"
	"github.com/nspcc-dev/neo-go/pkg/core/state"
	"github.com/nspcc-dev/neo-go/pkg/core/storage"
	"github.com/nspcc-dev/neo-go/pkg/core/transaction"
	"github.com/nspcc-dev/neo-go/pkg/io"
	"github.com/nspcc-dev/neo-go/pkg/smartcontract/trigger"
	"github.com/nspcc-dev/neo-go/pkg/util"
	"github.com/nspcc-dev/neo-go/pkg/util/slice"
)

// HasTransaction errors.
var (
	// ErrAlreadyExists is returned when the transaction already exists in dao.
	ErrAlreadyExists = errors.New("transaction already exists")
	// ErrHasConflicts is returned when the transaction is in the list of conflicting
	// transactions that are already in dao.
	ErrHasConflicts = errors.New("transaction has conflicts")
)

// Simple is a memCached wrapper around the DB, a simple DAO implementation.
type Simple struct {
	Version Version
	Store   *storage.MemCachedStore
	private bool
	keyBuf  []byte
	dataBuf *io.BufBinWriter
}

// NewSimple creates a new simple dao using the provided backend store.
func NewSimple(backend storage.Store, stateRootInHeader bool, p2pSigExtensions bool) *Simple {
	st := storage.NewMemCachedStore(backend)
	return newSimple(st, stateRootInHeader, p2pSigExtensions)
}

func newSimple(st *storage.MemCachedStore, stateRootInHeader bool, p2pSigExtensions bool) *Simple {
	return &Simple{
		Version: Version{
			StoragePrefix:     storage.STStorage,
			StateRootInHeader: stateRootInHeader,
			P2PSigExtensions:  p2pSigExtensions,
		},
		Store: st,
	}
}
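
// A minimal construction sketch (editor's example, not part of the original
// code), assuming the storage.NewMemoryStore helper from the storage package:
//
//	backend := storage.NewMemoryStore()
//	d := dao.NewSimple(backend, false, true) // no state root in header, P2P signature extensions on.
//	ver, err := d.GetVersion()               // err is non-nil on an empty store.
//	_, _ = ver, err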

// GetBatch returns the currently accumulated DB changeset.
func (dao *Simple) GetBatch() *storage.MemBatch {
	return dao.Store.GetBatch()
}

// GetWrapped returns a new DAO instance with another layer of wrapped
// MemCachedStore around the current DAO Store.
func (dao *Simple) GetWrapped() *Simple {
	d := NewSimple(dao.Store, dao.Version.StateRootInHeader, dao.Version.P2PSigExtensions)
	d.Version = dao.Version
	return d
}

// GetPrivate returns a new DAO instance with another layer of private
// MemCachedStore around the current DAO Store.
func (dao *Simple) GetPrivate() *Simple {
	d := &Simple{}
	*d = *dao                                              // Inherit everything...
	d.Store = storage.NewPrivateMemCachedStore(dao.Store) // except storage, wrap another layer.
	d.private = true
	return d
}
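
// A usage sketch for the layering above (editor's example, not part of the
// original code): changes made in a private DAO stay invisible to its parent
// until Persist is called on the private instance. d is a *Simple as above.
//
//	priv := d.GetPrivate()
//	priv.PutStorageItem(42, []byte("key"), state.StorageItem("value"))
//	_ = d.GetStorageItem(42, []byte("key")) // nil, not visible in the parent yet.
//	_, _ = priv.Persist()                   // flush private changes into d.Store.
//	_ = d.GetStorageItem(42, []byte("key")) // now returns the item.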

// GetAndDecode performs a get operation and decodes the result into the given
// serializable structure.
func (dao *Simple) GetAndDecode(entity io.Serializable, key []byte) error {
	entityBytes, err := dao.Store.Get(key)
	if err != nil {
		return err
	}
	reader := io.NewBinReaderFromBuf(entityBytes)
	entity.DecodeBinary(reader)
	return reader.Err
}

// putWithBuffer performs a put operation using buf as a pre-allocated buffer for serialization.
func (dao *Simple) putWithBuffer(entity io.Serializable, key []byte, buf *io.BufBinWriter) error {
	entity.EncodeBinary(buf.BinWriter)
	if buf.Err != nil {
		return buf.Err
	}
	dao.Store.Put(key, buf.Bytes())
	return nil
}

func (dao *Simple) makeContractIDKey(id int32) []byte {
	key := dao.getKeyBuf(5)
	key[0] = byte(storage.STContractID)
	binary.BigEndian.PutUint32(key[1:], uint32(id))
	return key
}

// DeleteContractID deletes the contract's ID-to-hash mapping.
func (dao *Simple) DeleteContractID(id int32) {
	dao.Store.Delete(dao.makeContractIDKey(id))
}

// PutContractID adds a mapping from the contract's ID to its hash.
func (dao *Simple) PutContractID(id int32, hash util.Uint160) {
	dao.Store.Put(dao.makeContractIDKey(id), hash.BytesBE())
}

// GetContractScriptHash retrieves the contract's hash given its ID.
func (dao *Simple) GetContractScriptHash(id int32) (util.Uint160, error) {
	var data = new(util.Uint160)
	if err := dao.GetAndDecode(data, dao.makeContractIDKey(id)); err != nil {
		return *data, err
	}
	return *data, nil
}
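
// A round-trip sketch for the ID-to-hash mapping (editor's example, not part
// of the original code), the id and hash values below are arbitrary:
//
//	h := util.Uint160{1, 2, 3}
//	d.PutContractID(42, h)
//	got, err := d.GetContractScriptHash(42) // got == h, err == nil.
//	_, _ = got, err
//	d.DeleteContractID(42)                  // subsequent lookups return storage.ErrKeyNotFound.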

// -- start NEP-17 transfer info.

func (dao *Simple) makeTTIKey(acc util.Uint160) []byte {
	key := dao.getKeyBuf(1 + util.Uint160Size)
	key[0] = byte(storage.STTokenTransferInfo)
	copy(key[1:], acc.BytesBE())
	return key
}

// GetTokenTransferInfo retrieves NEP-17 transfer info from the cache.
func (dao *Simple) GetTokenTransferInfo(acc util.Uint160) (*state.TokenTransferInfo, error) {
	key := dao.makeTTIKey(acc)
	bs := state.NewTokenTransferInfo()
	err := dao.GetAndDecode(bs, key)
	if err != nil && err != storage.ErrKeyNotFound {
		return nil, err
	}
	return bs, nil
}

// PutTokenTransferInfo saves NEP-17 transfer info in the cache.
func (dao *Simple) PutTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo) error {
	return dao.putTokenTransferInfo(acc, bs, dao.getDataBuf())
}

func (dao *Simple) putTokenTransferInfo(acc util.Uint160, bs *state.TokenTransferInfo, buf *io.BufBinWriter) error {
	return dao.putWithBuffer(bs, dao.makeTTIKey(acc), buf)
}

// -- end NEP-17 transfer info.

// -- start transfer log.

func (dao *Simple) getTokenTransferLogKey(acc util.Uint160, newestTimestamp uint64, index uint32, isNEP11 bool) []byte {
	key := dao.getKeyBuf(1 + util.Uint160Size + 8 + 4)
	if isNEP11 {
		key[0] = byte(storage.STNEP11Transfers)
	} else {
		key[0] = byte(storage.STNEP17Transfers)
	}
	copy(key[1:], acc.BytesBE())
	binary.BigEndian.PutUint64(key[1+util.Uint160Size:], newestTimestamp)
	binary.BigEndian.PutUint32(key[1+util.Uint160Size+8:], index)
	return key
}

// SeekNEP17TransferLog executes f for each NEP-17 transfer in log starting from
// the transfer with the newest timestamp up to the oldest transfer. It continues
// iteration until false is returned from f. The last non-nil error is returned.
func (dao *Simple) SeekNEP17TransferLog(acc util.Uint160, newestTimestamp uint64, f func(*state.NEP17Transfer) (bool, error)) error {
	key := dao.getTokenTransferLogKey(acc, newestTimestamp, 0, false)
	prefixLen := 1 + util.Uint160Size
	var seekErr error
	dao.Store.Seek(storage.SeekRange{
		Prefix:    key[:prefixLen],
		Start:     key[prefixLen : prefixLen+8],
		Backwards: true,
	}, func(k, v []byte) bool {
		lg := &state.TokenTransferLog{Raw: v}
		cont, err := lg.ForEachNEP17(f)
		if err != nil {
			seekErr = err
		}
		return cont
	})
	return seekErr
}
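
// An iteration sketch (editor's example, not part of the original code):
// count the ten most recent NEP-17 transfers of an account acc, starting from
// the largest possible timestamp.
//
//	var n int
//	err := d.SeekNEP17TransferLog(acc, ^uint64(0), func(tr *state.NEP17Transfer) (bool, error) {
//		n++
//		return n < 10, nil // stop after ten entries.
//	})
//	_ = err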

// SeekNEP11TransferLog executes f for each NEP-11 transfer in log starting from
// the transfer with the newest timestamp up to the oldest transfer. It continues
// iteration until false is returned from f. The last non-nil error is returned.
func (dao *Simple) SeekNEP11TransferLog(acc util.Uint160, newestTimestamp uint64, f func(*state.NEP11Transfer) (bool, error)) error {
	key := dao.getTokenTransferLogKey(acc, newestTimestamp, 0, true)
	prefixLen := 1 + util.Uint160Size
	var seekErr error
	dao.Store.Seek(storage.SeekRange{
		Prefix:    key[:prefixLen],
		Start:     key[prefixLen : prefixLen+8],
		Backwards: true,
	}, func(k, v []byte) bool {
		lg := &state.TokenTransferLog{Raw: v}
		cont, err := lg.ForEachNEP11(f)
		if err != nil {
			seekErr = err
		}
		return cont
	})
	return seekErr
}

// GetTokenTransferLog retrieves transfer log from the cache.
func (dao *Simple) GetTokenTransferLog(acc util.Uint160, newestTimestamp uint64, index uint32, isNEP11 bool) (*state.TokenTransferLog, error) {
	key := dao.getTokenTransferLogKey(acc, newestTimestamp, index, isNEP11)
	value, err := dao.Store.Get(key)
	if err != nil {
		if err == storage.ErrKeyNotFound {
			return new(state.TokenTransferLog), nil
		}
		return nil, err
	}
	return &state.TokenTransferLog{Raw: value}, nil
}

// PutTokenTransferLog saves given transfer log in the cache.
func (dao *Simple) PutTokenTransferLog(acc util.Uint160, start uint64, index uint32, isNEP11 bool, lg *state.TokenTransferLog) {
	key := dao.getTokenTransferLogKey(acc, start, index, isNEP11)
	dao.Store.Put(key, lg.Raw)
}

// -- end transfer log.

// -- start notification event.

func (dao *Simple) makeExecutableKey(hash util.Uint256) []byte {
	key := dao.getKeyBuf(1 + util.Uint256Size)
	key[0] = byte(storage.DataExecutable)
	copy(key[1:], hash.BytesBE())
	return key
}

// GetAppExecResults gets application execution results with the specified trigger from the
// given store.
func (dao *Simple) GetAppExecResults(hash util.Uint256, trig trigger.Type) ([]state.AppExecResult, error) {
	key := dao.makeExecutableKey(hash)
	bs, err := dao.Store.Get(key)
	if err != nil {
		return nil, err
	}
	r := io.NewBinReaderFromBuf(bs)
	switch r.ReadB() {
	case storage.ExecBlock:
		_, err = block.NewTrimmedFromReader(dao.Version.StateRootInHeader, r)
		if err != nil {
			return nil, err
		}
	case storage.ExecTransaction:
		_ = r.ReadU32LE()
		tx := &transaction.Transaction{}
		tx.DecodeBinary(r)
	}
	if r.Err != nil {
		return nil, r.Err
	}
	result := make([]state.AppExecResult, 0, 2)
	for {
		aer := new(state.AppExecResult)
		aer.DecodeBinary(r)
		if r.Err != nil {
			if r.Err == iocore.EOF {
				break
			}
			return nil, r.Err
		}
		if aer.Trigger&trig != 0 {
			result = append(result, *aer)
		}
	}
	return result, nil
}
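
// A retrieval sketch (editor's example, not part of the original code): fetch
// the Application-trigger execution results of a transaction with hash txHash
// that was stored via StoreAsTransaction.
//
//	aers, err := d.GetAppExecResults(txHash, trigger.Application)
//	if err == nil && len(aers) > 0 {
//		// aers[0] carries the recorded execution outcome for txHash.
//	}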

// -- end notification event.

// -- start storage item.

// GetStorageItem returns StorageItem if it exists in the given store.
func (dao *Simple) GetStorageItem(id int32, key []byte) state.StorageItem {
	b, err := dao.Store.Get(dao.makeStorageItemKey(id, key))
	if err != nil {
		return nil
	}
	return b
}

// PutStorageItem puts the given StorageItem for the given id with the given
// key into the given store.
func (dao *Simple) PutStorageItem(id int32, key []byte, si state.StorageItem) {
	stKey := dao.makeStorageItemKey(id, key)
	dao.Store.Put(stKey, si)
}

// DeleteStorageItem drops the storage item for the given id with the
// given key from the store.
func (dao *Simple) DeleteStorageItem(id int32, key []byte) {
	stKey := dao.makeStorageItemKey(id, key)
	dao.Store.Delete(stKey)
}

// GetStorageItems returns all storage items for a given id.
func (dao *Simple) GetStorageItems(id int32) ([]state.StorageItemWithKey, error) {
	return dao.GetStorageItemsWithPrefix(id, nil)
}

// GetStorageItemsWithPrefix returns all storage items with the given prefix
// for the given contract id.
func (dao *Simple) GetStorageItemsWithPrefix(id int32, prefix []byte) ([]state.StorageItemWithKey, error) {
	var siArr []state.StorageItemWithKey

	saveToArr := func(k, v []byte) bool {
		// Cut prefix and hash.
		// #1468, but don't need to copy here, because it is done by Store.
		siArr = append(siArr, state.StorageItemWithKey{
			Key:  k,
			Item: state.StorageItem(v),
		})
		return true
	}
	dao.Seek(id, storage.SeekRange{Prefix: prefix}, saveToArr)
	return siArr, nil
}

// Seek executes f for all storage items matching the given `rng` (matching the given
// prefix and starting from the point specified). Key and value slices passed to f may
// not be copied by the store, so copy them if they are needed outside of f. Seek
// continues iterating until false is returned from f.
func (dao *Simple) Seek(id int32, rng storage.SeekRange, f func(k, v []byte) bool) {
	rng.Prefix = slice.Copy(dao.makeStorageItemKey(id, rng.Prefix)) // f() can use dao too.
	dao.Store.Seek(rng, func(k, v []byte) bool {
		return f(k[len(rng.Prefix):], v)
	})
}
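
// A prefix-iteration sketch (editor's example, not part of the original code):
// collect the (prefix-stripped) keys of all storage items of contract id 42
// whose keys start with 0x01. k is only valid inside the callback, hence the
// explicit copy.
//
//	var keys [][]byte
//	d.Seek(42, storage.SeekRange{Prefix: []byte{0x01}}, func(k, v []byte) bool {
//		keys = append(keys, slice.Copy(k))
//		return true // continue iterating.
//	})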

// SeekAsync sends all storage items matching the given `rng` (matching the given prefix and
// starting from the point specified) to a channel and returns the channel.
// Resulting keys and values may not be copied by the store; copy them if they need to be retained.
func (dao *Simple) SeekAsync(ctx context.Context, id int32, rng storage.SeekRange) chan storage.KeyValue {
	rng.Prefix = slice.Copy(dao.makeStorageItemKey(id, rng.Prefix))
	return dao.Store.SeekAsync(ctx, rng, true)
}

// makeStorageItemKey returns a key used to store StorageItem in the DB.
func (dao *Simple) makeStorageItemKey(id int32, key []byte) []byte {
	// 1 for prefix + 4 for Uint32 + len(key) for key
	buf := dao.getKeyBuf(5 + len(key))
	buf[0] = byte(dao.Version.StoragePrefix)
	binary.LittleEndian.PutUint32(buf[1:], uint32(id))
	copy(buf[5:], key)
	return buf
}
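
// A layout sketch for the key produced above (editor's note, not part of the
// original code): for id = 1 and key = {0xaa} under the default STStorage
// prefix, the resulting DB key is
//
//	[ byte(storage.STStorage), 0x01, 0x00, 0x00, 0x00, 0xaa ]
//
// i.e. one prefix byte, the contract id as a little-endian uint32 and the raw
// item key appended.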

// -- end storage item.

// -- other.

// GetBlock returns Block by the given hash if it exists in the store.
func (dao *Simple) GetBlock(hash util.Uint256) (*block.Block, error) {
	return dao.getBlock(dao.makeExecutableKey(hash))
}

func (dao *Simple) getBlock(key []byte) (*block.Block, error) {
	b, err := dao.Store.Get(key)
	if err != nil {
		return nil, err
	}

	r := io.NewBinReaderFromBuf(b)
	if r.ReadB() != storage.ExecBlock {
		return nil, errors.New("internal DB inconsistency")
	}
	block, err := block.NewTrimmedFromReader(dao.Version.StateRootInHeader, r)
	if err != nil {
		return nil, err
	}
	return block, nil
}

// Version represents current dao version.
type Version struct {
	StoragePrefix              storage.KeyPrefix
	StateRootInHeader          bool
	P2PSigExtensions           bool
	P2PStateExchangeExtensions bool
	KeepOnlyLatestState        bool
	Value                      string
}

const (
	stateRootInHeaderBit = 1 << iota
	p2pSigExtensionsBit
	p2pStateExchangeExtensionsBit
	keepOnlyLatestStateBit
)

// FromBytes decodes v from a byte-slice.
func (v *Version) FromBytes(data []byte) error {
	if len(data) == 0 {
		return errors.New("missing version")
	}
	i := 0
	for ; i < len(data) && data[i] != '\x00'; i++ {
	}

	if i == len(data) {
		v.Value = string(data)
		return nil
	}

	if len(data) != i+3 {
		return errors.New("version is invalid")
	}

	v.Value = string(data[:i])
	v.StoragePrefix = storage.KeyPrefix(data[i+1])
	v.StateRootInHeader = data[i+2]&stateRootInHeaderBit != 0
	v.P2PSigExtensions = data[i+2]&p2pSigExtensionsBit != 0
	v.P2PStateExchangeExtensions = data[i+2]&p2pStateExchangeExtensionsBit != 0
	v.KeepOnlyLatestState = data[i+2]&keepOnlyLatestStateBit != 0
	return nil
}

// Bytes encodes v to a byte-slice.
func (v *Version) Bytes() []byte {
	var mask byte
	if v.StateRootInHeader {
		mask |= stateRootInHeaderBit
	}
	if v.P2PSigExtensions {
		mask |= p2pSigExtensionsBit
	}
	if v.P2PStateExchangeExtensions {
		mask |= p2pStateExchangeExtensionsBit
	}
	if v.KeepOnlyLatestState {
		mask |= keepOnlyLatestStateBit
	}
	return append([]byte(v.Value), '\x00', byte(v.StoragePrefix), mask)
}
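
// A round-trip sketch for the encoding above (editor's example, not part of
// the original code): Value, a zero separator, the storage prefix byte and a
// feature bit mask.
//
//	v := dao.Version{Value: "0.2.0", StoragePrefix: storage.STStorage, P2PSigExtensions: true}
//	data := v.Bytes() // "0.2.0" + 0x00 + byte(storage.STStorage) + mask 0b10.
//	var v2 dao.Version
//	_ = v2.FromBytes(data) // v2 now equals v.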

func (dao *Simple) mkKeyPrefix(k storage.KeyPrefix) []byte {
	b := dao.getKeyBuf(1)
	b[0] = byte(k)
	return b
}

// GetVersion attempts to get the current version stored in the
// underlying store.
func (dao *Simple) GetVersion() (Version, error) {
	var version Version

	data, err := dao.Store.Get(dao.mkKeyPrefix(storage.SYSVersion))
	if err == nil {
		err = version.FromBytes(data)
	}
	return version, err
}

// GetCurrentBlockHeight returns the current block height found in the
// underlying store.
func (dao *Simple) GetCurrentBlockHeight() (uint32, error) {
	b, err := dao.Store.Get(dao.mkKeyPrefix(storage.SYSCurrentBlock))
	if err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint32(b[32:36]), nil
}

// GetCurrentHeaderHeight returns the current header height and hash from
// the underlying store.
func (dao *Simple) GetCurrentHeaderHeight() (i uint32, h util.Uint256, err error) {
	var b []byte
	b, err = dao.Store.Get(dao.mkKeyPrefix(storage.SYSCurrentHeader))
	if err != nil {
		return
	}
	i = binary.LittleEndian.Uint32(b[32:36])
	h, err = util.Uint256DecodeBytesLE(b[:32])
	return
}
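
// A layout note for the two getters above (editor's note, not part of the
// original code): both SYSCurrentBlock and SYSCurrentHeader values are 36
// bytes, a 32-byte little-endian hash followed by a 4-byte little-endian
// index, which is why the index is read from b[32:36]. See PutCurrentHeader
// and StoreAsCurrentBlock below for the writing side.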

// GetStateSyncPoint returns current state synchronisation point P.
func (dao *Simple) GetStateSyncPoint() (uint32, error) {
	b, err := dao.Store.Get(dao.mkKeyPrefix(storage.SYSStateSyncPoint))
	if err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint32(b), nil
}

// GetStateSyncCurrentBlockHeight returns current block height stored during state
// synchronisation process.
func (dao *Simple) GetStateSyncCurrentBlockHeight() (uint32, error) {
	b, err := dao.Store.Get(dao.mkKeyPrefix(storage.SYSStateSyncCurrentBlockHeight))
	if err != nil {
		return 0, err
	}
	return binary.LittleEndian.Uint32(b), nil
}

// GetHeaderHashes returns a sorted list of header hashes retrieved from
// the underlying store.
func (dao *Simple) GetHeaderHashes() ([]util.Uint256, error) {
	var hashes = make([]util.Uint256, 0)

	var seekErr error
	dao.Store.Seek(storage.SeekRange{
		Prefix: dao.mkKeyPrefix(storage.IXHeaderHashList),
	}, func(k, v []byte) bool {
		newHashes, err := read2000Uint256Hashes(v)
		if err != nil {
			seekErr = fmt.Errorf("failed to read batch of 2000 header hashes: %w", err)
			return false
		}
		hashes = append(hashes, newHashes...)
		return true
	})

	return hashes, seekErr
}

// GetTransaction returns Transaction and its height by the given hash
// if it exists in the store. It does not return dummy transactions.
func (dao *Simple) GetTransaction(hash util.Uint256) (*transaction.Transaction, uint32, error) {
	key := dao.makeExecutableKey(hash)
	b, err := dao.Store.Get(key)
	if err != nil {
		return nil, 0, err
	}
	if len(b) < 6 {
		return nil, 0, errors.New("bad transaction bytes")
	}
	if b[0] != storage.ExecTransaction {
		return nil, 0, errors.New("internal DB inconsistency")
	}
	if b[5] == transaction.DummyVersion {
		return nil, 0, storage.ErrKeyNotFound
	}
	r := io.NewBinReaderFromBuf(b)
	_ = r.ReadB()

	var height = r.ReadU32LE()

	tx := &transaction.Transaction{}
	tx.DecodeBinary(r)
	if r.Err != nil {
		return nil, 0, r.Err
	}

	return tx, height, nil
}

// PutVersion stores the given version in the underlying store.
func (dao *Simple) PutVersion(v Version) {
	dao.Version = v
	dao.Store.Put(dao.mkKeyPrefix(storage.SYSVersion), v.Bytes())
}

// PutCurrentHeader stores the current header hash and index.
func (dao *Simple) PutCurrentHeader(h util.Uint256, index uint32) {
	buf := dao.getDataBuf()
	buf.WriteBytes(h.BytesLE())
	buf.WriteU32LE(index)
	dao.Store.Put(dao.mkKeyPrefix(storage.SYSCurrentHeader), buf.Bytes())
}

// PutStateSyncPoint stores current state synchronisation point P.
func (dao *Simple) PutStateSyncPoint(p uint32) {
	buf := dao.getDataBuf()
	buf.WriteU32LE(p)
	dao.Store.Put(dao.mkKeyPrefix(storage.SYSStateSyncPoint), buf.Bytes())
}

// PutStateSyncCurrentBlockHeight stores current block height during state synchronisation process.
func (dao *Simple) PutStateSyncCurrentBlockHeight(h uint32) {
	buf := dao.getDataBuf()
	buf.WriteU32LE(h)
	dao.Store.Put(dao.mkKeyPrefix(storage.SYSStateSyncCurrentBlockHeight), buf.Bytes())
}

// read2000Uint256Hashes attempts to read 2000 Uint256 hashes from
// the given byte array.
func read2000Uint256Hashes(b []byte) ([]util.Uint256, error) {
	r := bytes.NewReader(b)
	br := io.NewBinReaderFromIO(r)
	hashes := make([]util.Uint256, 0)
	br.ReadArray(&hashes)
	if br.Err != nil {
		return nil, br.Err
	}
	return hashes, nil
}

func (dao *Simple) mkHeaderHashKey(h uint32) []byte {
	b := dao.getKeyBuf(1 + 4)
	b[0] = byte(storage.IXHeaderHashList)
	binary.BigEndian.PutUint32(b[1:], h)
	return b
}

// StoreHeaderHashes pushes a batch of header hashes into the store.
func (dao *Simple) StoreHeaderHashes(hashes []util.Uint256, height uint32) error {
	key := dao.mkHeaderHashKey(height)
	buf := dao.getDataBuf()
	buf.WriteArray(hashes)
	if buf.Err != nil {
		return buf.Err
	}
	dao.Store.Put(key, buf.Bytes())
	return nil
}

// HasTransaction returns nil if the given store does not contain the given
// Transaction hash. It returns an error if the transaction is in the chain
// or in the list of conflicting transactions.
func (dao *Simple) HasTransaction(hash util.Uint256) error {
	key := dao.makeExecutableKey(hash)
	bytes, err := dao.Store.Get(key)
	if err != nil {
		return nil
	}

	if len(bytes) < 6 {
		return nil
	}
	if bytes[5] == transaction.DummyVersion {
		return ErrHasConflicts
	}
	return ErrAlreadyExists
}
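
// A checking sketch (editor's example, not part of the original code): a
// caller can distinguish the two sentinel errors declared at the top of this
// file.
//
//	switch err := d.HasTransaction(txHash); err {
//	case nil:
//		// Unknown transaction.
//	case dao.ErrAlreadyExists:
//		// Already stored in the chain.
//	case dao.ErrHasConflicts:
//		// A stored transaction conflicts with this hash.
//	}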

// StoreAsBlock stores the given block as an ExecBlock executable together with
// the given application execution results (if any).
func (dao *Simple) StoreAsBlock(block *block.Block, aer1 *state.AppExecResult, aer2 *state.AppExecResult) error {
	var (
		key = dao.makeExecutableKey(block.Hash())
		buf = dao.getDataBuf()
	)
	buf.WriteB(storage.ExecBlock)
	block.EncodeTrimmed(buf.BinWriter)
	if aer1 != nil {
		aer1.EncodeBinary(buf.BinWriter)
	}
	if aer2 != nil {
		aer2.EncodeBinary(buf.BinWriter)
	}
	if buf.Err != nil {
		return buf.Err
	}
	dao.Store.Put(key, buf.Bytes())
	return nil
}

// DeleteBlock removes the block from dao. It's not atomic, so make sure you're
// using a private MemCached instance here.
func (dao *Simple) DeleteBlock(h util.Uint256) error {
	key := dao.makeExecutableKey(h)

	b, err := dao.getBlock(key)
	if err != nil {
		return err
	}

	err = dao.storeHeader(key, &b.Header)
	if err != nil {
		return err
	}

	for _, tx := range b.Transactions {
		copy(key[1:], tx.Hash().BytesBE())
		dao.Store.Delete(key)
		if dao.Version.P2PSigExtensions {
			for _, attr := range tx.GetAttributes(transaction.ConflictsT) {
				hash := attr.Value.(*transaction.Conflicts).Hash
				copy(key[1:], hash.BytesBE())
				dao.Store.Delete(key)
			}
		}
	}

	return nil
}

// StoreHeader saves block header into the store.
func (dao *Simple) StoreHeader(h *block.Header) error {
	return dao.storeHeader(dao.makeExecutableKey(h.Hash()), h)
}

func (dao *Simple) storeHeader(key []byte, h *block.Header) error {
	buf := dao.getDataBuf()
	buf.WriteB(storage.ExecBlock)
	h.EncodeBinary(buf.BinWriter)
	buf.BinWriter.WriteB(0)
	if buf.Err != nil {
		return buf.Err
	}
	dao.Store.Put(key, buf.Bytes())
	return nil
}

// StoreAsCurrentBlock stores the hash and index of the given block with the
// SYSCurrentBlock prefix.
func (dao *Simple) StoreAsCurrentBlock(block *block.Block) {
	buf := dao.getDataBuf()
	h := block.Hash()
	h.EncodeBinary(buf.BinWriter)
	buf.WriteU32LE(block.Index)
	dao.Store.Put(dao.mkKeyPrefix(storage.SYSCurrentBlock), buf.Bytes())
}

// StoreAsTransaction stores the given TX as an ExecTransaction executable along
// with its application execution result (if any). It also stores the transactions
// the given tx conflicts with as ExecTransaction entries with the dummy version.
func (dao *Simple) StoreAsTransaction(tx *transaction.Transaction, index uint32, aer *state.AppExecResult) error {
	key := dao.makeExecutableKey(tx.Hash())
	buf := dao.getDataBuf()

	buf.WriteB(storage.ExecTransaction)
	buf.WriteU32LE(index)
	tx.EncodeBinary(buf.BinWriter)
	if aer != nil {
		aer.EncodeBinary(buf.BinWriter)
	}
	if buf.Err != nil {
		return buf.Err
	}
	dao.Store.Put(key, buf.Bytes())
	if dao.Version.P2PSigExtensions {
		var value []byte
		for _, attr := range tx.GetAttributes(transaction.ConflictsT) {
			hash := attr.Value.(*transaction.Conflicts).Hash
			copy(key[1:], hash.BytesBE())
			if value == nil {
				buf.Reset()
				buf.WriteB(storage.ExecTransaction)
				buf.WriteU32LE(index)
				buf.BinWriter.WriteB(transaction.DummyVersion)
				value = buf.Bytes()
			}
			dao.Store.Put(key, value)
		}
	}
	return nil
}
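
// A store/retrieve sketch (editor's example, not part of the original code),
// assuming b is a *block.Block and tx is one of its transactions:
//
//	_ = d.StoreAsBlock(b, nil, nil)            // trimmed block, no exec results yet.
//	_ = d.StoreAsTransaction(tx, b.Index, nil) // transaction bound to b's height.
//	blk, _ := d.GetBlock(b.Hash())             // trimmed block back.
//	tx2, height, _ := d.GetTransaction(tx.Hash())
//	_, _, _ = blk, tx2, height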

func (dao *Simple) getKeyBuf(len int) []byte {
	if dao.private {
		if dao.keyBuf == nil {
			dao.keyBuf = make([]byte, 0, 1+4+storage.MaxStorageKeyLen) // Prefix, uint32, key.
		}
		return dao.keyBuf[:len] // Should have enough capacity.
	}
	return make([]byte, len)
}

func (dao *Simple) getDataBuf() *io.BufBinWriter {
	if dao.private {
		if dao.dataBuf == nil {
			dao.dataBuf = io.NewBufBinWriter()
		}
		dao.dataBuf.Reset()
		return dao.dataBuf
	}
	return io.NewBufBinWriter()
}

// Persist flushes all the changes made into the (supposedly) persistent
// underlying store. It doesn't block accesses to DAO from other threads.
func (dao *Simple) Persist() (int, error) {
	return dao.Store.Persist()
}

// PersistSync flushes all the changes made into the (supposedly) persistent
// underlying store. It's a synchronous version of Persist that doesn't allow
// other threads to work with DAO while flushing the Store.
func (dao *Simple) PersistSync() (int, error) {
	return dao.Store.PersistSync()
}
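
// A persistence-chain sketch (editor's example, not part of the original
// code): with a disk-backed storage.Store as the bottom layer, each Persist
// call pushes accumulated changes one layer down.
//
//	d := dao.NewSimple(persistentStore, false, true) // persistentStore is any storage.Store.
//	pd := d.GetPrivate()                             // per-block working copy.
//	// ... apply changes through pd ...
//	_, _ = pd.Persist() // private layer -> d.Store.
//	_, _ = d.Persist()  // d.Store -> persistentStore.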