dao: move header hash store logic out of the core
This allows for more efficient buffer use along the way.
parent de2579ec07
commit d2db58d748
2 changed files with 23 additions and 7 deletions
pkg/core/blockchain.go

@@ -949,15 +949,12 @@ func (bc *Blockchain) addHeaders(verify bool, headers ...*block.Header) error {
 	}
 
 	if oldlen != len(bc.headerHashes) {
-		buf := io.NewBufBinWriter()
 		for int(lastHeader.Index)-headerBatchCount >= int(bc.storedHeaderCount) {
-			buf.WriteArray(bc.headerHashes[bc.storedHeaderCount : bc.storedHeaderCount+headerBatchCount])
-			if buf.Err != nil {
-				return buf.Err
+			err = batch.StoreHeaderHashes(bc.headerHashes[bc.storedHeaderCount:bc.storedHeaderCount+headerBatchCount],
+				bc.storedHeaderCount)
+			if err != nil {
+				return err
 			}
-
-			key := storage.AppendPrefixInt(storage.IXHeaderHashList, int(bc.storedHeaderCount))
-			batch.Store.Put(key, buf.Bytes())
 			bc.storedHeaderCount += headerBatchCount
 		}
 
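To make the batching arithmetic in this hunk concrete, here is a standalone sketch of the flush loop. headerBatchCount is assumed to be 2000 (the read2000Uint256Hashes helper in the next file suggests as much); the header index, the counters, and the print stand-in for batch.StoreHeaderHashes are illustrative only.

package main

import "fmt"

// Mirrors the loop condition from addHeaders: a batch is flushed only
// while a full headerBatchCount-sized chunk of hashes is available.
func main() {
	const headerBatchCount = 2000 // assumed batch size
	lastHeaderIndex := 4500       // index of the newest header just added
	storedHeaderCount := 0        // number of hashes already persisted

	for lastHeaderIndex-headerBatchCount >= storedHeaderCount {
		// Stands in for batch.StoreHeaderHashes(hashes[from:from+n], from).
		fmt.Printf("store hashes [%d, %d) keyed by height %d\n",
			storedHeaderCount, storedHeaderCount+headerBatchCount, storedHeaderCount)
		storedHeaderCount += headerBatchCount
	}
	// Prints two batches, [0, 2000) and [2000, 4000); the remaining
	// hashes stay in memory until another full batch accumulates.
}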
pkg/core/dao/dao.go

@@ -590,6 +590,25 @@ func read2000Uint256Hashes(b []byte) ([]util.Uint256, error) {
 	return hashes, nil
 }
 
+func (dao *Simple) mkHeaderHashKey(h uint32) []byte {
+	b := dao.getKeyBuf(1 + 4)
+	b[0] = byte(storage.IXHeaderHashList)
+	binary.BigEndian.PutUint32(b[1:], h)
+	return b
+}
+
+// StoreHeaderHashes pushes a batch of header hashes into the store.
+func (dao *Simple) StoreHeaderHashes(hashes []util.Uint256, height uint32) error {
+	key := dao.mkHeaderHashKey(height)
+	buf := dao.getDataBuf()
+	buf.WriteArray(hashes)
+	if buf.Err != nil {
+		return buf.Err
+	}
+	dao.Store.Put(key, buf.Bytes())
+	return nil
+}
+
 // HasTransaction returns nil if the given store does not contain the given
 // Transaction hash. It returns an error in case if transaction is in chain
 // or in the list of conflicting transactions.
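The more efficient buffer use mentioned in the commit message comes from getKeyBuf and getDataBuf, which this diff calls but does not show. Below is a minimal sketch of how such per-DAO scratch buffers could work, assuming a Simple instance is not used concurrently; the field names and bodies are assumptions for illustration, not code from this commit.

package dao

import "github.com/nspcc-dev/neo-go/pkg/io"

// Simple keeps reusable scratch buffers so that key construction and
// value serialization (as in mkHeaderHashKey/StoreHeaderHashes above)
// do not allocate on every call.
type Simple struct {
	keyBuf  []byte           // hypothetical scratch space for store keys
	dataBuf *io.BufBinWriter // hypothetical scratch writer for values
}

// getKeyBuf returns an l-byte slice, reusing the backing array when it
// is already big enough.
func (dao *Simple) getKeyBuf(l int) []byte {
	if cap(dao.keyBuf) < l {
		dao.keyBuf = make([]byte, l)
	}
	return dao.keyBuf[:l]
}

// getDataBuf returns the shared binary writer, reset for a fresh value.
func (dao *Simple) getDataBuf() *io.BufBinWriter {
	if dao.dataBuf == nil {
		dao.dataBuf = io.NewBufBinWriter()
	}
	dao.dataBuf.Reset()
	return dao.dataBuf
}

Under this scheme the returned buffers are only valid until the next get*Buf call, which is fine here because StoreHeaderHashes finishes with them before returning; the old code instead created a fresh io.NewBufBinWriter inside the core on every addHeaders invocation.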