forked from TrueCloudLab/neoneo-go
Merge pull request #3004 from nspcc-dev/require-error-is
*: use require.ErrorIs instead of require.True(t, error.Is())
commit b47a891b9e
27 changed files with 109 additions and 133 deletions
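The change is mechanical: assertions of the form require.True(t, errors.Is(err, target), "got: %v", err) become require.ErrorIs(t, err, target). ErrorIs calls errors.Is internally and reports both the actual and the target error on failure, so the hand-written failure messages become redundant and are dropped. A minimal before/after sketch (hypothetical test, not part of this diff):

package example

import (
	"errors"
	"fmt"
	"testing"

	"github.com/stretchr/testify/require"
)

var errNotFound = errors.New("not found")

// lookup returns a wrapped error, so plain equality would not match;
// errors.Is (and therefore require.ErrorIs) unwraps it.
func lookup() error {
	return fmt.Errorf("lookup failed: %w", errNotFound)
}

func TestLookup(t *testing.T) {
	err := lookup()
	// Old pattern: boolean assertion, failure message built by hand.
	require.True(t, errors.Is(err, errNotFound), "got: %v", err)
	// New pattern: prints both the actual and the target error on failure.
	require.ErrorIs(t, err, errNotFound)
}

The conversion also normalizes the argument order to (actual, expected) where the old assertion had the two swapped (see the MerkleBlock test below), and drops a now-redundant require.Error call in the oracle client test.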
@@ -1,7 +1,6 @@
 package consensus
 
 import (
-"errors"
 "testing"
 "time"
 
@@ -343,7 +342,7 @@ func TestService_PrepareRequest(t *testing.T) {
 require.NoError(t, err)
 return
 }
-require.True(t, errors.Is(err, expectedErr), "got: %v", err)
+require.ErrorIs(t, err, expectedErr)
 }
 
 checkRequest(t, errInvalidVersion, &prepareRequest{version: 0xFF, prevHash: prevHash})
@@ -4,7 +4,6 @@ import (
 "encoding/base64"
 "encoding/hex"
 "encoding/json"
-"errors"
 "strings"
 "testing"
 
@@ -236,7 +235,7 @@ func TestBlockEncodeDecode(t *testing.T) {
 data, err := testserdes.EncodeBinary(b)
 require.NoError(t, err)
 
-require.True(t, errors.Is(testserdes.DecodeBinary(data, new(Block)), ErrMaxContentsPerBlock))
+require.ErrorIs(t, testserdes.DecodeBinary(data, new(Block)), ErrMaxContentsPerBlock)
 })
 }
 
@@ -2,7 +2,6 @@ package core
 
 import (
 "encoding/binary"
-"errors"
 "fmt"
 "strings"
 "sync/atomic"
@@ -33,16 +32,16 @@ func TestVerifyHeader(t *testing.T) {
 h := prev.Hash()
 h[0] = ^h[0]
 hdr := newBlock(bc.config.ProtocolConfiguration, 1, h).Header
-require.True(t, errors.Is(bc.verifyHeader(&hdr, &prev), ErrHdrHashMismatch))
+require.ErrorIs(t, bc.verifyHeader(&hdr, &prev), ErrHdrHashMismatch)
 })
 t.Run("Index", func(t *testing.T) {
 hdr := newBlock(bc.config.ProtocolConfiguration, 3, prev.Hash()).Header
-require.True(t, errors.Is(bc.verifyHeader(&hdr, &prev), ErrHdrIndexMismatch))
+require.ErrorIs(t, bc.verifyHeader(&hdr, &prev), ErrHdrIndexMismatch)
 })
 t.Run("Timestamp", func(t *testing.T) {
 hdr := newBlock(bc.config.ProtocolConfiguration, 1, prev.Hash()).Header
 hdr.Timestamp = 0
-require.True(t, errors.Is(bc.verifyHeader(&hdr, &prev), ErrHdrInvalidTimestamp))
+require.ErrorIs(t, bc.verifyHeader(&hdr, &prev), ErrHdrInvalidTimestamp)
 })
 })
 t.Run("Valid", func(t *testing.T) {
@@ -427,7 +427,7 @@ func TestBlockchain_AddBlockStateRoot(t *testing.T) {
 b.PrevStateRoot = util.Uint256{}
 e.SignBlock(b)
 err = bc.AddBlock(b)
-require.True(t, errors.Is(err, core.ErrHdrStateRootSetting), "got: %v", err)
+require.ErrorIs(t, err, core.ErrHdrStateRootSetting)
 
 u := sr.Root
 u[0] ^= 0xFF
@@ -435,7 +435,7 @@ func TestBlockchain_AddBlockStateRoot(t *testing.T) {
 b.PrevStateRoot = u
 e.SignBlock(b)
 err = bc.AddBlock(b)
-require.True(t, errors.Is(err, core.ErrHdrInvalidStateRoot), "got: %v", err)
+require.ErrorIs(t, err, core.ErrHdrInvalidStateRoot)
 
 b = e.NewUnsignedBlock(t)
 e.SignBlock(b)
@@ -455,7 +455,7 @@ func TestBlockchain_AddHeadersStateRoot(t *testing.T) {
 
 // invalid stateroot
 h1.PrevStateRoot[0] ^= 0xFF
-require.True(t, errors.Is(bc.AddHeaders(&h1), core.ErrHdrInvalidStateRoot))
+require.ErrorIs(t, bc.AddHeaders(&h1), core.ErrHdrInvalidStateRoot)
 
 // valid stateroot
 h1.PrevStateRoot = r
@@ -597,12 +597,12 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 newH[0] = ^newH[0]
 w := &transaction.Witness{InvocationScript: []byte{byte(opcode.PUSH4)}}
 _, err := bc.VerifyWitness(newH, nil, w, gas)
-require.True(t, errors.Is(err, core.ErrUnknownVerificationContract))
+require.ErrorIs(t, err, core.ErrUnknownVerificationContract)
 })
 t.Run("Invalid", func(t *testing.T) {
 w := &transaction.Witness{InvocationScript: []byte{byte(opcode.PUSH4)}}
 _, err := bc.VerifyWitness(csInvalid.Hash, nil, w, gas)
-require.True(t, errors.Is(err, core.ErrInvalidVerificationContract))
+require.ErrorIs(t, err, core.ErrInvalidVerificationContract)
 })
 t.Run("ValidSignature", func(t *testing.T) {
 w := &transaction.Witness{InvocationScript: []byte{byte(opcode.PUSH4)}}
@@ -612,7 +612,7 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 t.Run("InvalidSignature", func(t *testing.T) {
 w := &transaction.Witness{InvocationScript: []byte{byte(opcode.PUSH3)}}
 _, err := bc.VerifyWitness(cs.Hash, nil, w, gas)
-require.True(t, errors.Is(err, core.ErrVerificationFailed))
+require.ErrorIs(t, err, core.ErrVerificationFailed)
 })
 })
 t.Run("NotEnoughGas", func(t *testing.T) {
@@ -622,7 +622,7 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 VerificationScript: verif,
 }
 _, err := bc.VerifyWitness(hash.Hash160(verif), nil, w, 1)
-require.True(t, errors.Is(err, core.ErrVerificationFailed))
+require.ErrorIs(t, err, core.ErrVerificationFailed)
 })
 t.Run("NoResult", func(t *testing.T) {
 verif := []byte{byte(opcode.DROP)}
@@ -631,7 +631,7 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 VerificationScript: verif,
 }
 _, err := bc.VerifyWitness(hash.Hash160(verif), nil, w, gas)
-require.True(t, errors.Is(err, core.ErrVerificationFailed))
+require.ErrorIs(t, err, core.ErrVerificationFailed)
 })
 t.Run("BadResult", func(t *testing.T) {
 verif := make([]byte, keys.SignatureLen+2)
@@ -642,7 +642,7 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 VerificationScript: verif,
 }
 _, err := bc.VerifyWitness(hash.Hash160(verif), nil, w, gas)
-require.True(t, errors.Is(err, core.ErrVerificationFailed))
+require.ErrorIs(t, err, core.ErrVerificationFailed)
 })
 t.Run("TooManyResults", func(t *testing.T) {
 verif := []byte{byte(opcode.NOP)}
@@ -651,7 +651,7 @@ func TestBlockchain_VerifyHashAgainstScript(t *testing.T) {
 VerificationScript: verif,
 }
 _, err := bc.VerifyWitness(hash.Hash160(verif), nil, w, gas)
-require.True(t, errors.Is(err, core.ErrVerificationFailed))
+require.ErrorIs(t, err, core.ErrVerificationFailed)
 })
 }
 
@@ -1165,7 +1165,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 
 checkErr := func(t *testing.T, expectedErr error, tx *transaction.Transaction) {
 err := bc.VerifyTx(tx)
-require.True(t, errors.Is(err, expectedErr), "expected: %v, got: %v", expectedErr, err)
+require.ErrorIs(t, err, expectedErr)
 }
 
 testScript := []byte{byte(opcode.PUSH1)}
@@ -1321,7 +1321,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 tx2.NetworkFee = balance / 2
 require.NoError(t, accs[0].SignTx(netmode.UnitTestNet, tx2))
 err := bc.PoolTx(tx2)
-require.True(t, errors.Is(err, core.ErrMemPoolConflict))
+require.ErrorIs(t, err, core.ErrMemPoolConflict)
 })
 t.Run("InvalidWitnessHash", func(t *testing.T) {
 tx := newTestTx(t, h, testScript)
@@ -1358,7 +1358,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 require.NoError(t, bc.PoolTx(tx))
 
 err := bc.PoolTx(tx)
-require.True(t, errors.Is(err, core.ErrAlreadyExists))
+require.ErrorIs(t, err, core.ErrAlreadyExists)
 })
 t.Run("MemPoolOOM", func(t *testing.T) {
 mp := mempool.New(1, 0, false, nil)
@@ -1370,7 +1370,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 tx2 := newTestTx(t, h, testScript)
 require.NoError(t, accs[0].SignTx(netmode.UnitTestNet, tx2))
 err := bc.PoolTx(tx2, mp)
-require.True(t, errors.Is(err, core.ErrOOM))
+require.ErrorIs(t, err, core.ErrOOM)
 })
 t.Run("Attribute", func(t *testing.T) {
 t.Run("InvalidHighPriority", func(t *testing.T) {
@@ -1476,7 +1476,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 emit.String(w.BinWriter, nativenames.Oracle)
 tx.Scripts[len(tx.Scripts)-1].VerificationScript = w.Bytes()
 err := bc.VerifyTx(tx)
-require.True(t, errors.Is(err, core.ErrNativeContractWitness), "got: %v", err)
+require.ErrorIs(t, err, core.ErrNativeContractWitness)
 })
 t.Run("Good", func(t *testing.T) {
 tx.Scripts[len(tx.Scripts)-1].VerificationScript = nil
@@ -1549,7 +1549,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 t.Run("Enabled", func(t *testing.T) {
 t.Run("NotYetValid", func(t *testing.T) {
 tx := getNVBTx(e, bc.BlockHeight()+1)
-require.True(t, errors.Is(bc.VerifyTx(tx), core.ErrInvalidAttribute))
+require.ErrorIs(t, bc.VerifyTx(tx), core.ErrInvalidAttribute)
 })
 t.Run("positive", func(t *testing.T) {
 tx := getNVBTx(e, bc.BlockHeight())
@@ -1655,7 +1655,7 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 conflicting.NetworkFee = 1000_0000
 require.NoError(t, validator.SignTx(netmode.UnitTestNet, conflicting))
 e.AddNewBlock(t, conflicting)
-require.True(t, errors.Is(bc.VerifyTx(tx), core.ErrHasConflicts))
+require.ErrorIs(t, bc.VerifyTx(tx), core.ErrHasConflicts)
 })
 t.Run("attribute on-chain conflict", func(t *testing.T) {
 tx := neoValidatorsInvoker.Invoke(t, stackitem.NewBool(true), "transfer", neoOwner, neoOwner, 1, nil)
@@ -1950,13 +1950,13 @@ func TestBlockchain_VerifyTx(t *testing.T) {
 maxNVB, err := bc.GetMaxNotValidBeforeDelta()
 require.NoError(t, err)
 tx := getPartiallyFilledTx(bc.BlockHeight()+maxNVB+1, bc.BlockHeight()+1)
-require.True(t, errors.Is(bc.PoolTxWithData(tx, 5, mp, bc, verificationF), core.ErrInvalidAttribute))
+require.ErrorIs(t, bc.PoolTxWithData(tx, 5, mp, bc, verificationF), core.ErrInvalidAttribute)
 })
 t.Run("bad ValidUntilBlock: too small", func(t *testing.T) {
 maxNVB, err := bc.GetMaxNotValidBeforeDelta()
 require.NoError(t, err)
 tx := getPartiallyFilledTx(bc.BlockHeight(), bc.BlockHeight()+maxNVB+1)
-require.True(t, errors.Is(bc.PoolTxWithData(tx, 5, mp, bc, verificationF), core.ErrInvalidAttribute))
+require.ErrorIs(t, bc.PoolTxWithData(tx, 5, mp, bc, verificationF), core.ErrInvalidAttribute)
 })
 t.Run("good", func(t *testing.T) {
 tx := getPartiallyFilledTx(bc.BlockHeight(), bc.BlockHeight()+1)
@@ -85,7 +85,7 @@ func testDumpAndRestore(t *testing.T, dumpF, restoreF func(c *config.Blockchain)
 
 r = io.NewBinReaderFromBuf(buf)
 err := chaindump.Restore(bc2, r, 4, bc.BlockHeight()-bc2.BlockHeight(), f)
-require.True(t, errors.Is(err, errStopped))
+require.ErrorIs(t, err, errStopped)
 require.Equal(t, bc.BlockHeight()-1, lastIndex)
 })
 })
@@ -2,7 +2,6 @@ package dao
 
 import (
 "encoding/binary"
-"errors"
 "testing"
 
 "github.com/nspcc-dev/neo-go/internal/random"
@@ -219,9 +218,9 @@ func TestStoreAsTransaction(t *testing.T) {
 err := dao.StoreAsTransaction(tx, 0, aer)
 require.NoError(t, err)
 err = dao.HasTransaction(hash)
-require.True(t, errors.Is(err, ErrAlreadyExists))
+require.ErrorIs(t, err, ErrAlreadyExists)
 err = dao.HasTransaction(conflictsH)
-require.True(t, errors.Is(err, ErrHasConflicts))
+require.ErrorIs(t, err, ErrHasConflicts)
 gotAppExecResult, err := dao.GetAppExecResults(hash, trigger.All)
 require.NoError(t, err)
 require.Equal(t, 1, len(gotAppExecResult))
@@ -1,7 +1,6 @@
 package interopnames
 
 import (
-"errors"
 "testing"
 
 "github.com/stretchr/testify/require"
@@ -16,6 +15,6 @@ func TestFromID(t *testing.T) {
 })
 t.Run("Invalid", func(t *testing.T) {
 _, err := FromID(0x42424242)
-require.True(t, errors.Is(err, errNotFound))
+require.ErrorIs(t, err, errNotFound)
 })
 }
@@ -1,7 +1,6 @@
 package storage_test
 
 import (
-"errors"
 "math/big"
 "testing"
 
@@ -42,7 +41,7 @@ func TestPut(t *testing.T) {
 t.Run("create, not enough gas", func(t *testing.T) {
 initVM(t, []byte{1}, []byte{2, 3}, 2*native.DefaultStoragePrice)
 err := istorage.Put(ic)
-require.True(t, errors.Is(err, istorage.ErrGasLimitExceeded), "got: %v", err)
+require.ErrorIs(t, err, istorage.ErrGasLimitExceeded)
 })
 
 initVM(t, []byte{4}, []byte{5, 6}, 3*native.DefaultStoragePrice)
@@ -52,7 +51,7 @@ func TestPut(t *testing.T) {
 t.Run("not enough gas", func(t *testing.T) {
 initVM(t, []byte{4}, []byte{5, 6, 7, 8}, native.DefaultStoragePrice)
 err := istorage.Put(ic)
-require.True(t, errors.Is(err, istorage.ErrGasLimitExceeded), "got: %v", err)
+require.ErrorIs(t, err, istorage.ErrGasLimitExceeded)
 })
 initVM(t, []byte{4}, []byte{5, 6, 7, 8}, 3*native.DefaultStoragePrice)
 require.NoError(t, istorage.Put(ic))
@@ -1,7 +1,6 @@
 package mempool
 
 import (
-"errors"
 "math/big"
 "sort"
 "testing"
@@ -380,7 +379,7 @@ func TestMempoolAddRemoveOracleResponse(t *testing.T) {
 // smaller network fee
 tx2 := newTx(5, 1)
 err := mp.Add(tx2, fs)
-require.True(t, errors.Is(err, ErrOracleResponse))
+require.ErrorIs(t, err, ErrOracleResponse)
 
 // ok if old tx is removed
 mp.Remove(tx1.Hash(), fs)
@@ -457,7 +456,7 @@ func TestMempoolAddRemoveConflicts(t *testing.T) {
 
 // tx2 conflicts with tx1 and has smaller netfee (Step 2, negative)
 tx2 := getConflictsTx(smallNetFee-1, tx1.Hash())
-require.True(t, errors.Is(mp.Add(tx2, fs), ErrConflictsAttribute))
+require.ErrorIs(t, mp.Add(tx2, fs), ErrConflictsAttribute)
 
 // tx3 conflicts with mempooled tx1 and has larger netfee => tx1 should be replaced by tx3 (Step 2, positive)
 tx3 := getConflictsTx(smallNetFee+1, tx1.Hash())
@@ -468,7 +467,7 @@ func TestMempoolAddRemoveConflicts(t *testing.T) {
 
 // tx1 still does not conflicts with anyone, but tx3 is mempooled, conflicts with tx1
 // and has larger netfee => tx1 shouldn't be added again (Step 1, negative)
-require.True(t, errors.Is(mp.Add(tx1, fs), ErrConflictsAttribute))
+require.ErrorIs(t, mp.Add(tx1, fs), ErrConflictsAttribute)
 
 // tx2 can now safely be added because conflicting tx1 is not in mempool => we
 // cannot check that tx2 is signed by tx1.Sender
@@ -541,7 +540,7 @@ func TestMempoolAddRemoveConflicts(t *testing.T) {
 }}
 _, ok := mp.TryGetValue(tx13.Hash())
 require.Equal(t, false, ok)
-require.True(t, errors.Is(mp.Add(tx13, fs), ErrConflictsAttribute))
+require.ErrorIs(t, mp.Add(tx13, fs), ErrConflictsAttribute)
 }
 
 func TestMempoolAddWithDataGetData(t *testing.T) {
@@ -570,7 +569,7 @@ func TestMempoolAddWithDataGetData(t *testing.T) {
 MainTransaction: newTx(t, 0),
 FallbackTransaction: newTx(t, fs.balance+1),
 }
-require.True(t, errors.Is(mp.Add(r1.FallbackTransaction, fs, r1), ErrInsufficientFunds))
+require.ErrorIs(t, mp.Add(r1.FallbackTransaction, fs, r1), ErrInsufficientFunds)
 
 // good
 r2 := &payload.P2PNotaryRequest{
@@ -584,7 +583,7 @@ func TestMempoolAddWithDataGetData(t *testing.T) {
 require.Equal(t, r2, data)
 
 // bad, already in pool
-require.True(t, errors.Is(mp.Add(r2.FallbackTransaction, fs, r2), ErrDup))
+require.ErrorIs(t, mp.Add(r2.FallbackTransaction, fs, r2), ErrDup)
 
 // good, higher priority than r2. The resulting mp.verifiedTxes: [r3, r2]
 r3 := &payload.P2PNotaryRequest{
@@ -2,7 +2,6 @@ package mpt
 
 import (
 "encoding/binary"
-"errors"
 "testing"
 
 "github.com/nspcc-dev/neo-go/pkg/core/storage"
@@ -21,7 +20,7 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 require.NoError(t, err)
 require.Equal(t, expectedRefCount, binary.LittleEndian.Uint32(expectedBytes[len(expectedBytes)-4:]))
 } else {
-require.True(t, errors.Is(err, storage.ErrKeyNotFound))
+require.ErrorIs(t, err, storage.ErrKeyNotFound)
 }
 }
 
@@ -49,11 +48,11 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 check(t, tr, expected, n.Node, 1)
 
 // Same path, but wrong hash => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(path, NewBranchNode()), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, NewBranchNode()), ErrRestoreFailed)
 check(t, tr, expected, n.Node, 1)
 
 // New path (changes in the MPT structure are not allowed) => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(toNibbles([]byte{0xAB}), n.Node), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(toNibbles([]byte{0xAB}), n.Node), ErrRestoreFailed)
 check(t, tr, expected, n.Node, 1)
 })
 
@@ -75,11 +74,11 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 check(t, tr, expected, l, 1)
 
 // Same path, but wrong hash => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(path, NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed)
 check(t, tr, expected, l, 1)
 
 // New path (changes in the MPT structure are not allowed) => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(toNibbles([]byte{0xAB}), l), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(toNibbles([]byte{0xAB}), l), ErrRestoreFailed)
 check(t, tr, expected, l, 1)
 })
 
@@ -91,7 +90,7 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 tr.root = e
 
 // no-op
-require.True(t, errors.Is(tr.RestoreHashNode(path, h), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, h), ErrRestoreFailed)
 check(t, tr, e, h, 0)
 })
 })
@@ -108,10 +107,10 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 })
 
 // Same path, but wrong hash => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(path, NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed)
 
 // Non-nil path, but MPT structure can't be changed => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(toNibbles([]byte{0xAC}), NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(toNibbles([]byte{0xAC}), NewLeafNode([]byte{0xAB, 0xEF})), ErrRestoreFailed)
 })
 
 t.Run("parent is Branch", func(t *testing.T) {
@@ -138,11 +137,11 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 check(t, tr, b, l1, 1)
 
 // Same path, but wrong hash => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(path, NewLeafNode([]byte{0xAD})), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, NewLeafNode([]byte{0xAD})), ErrRestoreFailed)
 check(t, tr, b, l1, 1)
 
 // New path pointing to the empty HashNode (changes in the MPT structure are not allowed) => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode([]byte{0x01}, l1), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode([]byte{0x01}, l1), ErrRestoreFailed)
 check(t, tr, b, l1, 1)
 })
 
@@ -169,7 +168,7 @@ func TestBillet_RestoreHashNode(t *testing.T) {
 check(t, tr, b, l2, 1)
 
 // Same path, but wrong hash => error expected, no refcount changes
-require.True(t, errors.Is(tr.RestoreHashNode(path, NewLeafNode([]byte{0xAD})), ErrRestoreFailed))
+require.ErrorIs(t, tr.RestoreHashNode(path, NewLeafNode([]byte{0xAD})), ErrRestoreFailed)
 check(t, tr, b, l2, 1)
 })
 
@@ -1,7 +1,6 @@
 package core
 
 import (
-"errors"
 "testing"
 
 "github.com/nspcc-dev/neo-go/internal/testchain"
@@ -30,7 +29,7 @@ func TestDesignate_DesignateAsRole(t *testing.T) {
 ic.VM.LoadScript([]byte{byte(opcode.RET)})
 
 _, _, err := des.GetDesignatedByRole(bc.dao, 0xFF, 255)
-require.True(t, errors.Is(err, native.ErrInvalidRole), "got: %v", err)
+require.ErrorIs(t, err, native.ErrInvalidRole)
 
 pubs, index, err := des.GetDesignatedByRole(bc.dao, noderoles.Oracle, 255)
 require.NoError(t, err)
@@ -38,20 +37,20 @@ func TestDesignate_DesignateAsRole(t *testing.T) {
 require.Equal(t, uint32(0), index)
 
 err = des.DesignateAsRole(ic, noderoles.Oracle, keys.PublicKeys{})
-require.True(t, errors.Is(err, native.ErrEmptyNodeList), "got: %v", err)
+require.ErrorIs(t, err, native.ErrEmptyNodeList)
 
 err = des.DesignateAsRole(ic, noderoles.Oracle, make(keys.PublicKeys, 32+1))
-require.True(t, errors.Is(err, native.ErrLargeNodeList), "got: %v", err)
+require.ErrorIs(t, err, native.ErrLargeNodeList)
 
 priv, err := keys.NewPrivateKey()
 require.NoError(t, err)
 pub := priv.PublicKey()
 
 err = des.DesignateAsRole(ic, 0xFF, keys.PublicKeys{pub})
-require.True(t, errors.Is(err, native.ErrInvalidRole), "got: %v", err)
+require.ErrorIs(t, err, native.ErrInvalidRole)
 
 err = des.DesignateAsRole(ic, noderoles.Oracle, keys.PublicKeys{pub})
-require.True(t, errors.Is(err, native.ErrInvalidWitness), "got: %v", err)
+require.ErrorIs(t, err, native.ErrInvalidWitness)
 
 setSigner(tx, testchain.CommitteeScriptHash())
 err = des.DesignateAsRole(ic, noderoles.Oracle, keys.PublicKeys{pub})
@@ -2,7 +2,6 @@ package transaction
 
 import (
 "encoding/json"
-"errors"
 "math/rand"
 "testing"
 
@@ -41,7 +40,7 @@ func TestOracleResponse_EncodeBinary(t *testing.T) {
 require.NoError(t, err)
 
 err = testserdes.DecodeBinary(bs, new(OracleResponse))
-require.True(t, errors.Is(err, ErrInvalidResponseCode), "got: %v", err)
+require.ErrorIs(t, err, ErrInvalidResponseCode)
 })
 t.Run("InvalidResult", func(t *testing.T) {
 r := &OracleResponse{
@@ -53,7 +52,7 @@ func TestOracleResponse_EncodeBinary(t *testing.T) {
 require.NoError(t, err)
 
 err = testserdes.DecodeBinary(bs, new(OracleResponse))
-require.True(t, errors.Is(err, ErrInvalidResult), "got: %v", err)
+require.ErrorIs(t, err, ErrInvalidResult)
 })
 })
 }
@@ -4,7 +4,6 @@ import (
 "encoding/base64"
 "encoding/hex"
 "encoding/json"
-"errors"
 "math"
 "testing"
 
@@ -125,7 +124,7 @@ func TestDecodingTxWithInvalidWitnessesNumber(t *testing.T) {
 tx.Scripts = []Witness{{InvocationScript: []byte{}, VerificationScript: []byte{}}, {InvocationScript: []byte{}, VerificationScript: []byte{}}}
 data, err := testserdes.EncodeBinary(tx)
 require.NoError(t, err)
-require.True(t, errors.Is(testserdes.DecodeBinary(data, new(Transaction)), ErrInvalidWitnessNum))
+require.ErrorIs(t, testserdes.DecodeBinary(data, new(Transaction)), ErrInvalidWitnessNum)
 }
 
 func TestUnmarshalNeoFSTX(t *testing.T) {
@@ -216,32 +215,32 @@ func TestTransaction_isValid(t *testing.T) {
 t.Run("InvalidVersion", func(t *testing.T) {
 tx := newTx()
 tx.Version = 1
-require.True(t, errors.Is(tx.isValid(), ErrInvalidVersion))
+require.ErrorIs(t, tx.isValid(), ErrInvalidVersion)
 })
 t.Run("NegativeSystemFee", func(t *testing.T) {
 tx := newTx()
 tx.SystemFee = -1
-require.True(t, errors.Is(tx.isValid(), ErrNegativeSystemFee))
+require.ErrorIs(t, tx.isValid(), ErrNegativeSystemFee)
 })
 t.Run("NegativeNetworkFee", func(t *testing.T) {
 tx := newTx()
 tx.NetworkFee = -1
-require.True(t, errors.Is(tx.isValid(), ErrNegativeNetworkFee))
+require.ErrorIs(t, tx.isValid(), ErrNegativeNetworkFee)
 })
 t.Run("TooBigFees", func(t *testing.T) {
 tx := newTx()
 tx.SystemFee = math.MaxInt64 - tx.NetworkFee + 1
-require.True(t, errors.Is(tx.isValid(), ErrTooBigFees))
+require.ErrorIs(t, tx.isValid(), ErrTooBigFees)
 })
 t.Run("EmptySigners", func(t *testing.T) {
 tx := newTx()
 tx.Signers = tx.Signers[:0]
-require.True(t, errors.Is(tx.isValid(), ErrEmptySigners))
+require.ErrorIs(t, tx.isValid(), ErrEmptySigners)
 })
 t.Run("NonUniqueSigners", func(t *testing.T) {
 tx := newTx()
 tx.Signers[1].Account = tx.Signers[0].Account
-require.True(t, errors.Is(tx.isValid(), ErrNonUniqueSigners))
+require.ErrorIs(t, tx.isValid(), ErrNonUniqueSigners)
 })
 t.Run("MultipleHighPriority", func(t *testing.T) {
 tx := newTx()
@@ -249,7 +248,7 @@ func TestTransaction_isValid(t *testing.T) {
 {Type: HighPriority},
 {Type: HighPriority},
 }
-require.True(t, errors.Is(tx.isValid(), ErrInvalidAttribute))
+require.ErrorIs(t, tx.isValid(), ErrInvalidAttribute)
 })
 t.Run("MultipleOracle", func(t *testing.T) {
 tx := newTx()
@@ -257,12 +256,12 @@ func TestTransaction_isValid(t *testing.T) {
 {Type: OracleResponseT},
 {Type: OracleResponseT},
 }
-require.True(t, errors.Is(tx.isValid(), ErrInvalidAttribute))
+require.ErrorIs(t, tx.isValid(), ErrInvalidAttribute)
 })
 t.Run("NoScript", func(t *testing.T) {
 tx := newTx()
 tx.Script = []byte{}
-require.True(t, errors.Is(tx.isValid(), ErrEmptyScript))
+require.ErrorIs(t, tx.isValid(), ErrEmptyScript)
 })
 }
 
@@ -107,7 +107,7 @@ func TestRemoveStale(t *testing.T) {
 func (p *Pool) testAdd(t *testing.T, expectedOk bool, expectedErr error, ep *payload.Extensible) {
 ok, err := p.Add(ep)
 if expectedErr != nil {
-require.True(t, errors.Is(err, expectedErr), "got: %v", err)
+require.ErrorIs(t, err, expectedErr)
 } else {
 require.NoError(t, err)
 }
@@ -1,7 +1,6 @@
 package payload
 
 import (
-"errors"
 gio "io"
 "testing"
 
@@ -34,11 +33,11 @@ func TestExtensible_Serializable(t *testing.T) {
 
 t.Run("unexpected EOF", func(t *testing.T) {
 err := testserdes.DecodeBinary(unsigned, new(Extensible))
-require.True(t, errors.Is(err, gio.EOF))
+require.ErrorIs(t, err, gio.EOF)
 })
 t.Run("invalid padding", func(t *testing.T) {
 err := testserdes.DecodeBinary(append(unsigned, 42), new(Extensible))
-require.True(t, errors.Is(err, errInvalidPadding))
+require.ErrorIs(t, err, errInvalidPadding)
 })
 t.Run("too large data size", func(t *testing.T) {
 expected.Data = make([]byte, MaxSize+1)
@@ -1,7 +1,6 @@
 package payload
 
 import (
-"errors"
 "testing"
 
 "github.com/stretchr/testify/require"
@@ -52,7 +51,7 @@ func TestMerkleBlock_EncodeDecodeBinary(t *testing.T) {
 }
 data, err := testserdes.EncodeBinary(expected)
 require.NoError(t, err)
-require.True(t, errors.Is(block.ErrMaxContentsPerBlock, testserdes.DecodeBinary(data, new(MerkleBlock))))
+require.ErrorIs(t, testserdes.DecodeBinary(data, new(MerkleBlock)), block.ErrMaxContentsPerBlock)
 })
 
 t.Run("bad flags size", func(t *testing.T) {
@@ -104,7 +104,7 @@ func TestServerStartAndShutdown(t *testing.T) {
 require.True(t, s.transports[0].(*fakeTransp).closed.Load())
 err, ok := p.droppedWith.Load().(error)
 require.True(t, ok)
-require.True(t, errors.Is(err, errServerShutdown))
+require.ErrorIs(t, err, errServerShutdown)
 })
 t.Run("with consensus", func(t *testing.T) {
 s := newTestServer(t, ServerConfig{})
@@ -161,7 +161,7 @@ func TestServerRegisterPeer(t *testing.T) {
 require.True(t, index >= 0)
 err, ok := ps[index].droppedWith.Load().(error)
 require.True(t, ok)
-require.True(t, errors.Is(err, errMaxPeers))
+require.ErrorIs(t, err, errMaxPeers)
 
 index = (index + 1) % peerCount
 s.unregister <- peerDrop{ps[index], errIdenticalID}
@@ -6,7 +6,6 @@ import (
 "encoding/base64"
 "encoding/hex"
 "encoding/json"
-"errors"
 "fmt"
 "math/big"
 "net/http"
@@ -2013,7 +2012,7 @@ func TestGetNetwork(t *testing.T) {
 c.getNextRequestID = getTestRequestID
 // network was not initialised
 _, err = c.GetNetwork()
-require.True(t, errors.Is(err, errNetworkNotInitialized))
+require.ErrorIs(t, err, errNetworkNotInitialized)
 require.Equal(t, false, c.cache.initDone)
 })
 
@@ -3,7 +3,6 @@ package rpcclient
 import (
 "context"
 "encoding/json"
-"errors"
 "fmt"
 "net/http"
 "net/http/httptest"
@@ -753,7 +752,7 @@ func TestWS_RequestAfterClose(t *testing.T) {
 _, err = c.GetBlockCount()
 })
 require.Error(t, err)
-require.True(t, errors.Is(err, ErrWSConnLost))
+require.ErrorIs(t, err, ErrWSConnLost)
 }
 
 func TestWSClient_ConnClosedError(t *testing.T) {
@@ -1,7 +1,6 @@
 package neofs
 
 import (
-"errors"
 "net/url"
 "testing"
 
@@ -61,7 +60,7 @@ func TestParseNeoFSURL(t *testing.T) {
 require.NoError(t, err)
 oa, ps, err := parseNeoFSURL(u)
 if tc.err != nil {
-require.True(t, errors.Is(err, tc.err), "got: %#v", err)
+require.ErrorIs(t, err, tc.err)
 return
 }
 require.NoError(t, err)
@@ -1,7 +1,6 @@
 package oracle
 
 import (
-"errors"
 "net"
 "strings"
 "testing"
@@ -41,8 +40,7 @@ func TestDefaultClient_RestrictedRedirectErr(t *testing.T) {
 for _, c := range testCases {
 t.Run(c, func(t *testing.T) {
 _, err := cl.Get(c) //nolint:bodyclose // It errors out and it's a test.
-require.Error(t, err)
-require.True(t, errors.Is(err, ErrRestrictedRedirect), err)
+require.ErrorIs(t, err, ErrRestrictedRedirect)
 require.True(t, strings.Contains(err.Error(), "IP is not global unicast"), err)
 })
 }
@@ -2,7 +2,6 @@ package stateroot_test
 
 import (
 "crypto/elliptic"
-"errors"
 "path/filepath"
 "sort"
 "testing"
@@ -133,7 +132,7 @@ func TestStateRoot(t *testing.T) {
 require.NoError(t, err)
 data := testSignStateRoot(t, r, pubs, accInv)
 err = srv.OnPayload(&payload.Extensible{Data: data})
-require.True(t, errors.Is(err, core.ErrWitnessHashMismatch), "got: %v", err)
+require.ErrorIs(t, err, core.ErrWitnessHashMismatch)
 require.EqualValues(t, 0, bc.GetStateModule().CurrentValidatedHeight())
 })
 
@@ -1,7 +1,6 @@
 package standard
 
 import (
-"errors"
 "testing"
 
 "github.com/nspcc-dev/neo-go/pkg/smartcontract"
@@ -39,14 +38,14 @@ func TestComplyMissingMethod(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetMethod("foo", -1).Name = "notafoo"
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrMethodMissing))
+require.ErrorIs(t, err, ErrMethodMissing)
 }
 
 func TestComplyInvalidReturnType(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetMethod("foo", -1).ReturnType = smartcontract.VoidType
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrInvalidReturnType))
+require.ErrorIs(t, err, ErrInvalidReturnType)
 }
 
 func TestComplyMethodParameterCount(t *testing.T) {
@@ -55,14 +54,14 @@ func TestComplyMethodParameterCount(t *testing.T) {
 f := m.ABI.GetMethod("foo", -1)
 f.Parameters = append(f.Parameters, manifest.Parameter{Type: smartcontract.BoolType})
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrMethodMissing))
+require.ErrorIs(t, err, ErrMethodMissing)
 })
 t.Run("Event", func(t *testing.T) {
 m := fooMethodBarEvent()
 ev := m.ABI.GetEvent("bar")
 ev.Parameters = ev.Parameters[:0]
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrInvalidParameterCount))
+require.ErrorIs(t, err, ErrInvalidParameterCount)
 })
 }
 
@@ -71,13 +70,13 @@ func TestComplyParameterType(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetMethod("foo", -1).Parameters[0].Type = smartcontract.InteropInterfaceType
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrInvalidParameterType))
+require.ErrorIs(t, err, ErrInvalidParameterType)
 })
 t.Run("Event", func(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetEvent("bar").Parameters[0].Type = smartcontract.InteropInterfaceType
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrInvalidParameterType))
+require.ErrorIs(t, err, ErrInvalidParameterType)
 })
 }
 
@@ -87,7 +86,7 @@ func TestComplyParameterName(t *testing.T) {
 m.ABI.GetMethod("foo", -1).Parameters[0].Name = "hehe"
 s := &Standard{Manifest: *fooMethodBarEvent()}
 err := Comply(m, s)
-require.True(t, errors.Is(err, ErrInvalidParameterName))
+require.ErrorIs(t, err, ErrInvalidParameterName)
 require.NoError(t, ComplyABI(m, s))
 })
 t.Run("Event", func(t *testing.T) {
@@ -95,7 +94,7 @@ func TestComplyParameterName(t *testing.T) {
 m.ABI.GetEvent("bar").Parameters[0].Name = "hehe"
 s := &Standard{Manifest: *fooMethodBarEvent()}
 err := Comply(m, s)
-require.True(t, errors.Is(err, ErrInvalidParameterName))
+require.ErrorIs(t, err, ErrInvalidParameterName)
 require.NoError(t, ComplyABI(m, s))
 })
 }
@@ -104,14 +103,14 @@ func TestMissingEvent(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetEvent("bar").Name = "notabar"
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrEventMissing))
+require.ErrorIs(t, err, ErrEventMissing)
 }
 
 func TestSafeFlag(t *testing.T) {
 m := fooMethodBarEvent()
 m.ABI.GetMethod("foo", -1).Safe = false
 err := Comply(m, &Standard{Manifest: *fooMethodBarEvent()})
-require.True(t, errors.Is(err, ErrSafeMethodMismatch))
+require.ErrorIs(t, err, ErrSafeMethodMismatch)
 }
 
 func TestComplyValid(t *testing.T) {
@@ -1,7 +1,6 @@
 package nef
 
 import (
-"errors"
 "strings"
 "testing"
 
@@ -37,7 +36,7 @@ func TestMethodToken_Serializable(t *testing.T) {
 data, err := testserdes.EncodeBinary(tok)
 require.NoError(t, err)
 err = testserdes.DecodeBinary(data, new(MethodToken))
-require.True(t, errors.Is(err, errInvalidMethodName))
+require.ErrorIs(t, err, errInvalidMethodName)
 })
 t.Run("invalid call flag", func(t *testing.T) {
 tok := getToken()
@@ -45,6 +44,6 @@ func TestMethodToken_Serializable(t *testing.T) {
 data, err := testserdes.EncodeBinary(tok)
 require.NoError(t, err)
 err = testserdes.DecodeBinary(data, new(MethodToken))
-require.True(t, errors.Is(err, errInvalidCallFlag))
+require.ErrorIs(t, err, errInvalidCallFlag)
 })
 }
@@ -3,7 +3,6 @@ package nef
 import (
 "encoding/base64"
 "encoding/json"
-"errors"
 "strconv"
 "testing"
 
@@ -87,12 +86,12 @@ func TestEncodeDecodeBinary(t *testing.T) {
 sz := io.GetVarSize(&expected.Header)
 bytes[sz] = 1
 err = testserdes.DecodeBinary(bytes, new(File))
-require.True(t, errors.Is(err, errInvalidReserved), "got: %v", err)
+require.ErrorIs(t, err, errInvalidReserved)
 
 bytes[sz] = 0
 bytes[sz+3] = 1
 err = testserdes.DecodeBinary(bytes, new(File))
-require.True(t, errors.Is(err, errInvalidReserved), "got: %v", err)
+require.ErrorIs(t, err, errInvalidReserved)
 })
 }
 
@@ -1,7 +1,6 @@
 package stackitem
 
 import (
-"errors"
 "math/big"
 "testing"
 
@@ -56,7 +55,7 @@ func TestFromToJSON(t *testing.T) {
 require.NoError(t, err)
 
 _, err = FromJSON([]byte(js), 3)
-require.True(t, errors.Is(err, errTooBigElements), err)
+require.ErrorIs(t, err, errTooBigElements)
 })
 })
 t.Run("Map", func(t *testing.T) {
@@ -79,7 +78,7 @@ func TestFromToJSON(t *testing.T) {
 require.NoError(t, err)
 
 _, err = FromJSON([]byte(js), 4)
-require.True(t, errors.Is(err, errTooBigElements), err)
+require.ErrorIs(t, err, errTooBigElements)
 })
 })
 t.Run("Invalid", func(t *testing.T) {
@@ -126,7 +125,7 @@ func TestFromToJSON(t *testing.T) {
 func testToJSON(t *testing.T, expectedErr error, item Item) {
 data, err := ToJSON(item)
 if expectedErr != nil {
-require.True(t, errors.Is(err, expectedErr), err)
+require.ErrorIs(t, err, expectedErr)
 return
 }
 require.NoError(t, err)
@@ -313,16 +312,16 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
 items[i] = NewBuffer(bigBuf)
 }
 _, err := ToJSONWithTypes(NewArray(items))
-require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+require.ErrorIs(t, err, errTooBigSize)
 })
 t.Run("overflow on primitive item", func(t *testing.T) {
 _, err := ToJSONWithTypes(NewBuffer(bigBuf))
-require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+require.ErrorIs(t, err, errTooBigSize)
 })
 t.Run("overflow on array element", func(t *testing.T) {
 b := NewBuffer(bigBuf[:MaxSize/2])
 _, err := ToJSONWithTypes(NewArray([]Item{b, b}))
-require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
+require.ErrorIs(t, err, errTooBigSize)
 })
 t.Run("overflow on map key", func(t *testing.T) {
 m := NewMapWithValue([]MapElement{
@ -330,7 +329,7 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
|
||||||
{NewByteArray(bigBuf), NewBool(true)},
|
{NewByteArray(bigBuf), NewBool(true)},
|
||||||
})
|
})
|
||||||
_, err := ToJSONWithTypes(m)
|
_, err := ToJSONWithTypes(m)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("overflow on the last byte of array", func(t *testing.T) {
|
t.Run("overflow on the last byte of array", func(t *testing.T) {
|
||||||
// Construct big enough buffer and pad with integer digits
|
// Construct big enough buffer and pad with integer digits
|
||||||
|
@ -340,7 +339,7 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
|
||||||
NewBigInteger(big.NewInt(1234)),
|
NewBigInteger(big.NewInt(1234)),
|
||||||
})
|
})
|
||||||
_, err := ToJSONWithTypes(arr)
|
_, err := ToJSONWithTypes(arr)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("overflow on the item prefix", func(t *testing.T) {
|
t.Run("overflow on the item prefix", func(t *testing.T) {
|
||||||
arr := NewArray([]Item{
|
arr := NewArray([]Item{
|
||||||
|
@ -348,7 +347,7 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
|
||||||
NewBool(true),
|
NewBool(true),
|
||||||
})
|
})
|
||||||
_, err := ToJSONWithTypes(arr)
|
_, err := ToJSONWithTypes(arr)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("overflow on null", func(t *testing.T) {
|
t.Run("overflow on null", func(t *testing.T) {
|
||||||
arr := NewArray([]Item{
|
arr := NewArray([]Item{
|
||||||
|
@ -356,7 +355,7 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
|
||||||
Null{},
|
Null{},
|
||||||
})
|
})
|
||||||
_, err := ToJSONWithTypes(arr)
|
_, err := ToJSONWithTypes(arr)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("overflow on interop", func(t *testing.T) {
|
t.Run("overflow on interop", func(t *testing.T) {
|
||||||
arr := NewArray([]Item{
|
arr := NewArray([]Item{
|
||||||
|
@ -364,17 +363,17 @@ func TestToJSONWithTypesBadCases(t *testing.T) {
|
||||||
NewInterop(42),
|
NewInterop(42),
|
||||||
})
|
})
|
||||||
_, err := ToJSONWithTypes(arr)
|
_, err := ToJSONWithTypes(arr)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("overflow on cached item", func(t *testing.T) {
|
t.Run("overflow on cached item", func(t *testing.T) {
|
||||||
b := NewArray([]Item{NewByteArray(bigBuf[:MaxSize/2])})
|
b := NewArray([]Item{NewByteArray(bigBuf[:MaxSize/2])})
|
||||||
arr := NewArray([]Item{b, b})
|
arr := NewArray([]Item{b, b})
|
||||||
_, err := ToJSONWithTypes(arr)
|
_, err := ToJSONWithTypes(arr)
|
||||||
require.True(t, errors.Is(err, errTooBigSize), "got: %v", err)
|
require.ErrorIs(t, err, errTooBigSize)
|
||||||
})
|
})
|
||||||
t.Run("invalid type", func(t *testing.T) {
|
t.Run("invalid type", func(t *testing.T) {
|
||||||
_, err := ToJSONWithTypes(nil)
|
_, err := ToJSONWithTypes(nil)
|
||||||
require.True(t, errors.Is(err, ErrUnserializable), "got: %v", err)
|
require.ErrorIs(t, err, ErrUnserializable)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
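Both table-style helpers touched in this change (testToJSON above and testSerialize in the next file) keep the same shape after the rewrite: a non-nil expectedErr is asserted with require.ErrorIs and the helper returns early, otherwise the call must succeed. A rough sketch of that shape under assumed names (process and check are illustrative, not part of this repository):

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

// errTooBig is a hypothetical sentinel error, analogous to ErrTooBig above.
var errTooBig = errors.New("too big")

// process is a hypothetical function under test.
func process(n int) ([]byte, error) {
	if n > 10 {
		return nil, errTooBig
	}
	return make([]byte, n), nil
}

// check mirrors the helper shape: nil expectedErr means the call must
// succeed, a non-nil one must be found in the returned error's chain.
func check(t *testing.T, expectedErr error, n int) {
	data, err := process(n)
	if expectedErr != nil {
		require.ErrorIs(t, err, expectedErr)
		return
	}
	require.NoError(t, err)
	require.NotNil(t, data)
}

func TestProcess(t *testing.T) {
	check(t, nil, 5)
	check(t, errTooBig, 42)
}
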
@@ -1,7 +1,6 @@
 package stackitem
 
 import (
-	"errors"
 	"testing"
 
 	"github.com/nspcc-dev/neo-go/pkg/io"
@@ -20,13 +19,13 @@ func TestSerializationMaxErr(t *testing.T) {
 	require.NoError(t, err)
 
 	_, err = Serialize(aitem)
-	require.True(t, errors.Is(err, ErrTooBig), err)
+	require.ErrorIs(t, err, ErrTooBig)
 }
 
 func testSerialize(t *testing.T, expectedErr error, item Item) {
 	data, err := Serialize(item)
 	if expectedErr != nil {
-		require.True(t, errors.Is(err, expectedErr), err)
+		require.ErrorIs(t, err, expectedErr)
 		return
 	}
 	require.NoError(t, err)
@@ -62,7 +61,7 @@ func TestSerialize(t *testing.T) {
 		data, err := Serialize(newItem(items))
 		require.NoError(t, err)
 		_, err = Deserialize(data)
-		require.True(t, errors.Is(err, ErrTooBig), err)
+		require.ErrorIs(t, err, ErrTooBig)
 	}
 	t.Run("array", func(t *testing.T) {
 		testArray(t, func(items []Item) Item { return NewArray(items) })
@@ -169,7 +168,7 @@ func TestSerialize(t *testing.T) {
 		data, err := Serialize(m)
 		require.NoError(t, err)
 		_, err = Deserialize(data)
-		require.True(t, errors.Is(err, ErrTooBig), err)
+		require.ErrorIs(t, err, ErrTooBig)
 	})
 }
 
@@ -189,7 +188,7 @@ func TestMapDeserializationError(t *testing.T) {
 	EncodeBinaryProtected(m, w.BinWriter)
 	require.NoError(t, w.Err)
 	_, err := Deserialize(w.Bytes())
-	require.True(t, errors.Is(err, ErrInvalidType), err)
+	require.ErrorIs(t, err, ErrInvalidType)
 }
 
 func TestDeserializeTooManyElements(t *testing.T) {
@@ -206,7 +205,7 @@ func TestDeserializeTooManyElements(t *testing.T) {
 	data, err = Serialize(item)
 	require.NoError(t, err)
 	_, err = Deserialize(data)
-	require.True(t, errors.Is(err, ErrTooBig), err)
+	require.ErrorIs(t, err, ErrTooBig)
 }
 
 func TestDeserializeLimited(t *testing.T) {
@@ -226,7 +225,7 @@ func TestDeserializeLimited(t *testing.T) {
 	require.NoError(t, err)
 	_, err = DeserializeLimited(data, customLimit)
 	require.Error(t, err)
-	require.True(t, errors.Is(err, ErrTooBig), err)
+	require.ErrorIs(t, err, ErrTooBig)
 }
 
 func BenchmarkEncodeBinary(b *testing.B) {

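One small observation on the last hunk: the pre-existing require.Error(t, err) line is kept even though require.ErrorIs would already fail for a nil error, since errors.Is(nil, target) is false for a non-nil target; the extra assertion is redundant but harmless. A tiny sketch of that behaviour (errLimit is a hypothetical sentinel, not ErrTooBig from this package):

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

// errLimit is a hypothetical sentinel error.
var errLimit = errors.New("limit exceeded")

func TestErrorIsNilBehaviour(t *testing.T) {
	var err error // nil: no error occurred yet

	// errors.Is reports false for a nil error and a non-nil target, so an
	// ErrorIs assertion on a nil error fails on its own.
	require.False(t, errors.Is(err, errLimit))

	err = errLimit
	require.ErrorIs(t, err, errLimit) // passes once the sentinel is returned
}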