Mirror of https://github.com/nspcc-dev/neo-go.git
Synced 2025-01-30 19:30:13 +00:00
Commit 4d2b88dd9d
4 changed files with 119 additions and 6 deletions
@@ -6,8 +6,13 @@ import (
     "github.com/nspcc-dev/neo-go/pkg/io"
 )
 
-// MaxCapabilities is the maximum number of capabilities per payload.
-const MaxCapabilities = 32
+const (
+    // MaxCapabilities is the maximum number of capabilities per payload.
+    MaxCapabilities = 32
+
+    // MaxDataSize is the maximum size of capability payload.
+    MaxDataSize = 1024
+)
 
 // Capabilities is a list of Capability.
 type Capabilities []Capability
@@ -26,9 +31,14 @@ func (cs *Capabilities) EncodeBinary(br *io.BinWriter) {
 // checkUniqueCapabilities checks whether payload capabilities have a unique type.
 func (cs Capabilities) checkUniqueCapabilities() error {
     err := errors.New("capabilities with the same type are not allowed")
-    var isFullNode, isTCP, isWS bool
+    var isFullNode, isArchived, isTCP, isWS bool
     for _, cap := range cs {
         switch cap.Type {
+        case ArchivalNode:
+            if isArchived {
+                return err
+            }
+            isArchived = true
         case FullNode:
             if isFullNode {
                 return err
@@ -44,6 +54,7 @@ func (cs Capabilities) checkUniqueCapabilities() error {
                 return err
             }
             isWS = true
+        default: /* OK to have duplicates */
         }
     }
     return nil
@@ -59,13 +70,14 @@ type Capability struct {
 func (c *Capability) DecodeBinary(br *io.BinReader) {
     c.Type = Type(br.ReadB())
     switch c.Type {
+    case ArchivalNode:
+        c.Data = &Archival{}
     case FullNode:
         c.Data = &Node{}
     case TCPServer, WSServer:
         c.Data = &Server{}
     default:
-        br.Err = errors.New("unknown node capability type")
-        return
+        c.Data = &Unknown{}
     }
     c.Data.DecodeBinary(br)
 }
@@ -110,3 +122,35 @@ func (s *Server) DecodeBinary(br *io.BinReader) {
 func (s *Server) EncodeBinary(bw *io.BinWriter) {
     bw.WriteU16LE(s.Port)
 }
+
+// Archival represents an archival node that stores all blocks.
+type Archival struct{}
+
+// DecodeBinary implements io.Serializable.
+func (a *Archival) DecodeBinary(br *io.BinReader) {
+    var zero = br.ReadB() // Zero-length byte array as per Unknown.
+    if zero != 0 {
+        br.Err = errors.New("archival capability with non-zero data")
+    }
+}
+
+// EncodeBinary implements io.Serializable.
+func (a *Archival) EncodeBinary(bw *io.BinWriter) {
+    bw.WriteB(0)
+}
+
+// Unknown represents an unknown capability with some data. Other nodes can
+// decode it even if they can't interpret it. This is not expected to be used
+// for sending data directly (proper new types should be used), but it allows
+// for easier protocol extensibility (old nodes won't reject new capabilities).
+type Unknown []byte
+
+// DecodeBinary implements io.Serializable.
+func (u *Unknown) DecodeBinary(br *io.BinReader) {
+    *u = br.ReadVarBytes()
+}
+
+// EncodeBinary implements io.Serializable.
+func (u *Unknown) EncodeBinary(bw *io.BinWriter) {
+    bw.WriteVarBytes(*u)
+}
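Note (not part of the commit): with the default branch above, a capability whose type byte the node does not recognize now decodes into Unknown instead of failing the payload. Below is a minimal sketch of that behaviour; it assumes pkg/io provides NewBinReaderFromBuf and a BinReader.Err field as used elsewhere in the codebase.

package main

import (
    "fmt"

    "github.com/nspcc-dev/neo-go/pkg/io"
    "github.com/nspcc-dev/neo-go/pkg/network/capability"
)

func main() {
    // Reserved type 0xf0 followed by a two-byte var-encoded payload.
    raw := []byte{0xf0, 0x02, 0x55, 0xaa}

    var c capability.Capability
    br := io.NewBinReaderFromBuf(raw) // assumed reader constructor
    c.DecodeBinary(br)
    fmt.Println(br.Err, c.Type, c.Data) // expect no error, type 0xf0 and the raw bytes in Unknown
}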
pkg/network/capability/capability_test.go (new file, 48 lines)
@@ -0,0 +1,48 @@
+package capability
+
+import (
+    "testing"
+
+    "github.com/nspcc-dev/neo-go/internal/testserdes"
+    "github.com/stretchr/testify/require"
+)
+
+func TestUnknownEncodeDecode(t *testing.T) {
+    var (
+        u  = Unknown{0x55, 0xaa}
+        ud Unknown
+    )
+    testserdes.EncodeDecodeBinary(t, &u, &ud)
+}
+
+func TestArchivalEncodeDecode(t *testing.T) {
+    var (
+        a  = Archival{}
+        ad Archival
+    )
+    testserdes.EncodeDecodeBinary(t, &a, &ad)
+
+    var bad = []byte{0x02, 0x55, 0xaa} // Two-byte var-encoded string.
+    require.Error(t, testserdes.DecodeBinary(bad, &ad))
+}
+
+func TestCheckUniqueError(t *testing.T) {
+    // Successful cases are already checked in Version payload test.
+    var caps Capabilities
+
+    for _, bad := range [][]byte{
+        {0x02, 0x11, 0x00, 0x11, 0x00},                                     // 2 Archival
+        {0x02, 0x10, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00}, // 2 FullNode
+        {0x02, 0x01, 0x55, 0xaa, 0x01, 0x55, 0xaa},                         // 2 TCPServer
+        {0x02, 0x02, 0x55, 0xaa, 0x02, 0x55, 0xaa},                         // 2 WSServer
+    } {
+        require.Error(t, testserdes.DecodeBinary(bad, &caps))
+    }
+    for _, good := range [][]byte{
+        {0x02, 0x11, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00}, // Archival + FullNode
+        {0x02, 0x01, 0x55, 0xaa, 0x02, 0x55, 0xaa},       // TCPServer + WSServer
+        {0x02, 0xf0, 0x00, 0xf0, 0x00},                   // 2 Reserved 0xf0
+    } {
+        require.NoError(t, testserdes.DecodeBinary(good, &caps))
+    }
+}
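Aside (not part of the commit): the raw test vectors above follow the Capabilities wire format seen in this diff: a var-encoded capability count, then for each capability a type byte followed by its data (one zero byte for Archival, a 4-byte little-endian start height for FullNode, a 2-byte port for TCPServer/WSServer, var-bytes for Unknown). A hedged sketch decoding the "Archival + FullNode" vector, again assuming io.NewBinReaderFromBuf exists:

package main

import (
    "fmt"

    "github.com/nspcc-dev/neo-go/pkg/io"
    "github.com/nspcc-dev/neo-go/pkg/network/capability"
)

func main() {
    // Count 0x02, then ArchivalNode (0x11) + zero byte, then FullNode (0x10) + uint32 start height.
    good := []byte{0x02, 0x11, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00}

    var caps capability.Capabilities
    br := io.NewBinReaderFromBuf(good) // assumed reader constructor
    caps.DecodeBinary(br)
    fmt.Println(br.Err, len(caps)) // expect no error and two capabilities
}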
@@ -8,6 +8,15 @@ const (
     TCPServer Type = 0x01
     // WSServer represents WebSocket node capability type.
     WSServer Type = 0x02
-    // FullNode represents full node capability type.
+    // FullNode represents a node that has complete current state.
     FullNode Type = 0x10
+    // ArchivalNode represents a node that stores full block history.
+    // These nodes can be used for P2P synchronization from genesis
+    // (FullNode can cut the tail and may not respond to requests for
+    // old (wrt MaxTraceableBlocks) blocks).
+    ArchivalNode Type = 0x11
+
+    // 0xf0-0xff are reserved for private experiments.
+    ReservedFirst Type = 0xf0
+    ReservedLast  Type = 0xff
 )
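A hypothetical use of the new constant (not in this commit): a peer advertising ArchivalNode can be preferred when requesting blocks older than MaxTraceableBlocks. The helper below is an illustrative sketch only; its name and placement are assumptions.

package main

import (
    "fmt"

    "github.com/nspcc-dev/neo-go/pkg/network/capability"
)

// isArchival reports whether a peer's capability list advertises full block
// history. Hypothetical helper, not part of the commit.
func isArchival(caps capability.Capabilities) bool {
    for _, c := range caps {
        if c.Type == capability.ArchivalNode {
            return true
        }
    }
    return false
}

func main() {
    caps := capability.Capabilities{
        {Type: capability.FullNode, Data: &capability.Node{StartHeight: 0}},
        {Type: capability.ArchivalNode, Data: &capability.Archival{}},
    }
    fmt.Println(isArchival(caps)) // true
}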
@@ -29,12 +29,24 @@ func TestVersionEncodeDecode(t *testing.T) {
                 Port: wsPort,
             },
         },
+        {
+            Type: capability.ArchivalNode,
+            Data: &capability.Archival{},
+        },
+        {
+            Type: 0xff,
+            Data: &capability.Unknown{},
+        },
         {
             Type: capability.FullNode,
             Data: &capability.Node{
                 StartHeight: height,
             },
         },
+        {
+            Type: 0xf0,
+            Data: &capability.Unknown{0x55, 0xaa},
+        },
     }
 
     version := NewVersion(magic, id, useragent, capabilities)