Merge pull request #68 from nspcc-dev/stringify-object-method

object: Stringify method

commit 72a71e6a94

5 changed files with 302 additions and 1 deletion
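In short: the object package gains a handwritten String() for CreationPoint (with the generated proto stringer disabled), plus a Stringify helper that pretty-prints an Object's system headers, user headers, and payload to an io.Writer; a golden-output test covers every header variant.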
Makefile (2 lines changed)

@@ -1,4 +1,4 @@
-PROTO_VERSION=v0.7.2
+PROTO_VERSION=v0.7.3
 PROTO_URL=https://github.com/nspcc-dev/neofs-api/archive/$(PROTO_VERSION).tar.gz
 
 B=\033[0;1m
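The only substantive change here is the version bump; since PROTO_URL is derived from PROTO_VERSION, the build now fetches the v0.7.3 archive of nspcc-dev/neofs-api.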
object/types.go (118 lines changed)

@@ -3,10 +3,14 @@ package object

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"reflect"

	"github.com/gogo/protobuf/proto"
	"github.com/nspcc-dev/neofs-api-go/internal"
	"github.com/nspcc-dev/neofs-api-go/refs"
	"github.com/pkg/errors"
)

type (

@@ -262,3 +266,117 @@ func (m Object) Address() *refs.Address {
		CID: m.SystemHeader.CID,
	}
}

func (m CreationPoint) String() string {
	return fmt.Sprintf(`{UnixTime=%d Epoch=%d}`, m.UnixTime, m.Epoch)
}

// Stringify converts an object into string format.
func Stringify(dst io.Writer, obj *Object) error {
	// put empty line
	if _, err := fmt.Fprintln(dst); err != nil {
		return err
	}

	// put object line
	if _, err := fmt.Fprintln(dst, "Object:"); err != nil {
		return err
	}

	// put system headers
	if _, err := fmt.Fprintln(dst, "\tSystemHeader:"); err != nil {
		return err
	}

	sysHeaders := []string{"ID", "CID", "OwnerID", "Version", "PayloadLength", "CreatedAt"}
	v := reflect.ValueOf(obj.SystemHeader)
	for _, key := range sysHeaders {
		if !v.FieldByName(key).IsValid() {
			return errors.Errorf("invalid system header key: %q", key)
		}

		val := v.FieldByName(key).Interface()
		if _, err := fmt.Fprintf(dst, "\t\t- %s=%v\n", key, val); err != nil {
			return err
		}
	}

	// put user headers
	if _, err := fmt.Fprintln(dst, "\tUserHeaders:"); err != nil {
		return err
	}

	for _, header := range obj.Headers {
		var (
			typ = reflect.ValueOf(header.Value)
			key string
			val interface{}
		)

		switch t := typ.Interface().(type) {
		case *Header_Link:
			key = "Link"
			val = fmt.Sprintf(`{Type=%s ID=%s}`, t.Link.Type, t.Link.ID)
		case *Header_Redirect:
			key = "Redirect"
			val = fmt.Sprintf(`{CID=%s OID=%s}`, t.Redirect.CID, t.Redirect.ObjectID)
		case *Header_UserHeader:
			key = "UserHeader"
			val = fmt.Sprintf(`{Key=%s Val=%s}`, t.UserHeader.Key, t.UserHeader.Value)
		case *Header_Transform:
			key = "Transform"
			val = t.Transform.Type.String()
		case *Header_Tombstone:
			key = "Tombstone"
			val = "MARKED"
		case *Header_Token:
			key = "Token"
			val = fmt.Sprintf("{"+
				"ID=%s OwnerID=%s Verb=%s Address=%s Created=%d ValidUntil=%d SessionKey=%02x Signature=%02x"+
				"}",
				t.Token.Token_Info.ID,
				t.Token.Token_Info.OwnerID,
				t.Token.Token_Info.Verb,
				t.Token.Token_Info.Address,
				t.Token.Token_Info.Created,
				t.Token.Token_Info.ValidUntil,
				t.Token.Token_Info.SessionKey,
				t.Token.Signature)
		case *Header_HomoHash:
			key = "HomoHash"
			val = t.HomoHash
		case *Header_PayloadChecksum:
			key = "PayloadChecksum"
			val = t.PayloadChecksum
		case *Header_Integrity:
			key = "Integrity"
			val = fmt.Sprintf(`{Checksum=%02x Signature=%02x}`,
				t.Integrity.HeadersChecksum,
				t.Integrity.ChecksumSignature)
		case *Header_StorageGroup:
			key = "StorageGroup"
			val = fmt.Sprintf(`{DataSize=%d Hash=%02x Lifetime={Unit=%s Value=%d}}`,
				t.StorageGroup.ValidationDataSize,
				t.StorageGroup.ValidationHash,
				t.StorageGroup.Lifetime.Unit,
				t.StorageGroup.Lifetime.Value)
		case *Header_PublicKey:
			key = "PublicKey"
			val = t.PublicKey.Value
		default:
			key = "Unknown"
			val = t
		}

		if _, err := fmt.Fprintf(dst, "\t\t- Type=%s\n\t\t Value=%v\n", key, val); err != nil {
			return err
		}
	}

	// put payload
	if _, err := fmt.Fprintf(dst, "\tPayload: %#v\n", obj.Payload); err != nil {
		return err
	}

	return nil
}
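For orientation, here is a minimal usage sketch of the new helper. The main package and the zero-valued Object are hypothetical and only for illustration; real callers would pass a populated object and any io.Writer (a file, a buffer, os.Stdout):

	package main

	import (
		"log"
		"os"

		"github.com/nspcc-dev/neofs-api-go/object"
	)

	func main() {
		obj := new(object.Object) // zero-valued; real code would set headers and payload

		// Stringify accepts any io.Writer, so output can go to stdout,
		// a file, or an in-memory buffer as in the test below.
		if err := object.Stringify(os.Stdout, obj); err != nil {
			log.Fatal(err)
		}
	}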
Binary file not shown.
@@ -70,6 +70,8 @@ message SystemHeader {
 }
 
 message CreationPoint {
+    option (gogoproto.goproto_stringer) = false;
+
     // UnixTime is a date of creation in unixtime format
     int64 UnixTime = 1;
     // Epoch is a date of creation in NeoFS epochs
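Setting gogoproto.goproto_stringer to false suppresses the code-generated String() for CreationPoint, which is what lets the handwritten method in object/types.go take its place. A minimal sketch of the resulting behavior (values are illustrative):

	package main

	import (
		"fmt"

		"github.com/nspcc-dev/neofs-api-go/object"
	)

	func main() {
		cp := object.CreationPoint{UnixTime: 1, Epoch: 1}
		// With the generated stringer disabled, fmt uses the handwritten String():
		fmt.Println(cp) // {UnixTime=1 Epoch=1}
	}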
object/types_test.go (new file, 181 lines)

@@ -0,0 +1,181 @@
package object

import (
	"bytes"
	"testing"

	"github.com/nspcc-dev/neofs-api-go/refs"
	"github.com/nspcc-dev/neofs-api-go/service"
	"github.com/nspcc-dev/neofs-api-go/storagegroup"
	"github.com/nspcc-dev/neofs-crypto/test"
	"github.com/stretchr/testify/require"
)

func TestStringify(t *testing.T) {
	res := `
Object:
	SystemHeader:
		- ID=7e0b9c6c-aabc-4985-949e-2680e577b48b
		- CID=11111111111111111111111111111111
		- OwnerID=ALYeYC41emF6MrmUMc4a8obEPdgFhq9ran
		- Version=1
		- PayloadLength=1
		- CreatedAt={UnixTime=1 Epoch=1}
	UserHeaders:
		- Type=Link
		 Value={Type=Child ID=7e0b9c6c-aabc-4985-949e-2680e577b48b}
		- Type=Redirect
		 Value={CID=11111111111111111111111111111111 OID=7e0b9c6c-aabc-4985-949e-2680e577b48b}
		- Type=UserHeader
		 Value={Key=test_key Val=test_value}
		- Type=Transform
		 Value=Split
		- Type=Tombstone
		 Value=MARKED
		- Type=Token
		 Value={ID=7e0b9c6c-aabc-4985-949e-2680e577b48b OwnerID=ALYeYC41emF6MrmUMc4a8obEPdgFhq9ran Verb=Search Address=11111111111111111111111111111111/7e0b9c6c-aabc-4985-949e-2680e577b48b Created=1 ValidUntil=2 SessionKey=010203040506 Signature=010203040506}
		- Type=HomoHash
		 Value=1111111111111111111111111111111111111111111111111111111111111111
		- Type=PayloadChecksum
		 Value=[1 2 3 4 5 6]
		- Type=Integrity
		 Value={Checksum=010203040506 Signature=010203040506}
		- Type=StorageGroup
		 Value={DataSize=5 Hash=31313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131313131 Lifetime={Unit=UnixTime Value=555}}
		- Type=PublicKey
		 Value=[1 2 3 4 5 6]
	Payload: []byte{0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7}
`

	key := test.DecodeKey(0)

	uid, err := refs.NewOwnerID(&key.PublicKey)
	require.NoError(t, err)

	var oid refs.UUID

	require.NoError(t, oid.Parse("7e0b9c6c-aabc-4985-949e-2680e577b48b"))

	obj := &Object{
		SystemHeader: SystemHeader{
			Version:       1,
			PayloadLength: 1,
			ID:            oid,
			OwnerID:       uid,
			CID:           CID{},
			CreatedAt: CreationPoint{
				UnixTime: 1,
				Epoch:    1,
			},
		},
		Payload: []byte{1, 2, 3, 4, 5, 6, 7},
	}

	// *Header_Link
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Link{
			Link: &Link{ID: oid, Type: Link_Child},
		},
	})

	// *Header_Redirect
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Redirect{
			Redirect: &Address{ObjectID: oid, CID: CID{}},
		},
	})

	// *Header_UserHeader
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_UserHeader{
			UserHeader: &UserHeader{
				Key:   "test_key",
				Value: "test_value",
			},
		},
	})

	// *Header_Transform
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Transform{
			Transform: &Transform{
				Type: Transform_Split,
			},
		},
	})

	// *Header_Tombstone
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Tombstone{
			Tombstone: &Tombstone{},
		},
	})

	// *Header_Token
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Token{
			Token: &Token{
				Signature: []byte{1, 2, 3, 4, 5, 6},
				Token_Info: service.Token_Info{
					ID:         oid,
					OwnerID:    uid,
					Verb:       service.Token_Info_Search,
					Address:    service.Address{ObjectID: oid, CID: refs.CID{}},
					Created:    1,
					ValidUntil: 2,
					SessionKey: []byte{1, 2, 3, 4, 5, 6},
				},
			},
		},
	})

	// *Header_HomoHash
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_HomoHash{
			HomoHash: Hash{},
		},
	})

	// *Header_PayloadChecksum
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_PayloadChecksum{
			PayloadChecksum: []byte{1, 2, 3, 4, 5, 6},
		},
	})

	// *Header_Integrity
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_Integrity{
			Integrity: &IntegrityHeader{
				HeadersChecksum:   []byte{1, 2, 3, 4, 5, 6},
				ChecksumSignature: []byte{1, 2, 3, 4, 5, 6},
			},
		},
	})

	// *Header_StorageGroup
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_StorageGroup{
			StorageGroup: &storagegroup.StorageGroup{
				ValidationDataSize: 5,
				ValidationHash:     storagegroup.Hash{},
				Lifetime: &storagegroup.StorageGroup_Lifetime{
					Unit:  storagegroup.StorageGroup_Lifetime_UnixTime,
					Value: 555,
				},
			},
		},
	})

	// *Header_PublicKey
	obj.Headers = append(obj.Headers, Header{
		Value: &Header_PublicKey{
			PublicKey: &PublicKey{Value: []byte{1, 2, 3, 4, 5, 6}},
		},
	})

	buf := new(bytes.Buffer)

	require.NoError(t, Stringify(buf, obj))
	require.Equal(t, res, buf.String())
}
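The test builds an Object carrying every Header variant that Stringify handles and compares the output against the golden string res, so any change to the output format has to update both the format strings in types.go and this fixture.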