Dep ensure (#1803)

* vendor: don't vendor the context stuff

We don't need to vendor these anymore, as we've moved to the standard
library for them.

* new stuff showing up with dep ensure

* remove go-shlex
Miek Gieben 2018-05-16 21:17:06 +01:00 committed by Yong Tang
parent cffa1948ab
commit 1e471a353e
10377 changed files with 4225826 additions and 54911 deletions

View file

@ -4,6 +4,7 @@ go:
- 1.7.x # go testing suite support, which we use, was introduced in go 1.7
- 1.8.x
- 1.9.x
- 1.10.x
- tip
script:
- go test -tags "alltests" -run Suite -coverprofile coverage.txt github.com/ugorji/go/codec

View file

@ -1,6 +1,6 @@
[![Sourcegraph](https://sourcegraph.com/github.com/ugorji/go/-/badge.svg)](https://sourcegraph.com/github.com/ugorji/go/-/blob/codec)
[![Sourcegraph](https://sourcegraph.com/github.com/ugorji/go/-/badge.svg?v=2)](https://sourcegraph.com/github.com/ugorji/go/-/tree/codec)
[![Build Status](https://travis-ci.org/ugorji/go.svg?branch=master)](https://travis-ci.org/ugorji/go)
[![codecov](https://codecov.io/gh/ugorji/go/branch/master/graph/badge.svg)](https://codecov.io/gh/ugorji/go)
[![codecov](https://codecov.io/gh/ugorji/go/branch/master/graph/badge.svg?v=2)](https://codecov.io/gh/ugorji/go)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/ugorji/go/codec)
[![rcard](https://goreportcard.com/badge/github.com/ugorji/go/codec?v=2)](https://goreportcard.com/report/github.com/ugorji/go/codec)
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/ugorji/go/master/LICENSE)
@ -25,3 +25,7 @@ Install using:
go get github.com/ugorji/go/codec
TODO:
- [ ] 2018-03-12 - Release v1.1.1 containing 32-bit fixes

View file

@ -225,7 +225,7 @@ with some caveats. See Encode documentation.
package codec
// TODO:
// - In Go 1.10, when mid-stack inlining is enabled,
// - For Go 1.11, when mid-stack inlining is enabled,
// we should use committed functions for writeXXX and readXXX calls.
// This involves uncommenting the methods for decReaderSwitch and encWriterSwitch
// and using those (decReaderSwitch and encWriterSwitch) in all handles

View file

@ -55,6 +55,50 @@ const (
// others not currently supported
)
func bincdesc(vd, vs byte) string {
switch vd {
case bincVdSpecial:
switch vs {
case bincSpNil:
return "nil"
case bincSpFalse:
return "false"
case bincSpTrue:
return "true"
case bincSpNan, bincSpPosInf, bincSpNegInf, bincSpZeroFloat:
return "float"
case bincSpZero:
return "uint"
case bincSpNegOne:
return "int"
default:
return "unknown"
}
case bincVdSmallInt, bincVdPosInt:
return "uint"
case bincVdNegInt:
return "int"
case bincVdFloat:
return "float"
case bincVdSymbol:
return "string"
case bincVdString:
return "string"
case bincVdByteArray:
return "bytes"
case bincVdTimestamp:
return "time"
case bincVdCustomExt:
return "ext"
case bincVdArray:
return "array"
case bincVdMap:
return "map"
default:
return "unknown"
}
}
type bincEncDriver struct {
e *Encoder
h *BincHandle
@ -405,7 +449,7 @@ func (d *bincDecDriver) DecodeTime() (t time.Time) {
return
}
if d.vd != bincVdTimestamp {
d.d.errorf("Invalid d.vd. Expecting 0x%x. Received: 0x%x", bincVdTimestamp, d.vd)
d.d.errorf("cannot decode time - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
t, err := bincDecodeTime(d.r.readx(int(d.vs)))
@ -422,7 +466,7 @@ func (d *bincDecDriver) decFloatPre(vs, defaultLen byte) {
} else {
l := d.r.readn1()
if l > 8 {
d.d.errorf("At most 8 bytes used to represent float. Received: %v bytes", l)
d.d.errorf("cannot read float - at most 8 bytes used to represent float - received %v bytes", l)
return
}
for i := l; i < 8; i++ {
@ -441,7 +485,7 @@ func (d *bincDecDriver) decFloat() (f float64) {
d.decFloatPre(d.vs, 8)
f = math.Float64frombits(bigen.Uint64(d.b[0:8]))
} else {
d.d.errorf("only float32 and float64 are supported. d.vd: 0x%x, d.vs: 0x%x", d.vd, d.vs)
d.d.errorf("read float - only float32 and float64 are supported - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
return
@ -498,7 +542,8 @@ func (d *bincDecDriver) decCheckInteger() (ui uint64, neg bool) {
neg = true
ui = 1
} else {
d.d.errorf("numeric decode fails for special value: d.vs: 0x%x", d.vs)
d.d.errorf("integer decode fails - invalid special value from descriptor %x-%x/%s",
d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
} else {
@ -521,7 +566,7 @@ func (d *bincDecDriver) DecodeInt64() (i int64) {
func (d *bincDecDriver) DecodeUint64() (ui uint64) {
ui, neg := d.decCheckInteger()
if neg {
d.d.errorf("Assigning negative signed value to unsigned type")
d.d.errorf("assigning negative signed value to unsigned integer type")
return
}
d.bdRead = false
@ -544,7 +589,8 @@ func (d *bincDecDriver) DecodeFloat64() (f float64) {
} else if vs == bincSpNegInf {
return math.Inf(-1)
} else {
d.d.errorf("Invalid d.vs decoding float where d.vd=bincVdSpecial: %v", d.vs)
d.d.errorf("float - invalid special value from descriptor %x-%x/%s",
d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
} else if vd == bincVdFloat {
@ -566,7 +612,7 @@ func (d *bincDecDriver) DecodeBool() (b bool) {
} else if bd == (bincVdSpecial | bincSpTrue) {
b = true
} else {
d.d.errorf("Invalid single-byte value for bool: %s: %x", msgBadDesc, d.bd)
d.d.errorf("bool - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
d.bdRead = false
@ -578,7 +624,7 @@ func (d *bincDecDriver) ReadMapStart() (length int) {
d.readNextBd()
}
if d.vd != bincVdMap {
d.d.errorf("Invalid d.vd for map. Expecting 0x%x. Got: 0x%x", bincVdMap, d.vd)
d.d.errorf("map - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
length = d.decLen()
@ -591,7 +637,7 @@ func (d *bincDecDriver) ReadArrayStart() (length int) {
d.readNextBd()
}
if d.vd != bincVdArray {
d.d.errorf("Invalid d.vd for array. Expecting 0x%x. Got: 0x%x", bincVdArray, d.vd)
d.d.errorf("array - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
length = d.decLen()
@ -704,8 +750,7 @@ func (d *bincDecDriver) decStringAndBytes(bs []byte, withString, zerocopy bool)
d.s = append(d.s, bincDecSymbol{i: symbol, s: s, b: bs2})
}
default:
d.d.errorf("Invalid d.vd. Expecting string:0x%x, bytearray:0x%x or symbol: 0x%x. Got: 0x%x",
bincVdString, bincVdByteArray, bincVdSymbol, d.vd)
d.d.errorf("string/bytes - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
d.bdRead = false
@ -742,8 +787,7 @@ func (d *bincDecDriver) DecodeBytes(bs []byte, zerocopy bool) (bsOut []byte) {
if d.vd == bincVdString || d.vd == bincVdByteArray {
clen = d.decLen()
} else {
d.d.errorf("Invalid d.vd for bytes. Expecting string:0x%x or bytearray:0x%x. Got: 0x%x",
bincVdString, bincVdByteArray, d.vd)
d.d.errorf("bytes - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
d.bdRead = false
@ -759,7 +803,7 @@ func (d *bincDecDriver) DecodeBytes(bs []byte, zerocopy bool) (bsOut []byte) {
func (d *bincDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) (realxtag uint64) {
if xtag > 0xff {
d.d.errorf("decodeExt: tag must be <= 0xff; got: %v", xtag)
d.d.errorf("ext: tag must be <= 0xff; got: %v", xtag)
return
}
realxtag1, xbs := d.decodeExtV(ext != nil, uint8(xtag))
@ -782,14 +826,14 @@ func (d *bincDecDriver) decodeExtV(verifyTag bool, tag byte) (xtag byte, xbs []b
l := d.decLen()
xtag = d.r.readn1()
if verifyTag && xtag != tag {
d.d.errorf("Wrong extension tag. Got %b. Expecting: %v", xtag, tag)
d.d.errorf("wrong extension tag - got %b, expecting: %v", xtag, tag)
return
}
xbs = d.r.readx(l)
} else if d.vd == bincVdByteArray {
xbs = d.DecodeBytes(nil, true)
} else {
d.d.errorf("Invalid d.vd for extensions (Expecting extensions or byte array). Got: 0x%x", d.vd)
d.d.errorf("ext - expecting extensions or byte array - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
return
}
d.bdRead = false
@ -834,7 +878,7 @@ func (d *bincDecDriver) DecodeNaked() {
n.v = valueTypeInt
n.i = int64(-1) // int8(-1)
default:
d.d.errorf("decodeNaked: Unrecognized special value 0x%x", d.vs)
d.d.errorf("cannot infer value - unrecognized special value from descriptor %x-%x/%s", d.vd, d.vs, bincdesc(d.vd, d.vs))
}
case bincVdSmallInt:
n.v = valueTypeUint
@ -876,7 +920,7 @@ func (d *bincDecDriver) DecodeNaked() {
n.v = valueTypeMap
decodeFurther = true
default:
d.d.errorf("decodeNaked: Unrecognized d.vd: 0x%x", d.vd)
d.d.errorf("cannot infer value - %s %x-%x/%s", msgBadDesc, d.vd, d.vs, bincdesc(d.vd, d.vs))
}
if !decodeFurther {
@ -928,7 +972,7 @@ type BincHandle struct {
// - n: none
// - a: all: same as m, s, ...
_ [1]uint64 // padding
// _ [1]uint64 // padding
}
// Name returns the name of the handle: binc

View file

@ -60,6 +60,46 @@ const (
cborBaseSimple = 0xe0
)
func cbordesc(bd byte) string {
switch bd {
case cborBdNil:
return "nil"
case cborBdFalse:
return "false"
case cborBdTrue:
return "true"
case cborBdFloat16, cborBdFloat32, cborBdFloat64:
return "float"
case cborBdIndefiniteBytes:
return "bytes*"
case cborBdIndefiniteString:
return "string*"
case cborBdIndefiniteArray:
return "array*"
case cborBdIndefiniteMap:
return "map*"
default:
switch {
case bd >= cborBaseUint && bd < cborBaseNegInt:
return "(u)int"
case bd >= cborBaseNegInt && bd < cborBaseBytes:
return "int"
case bd >= cborBaseBytes && bd < cborBaseString:
return "bytes"
case bd >= cborBaseString && bd < cborBaseArray:
return "string"
case bd >= cborBaseArray && bd < cborBaseMap:
return "array"
case bd >= cborBaseMap && bd < cborBaseTag:
return "map"
case bd >= cborBaseTag && bd < cborBaseSimple:
return "ext"
default:
return "unknown"
}
}
}
// -------------------
type cborEncDriver struct {
@ -326,7 +366,7 @@ func (d *cborDecDriver) decUint() (ui uint64) {
} else if v == 0x1b {
ui = uint64(bigen.Uint64(d.r.readx(8)))
} else {
d.d.errorf("decUint: Invalid descriptor: %v", d.bd)
d.d.errorf("invalid descriptor decoding uint: %x/%s", d.bd, cbordesc(d.bd))
return
}
}
@ -342,7 +382,7 @@ func (d *cborDecDriver) decCheckInteger() (neg bool) {
} else if major == cborMajorNegInt {
neg = true
} else {
d.d.errorf("invalid major: %v (bd: %v)", major, d.bd)
d.d.errorf("not an integer - invalid major %v from descriptor %x/%s", major, d.bd, cbordesc(d.bd))
return
}
return
@ -363,7 +403,7 @@ func (d *cborDecDriver) DecodeInt64() (i int64) {
func (d *cborDecDriver) DecodeUint64() (ui uint64) {
if d.decCheckInteger() {
d.d.errorf("Assigning negative signed value to unsigned type")
d.d.errorf("assigning negative signed value to unsigned type")
return
}
ui = d.decUint()
@ -384,7 +424,7 @@ func (d *cborDecDriver) DecodeFloat64() (f float64) {
} else if bd >= cborBaseUint && bd < cborBaseBytes {
f = float64(d.DecodeInt64())
} else {
d.d.errorf("Float only valid from float16/32/64: Invalid descriptor: %v", bd)
d.d.errorf("float only valid from float16/32/64 - invalid descriptor %x/%s", bd, cbordesc(bd))
return
}
d.bdRead = false
@ -400,7 +440,7 @@ func (d *cborDecDriver) DecodeBool() (b bool) {
b = true
} else if bd == cborBdFalse {
} else {
d.d.errorf("Invalid single-byte value for bool: %s: %x", msgBadDesc, d.bd)
d.d.errorf("not bool - %s %x/%s", msgBadDesc, d.bd, cbordesc(d.bd))
return
}
d.bdRead = false
@ -441,7 +481,7 @@ func (d *cborDecDriver) decAppendIndefiniteBytes(bs []byte) []byte {
}
if major := d.bd >> 5; major != cborMajorBytes && major != cborMajorText {
d.d.errorf("expect bytes/string major type in indefinite string/bytes;"+
" got: %v, byte: %v", major, d.bd)
" got major %v from descriptor %x/%x", major, d.bd, cbordesc(d.bd))
return nil
}
n := d.decLen()
@ -684,7 +724,7 @@ type CborHandle struct {
// If unset, we encode time.Time using seconds past epoch.
TimeRFC3339 bool
_ [1]uint64 // padding
// _ [1]uint64 // padding
}
// Name returns the name of the handle: cbor

View file

@ -881,7 +881,7 @@ func testCodecMiscOne(t *testing.T, h Handle) {
} else {
logT(t, "------- b: size: %v, value: %s", len(b), b1)
}
ts2 := new(TestStrucFlex)
ts2 := emptyTestStrucFlex()
testUnmarshalErr(ts2, b, h, t, "pointer-to-struct")
if ts2.I64 != math.MaxInt64*2/3 {
logT(t, "------- Unmarshal wrong. Expect I64 = 64. Got: %v", ts2.I64)
@ -1874,12 +1874,13 @@ func doTestLargeContainerLen(t *testing.T, h Handle) {
0, 1,
math.MaxInt8, math.MaxInt8 + 4, math.MaxInt8 - 4,
math.MaxInt16, math.MaxInt16 + 4, math.MaxInt16 - 4,
math.MaxInt32, math.MaxInt32 + 4, math.MaxInt32 - 4,
math.MaxInt64, math.MaxInt64 - 4,
math.MaxInt32, math.MaxInt32 - 4,
// math.MaxInt32 + 4, // bombs on 32-bit
// math.MaxInt64, math.MaxInt64 - 4, // bombs on 32-bit
math.MaxUint8, math.MaxUint8 + 4, math.MaxUint8 - 4,
math.MaxUint16, math.MaxUint16 + 4, math.MaxUint16 - 4,
math.MaxUint32, math.MaxUint32 + 4, math.MaxUint32 - 4,
// math.MaxUint32, math.MaxUint32 + 4, math.MaxUint32 - 4, // bombs on 32-bit
} {
m[i] = make([]struct{}, i)
}
@ -2286,6 +2287,26 @@ func doTestIntfMapping(t *testing.T, name string, h Handle) {
}
}
func doTestOmitempty(t *testing.T, name string, h Handle) {
testOnce.Do(testInitAll)
if h.getBasicHandle().StructToArray {
t.Skipf("Skipping OmitEmpty test when StructToArray=true")
}
type T1 struct {
A int `codec:"a"`
B *int `codec:"b,omitempty"`
C int `codec:"c,omitempty"`
}
type T2 struct {
A int `codec:"a"`
}
var v1 T1
var v2 T2
b1 := testMarshalErr(v1, h, t, name+"-omitempty")
b2 := testMarshalErr(v2, h, t, name+"-no-omitempty-trunc")
testDeepEqualErr(b1, b2, t, name+"-omitempty-cmp")
}
// -----------------
func TestJsonDecodeNonStringScalarInStringContext(t *testing.T) {
@ -2923,6 +2944,26 @@ func TestSimpleScalars(t *testing.T) {
doTestScalars(t, "simple", testSimpleH)
}
func TestJsonOmitempty(t *testing.T) {
doTestOmitempty(t, "json", testJsonH)
}
func TestCborOmitempty(t *testing.T) {
doTestOmitempty(t, "cbor", testCborH)
}
func TestMsgpackOmitempty(t *testing.T) {
doTestOmitempty(t, "msgpack", testMsgpackH)
}
func TestBincOmitempty(t *testing.T) {
doTestOmitempty(t, "binc", testBincH)
}
func TestSimpleOmitempty(t *testing.T) {
doTestOmitempty(t, "simple", testSimpleH)
}
func TestJsonIntfMapping(t *testing.T) {
doTestIntfMapping(t, "json", testJsonH)
}

37
vendor/github.com/ugorji/go/codec/codecgen/README.md generated vendored Normal file
View file

@ -0,0 +1,37 @@
# codecgen tool
Given a list of *.go files to parse and an output file (fout), codecgen
creates that output file, containing `codec.Selfer` implementations for the
named types found in the files parsed.
Using codecgen is very straightforward.
**Download and install the tool**
`go get -u github.com/ugorji/go/codec/codecgen`
**Run the tool on your files**
The command line format is:
`codecgen [options] (-o outfile) (infile ...)`
```sh
% codecgen -?
Usage of codecgen:
-c="github.com/ugorji/go/codec": codec path
-o="": out file
-r=".*": regex for type name to match
-nr="": regex for type name to exclude
-rt="": tags for go run
-t="": build tag to put in file
-u=false: Use unsafe, e.g. to avoid unnecessary allocation on []byte->string
-x=false: keep temp file
% codecgen -o values_codecgen.go values.go values2.go moretypedefs.go
```
Please see the [blog article](http://ugorji.net/blog/go-codecgen)
for more information on how to use the tool.
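
For illustration (the file and type names below are made up), an input file such as:

```go
// values.go
package mypkg

type MyType struct {
	ID   uint64 `codec:"id"`
	Name string `codec:"name,omitempty"`
}
```

run through `codecgen -o values_codecgen.go values.go` yields a `values_codecgen.go`
containing `CodecEncodeSelf` and `CodecDecodeSelf` methods for `MyType`, i.e. a
`codec.Selfer` implementation, so the codec package can encode and decode it without reflection.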

360
vendor/github.com/ugorji/go/codec/codecgen/gen.go generated vendored Normal file
View file

@ -0,0 +1,360 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.
// codecgen generates codec.Selfer implementations for a set of types.
package main
import (
"bufio"
"bytes"
"errors"
"flag"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"math/rand"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/template"
"time"
)
const genCodecPkg = "codec1978" // keep this in sync with codec.genCodecPkg
const genFrunMainTmpl = `//+build ignore
// Code generated - temporary main package for codecgen - DO NOT EDIT.
package main
{{ if .Types }}import "{{ .ImportPath }}"{{ end }}
func main() {
{{ $.PackageName }}.CodecGenTempWrite{{ .RandString }}()
}
`
// const genFrunPkgTmpl = `//+build codecgen
const genFrunPkgTmpl = `
// Code generated - temporary package for codecgen - DO NOT EDIT.
package {{ $.PackageName }}
import (
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }} "{{ .CodecImportPath }}"{{ end }}
"os"
"reflect"
"bytes"
"strings"
"go/format"
)
func CodecGenTempWrite{{ .RandString }}() {
os.Remove("{{ .OutFile }}")
fout, err := os.Create("{{ .OutFile }}")
if err != nil {
panic(err)
}
defer fout.Close()
var typs []reflect.Type
var typ reflect.Type
var numfields int
{{ range $index, $element := .Types }}
var t{{ $index }} {{ . }}
typ = reflect.TypeOf(t{{ $index }})
typs = append(typs, typ)
if typ.Kind() == reflect.Struct { numfields += typ.NumField() } else { numfields += 1 }
{{ end }}
// println("initializing {{ .OutFile }}, buf size: {{ .AllFilesSize }}*16",
// {{ .AllFilesSize }}*16, "num fields: ", numfields)
var out = bytes.NewBuffer(make([]byte, 0, numfields*1024)) // {{ .AllFilesSize }}*16
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }}.{{ end }}Gen(out,
"{{ .BuildTag }}", "{{ .PackageName }}", "{{ .RandString }}", {{ .NoExtensions }},
{{ if not .CodecPkgFiles }}{{ .CodecPkgName }}.{{ end }}NewTypeInfos(strings.Split("{{ .StructTags }}", ",")),
typs...)
bout, err := format.Source(out.Bytes())
// println("... lengths: before formatting: ", len(out.Bytes()), ", after formatting", len(bout))
if err != nil {
fout.Write(out.Bytes())
panic(err)
}
fout.Write(bout)
}
`
// Generate is given a list of *.go files to parse, and an output file (fout).
//
// It finds all types T in the files, and it creates 2 tmp files (frun).
// - main package file passed to 'go run'
// - package level file which calls *genRunner.Selfer to write Selfer impls for each T.
// We use a package level file so that it can reference unexported types in the package being worked on.
// The tool then executes: "go run __frun__", which creates fout.
// fout contains Codec(En|De)codeSelf implementations for every type T.
//
func Generate(outfile, buildTag, codecPkgPath string,
uid int64,
goRunTag string, st string,
regexName, notRegexName *regexp.Regexp,
deleteTempFile, noExtensions bool,
infiles ...string) (err error) {
// For each file, grab AST, find each type, and write a call to it.
if len(infiles) == 0 {
return
}
if outfile == "" || codecPkgPath == "" {
err = errors.New("outfile and codec package path cannot be blank")
return
}
if uid < 0 {
uid = -uid
} else if uid == 0 {
rr := rand.New(rand.NewSource(time.Now().UnixNano()))
uid = 101 + rr.Int63n(9777)
}
// We have to parse dir for package, before opening the temp file for writing (else ImportDir fails).
// Also, ImportDir(...) must take an absolute path.
lastdir := filepath.Dir(outfile)
absdir, err := filepath.Abs(lastdir)
if err != nil {
return
}
pkg, err := build.Default.ImportDir(absdir, build.AllowBinary)
if err != nil {
return
}
type tmplT struct {
CodecPkgName string
CodecImportPath string
ImportPath string
OutFile string
PackageName string
RandString string
BuildTag string
StructTags string
Types []string
AllFilesSize int64
CodecPkgFiles bool
NoExtensions bool
}
tv := tmplT{
CodecPkgName: genCodecPkg,
OutFile: outfile,
CodecImportPath: codecPkgPath,
BuildTag: buildTag,
RandString: strconv.FormatInt(uid, 10),
StructTags: st,
NoExtensions: noExtensions,
}
tv.ImportPath = pkg.ImportPath
if tv.ImportPath == tv.CodecImportPath {
tv.CodecPkgFiles = true
tv.CodecPkgName = "codec"
} else {
// HACK: always handle vendoring. It should be typically on in go 1.6, 1.7
tv.ImportPath = stripVendor(tv.ImportPath)
}
astfiles := make([]*ast.File, len(infiles))
var fi os.FileInfo
for i, infile := range infiles {
if filepath.Dir(infile) != lastdir {
err = errors.New("in files must all be in same directory as outfile")
return
}
if fi, err = os.Stat(infile); err != nil {
return
}
tv.AllFilesSize += fi.Size()
fset := token.NewFileSet()
astfiles[i], err = parser.ParseFile(fset, infile, nil, 0)
if err != nil {
return
}
if i == 0 {
tv.PackageName = astfiles[i].Name.Name
if tv.PackageName == "main" {
// codecgen cannot be run on types in the 'main' package.
// A temporary 'main' package must be created, and should reference the fully built
// package containing the types.
// Also, the temporary main package will conflict with the main package which already has a main method.
err = errors.New("codecgen cannot be run on types in the 'main' package")
return
}
}
}
// keep track of types with selfer methods
// selferMethods := []string{"CodecEncodeSelf", "CodecDecodeSelf"}
selferEncTyps := make(map[string]bool)
selferDecTyps := make(map[string]bool)
for _, f := range astfiles {
for _, d := range f.Decls {
// if fd, ok := d.(*ast.FuncDecl); ok && fd.Recv != nil && fd.Recv.NumFields() == 1 {
if fd, ok := d.(*ast.FuncDecl); ok && fd.Recv != nil && len(fd.Recv.List) == 1 {
recvType := fd.Recv.List[0].Type
if ptr, ok := recvType.(*ast.StarExpr); ok {
recvType = ptr.X
}
if id, ok := recvType.(*ast.Ident); ok {
switch fd.Name.Name {
case "CodecEncodeSelf":
selferEncTyps[id.Name] = true
case "CodecDecodeSelf":
selferDecTyps[id.Name] = true
}
}
}
}
}
// now find types
for _, f := range astfiles {
for _, d := range f.Decls {
if gd, ok := d.(*ast.GenDecl); ok {
for _, dd := range gd.Specs {
if td, ok := dd.(*ast.TypeSpec); ok {
// if len(td.Name.Name) == 0 || td.Name.Name[0] > 'Z' || td.Name.Name[0] < 'A' {
if len(td.Name.Name) == 0 {
continue
}
// only generate for:
// struct: StructType
// primitives (numbers, bool, string): Ident
// map: MapType
// slice, array: ArrayType
// chan: ChanType
// do not generate:
// FuncType, InterfaceType, StarExpr (ptr), etc
//
// We generate for all these types (not just structs), because they may be a field
// in another struct which doesn't have codecgen run on it, and it will be nice
// to take advantage of the fact that the type is a Selfer.
switch td.Type.(type) {
case *ast.StructType, *ast.Ident, *ast.MapType, *ast.ArrayType, *ast.ChanType:
// only add to tv.Types iff
// - it matches per the -r parameter
// - it doesn't match per the -nr parameter
// - it doesn't have any of the Selfer methods in the file
if regexName.FindStringIndex(td.Name.Name) != nil &&
notRegexName.FindStringIndex(td.Name.Name) == nil &&
!selferEncTyps[td.Name.Name] &&
!selferDecTyps[td.Name.Name] {
tv.Types = append(tv.Types, td.Name.Name)
}
}
}
}
}
}
}
if len(tv.Types) == 0 {
return
}
// we cannot use ioutil.TempFile, because we cannot guarantee the file suffix (.go).
// Also, we cannot create file in temp directory,
// because go run will not work (as it needs to see the types here).
// Consequently, create the temp file in the current directory, and remove when done.
// frun, err = ioutil.TempFile("", "codecgen-")
// frunName := filepath.Join(os.TempDir(), "codecgen-"+strconv.FormatInt(time.Now().UnixNano(), 10)+".go")
frunMainName := "codecgen-main-" + tv.RandString + ".generated.go"
frunPkgName := "codecgen-pkg-" + tv.RandString + ".generated.go"
if deleteTempFile {
defer os.Remove(frunMainName)
defer os.Remove(frunPkgName)
}
// var frunMain, frunPkg *os.File
if _, err = gen1(frunMainName, genFrunMainTmpl, &tv); err != nil {
return
}
if _, err = gen1(frunPkgName, genFrunPkgTmpl, &tv); err != nil {
return
}
// remove outfile, so "go run ..." will not think that types in outfile already exist.
os.Remove(outfile)
// execute go run frun
cmd := exec.Command("go", "run", "-tags", "codecgen.exec safe "+goRunTag, frunMainName) //, frunPkg.Name())
var buf bytes.Buffer
cmd.Stdout = &buf
cmd.Stderr = &buf
if err = cmd.Run(); err != nil {
err = fmt.Errorf("error running 'go run %s': %v, console: %s",
frunMainName, err, buf.Bytes())
return
}
os.Stdout.Write(buf.Bytes())
return
}
func gen1(frunName, tmplStr string, tv interface{}) (frun *os.File, err error) {
os.Remove(frunName)
if frun, err = os.Create(frunName); err != nil {
return
}
defer frun.Close()
t := template.New("")
if t, err = t.Parse(tmplStr); err != nil {
return
}
bw := bufio.NewWriter(frun)
if err = t.Execute(bw, tv); err != nil {
bw.Flush()
return
}
if err = bw.Flush(); err != nil {
return
}
return
}
// copied from ../gen.go (keep in sync).
func stripVendor(s string) string {
// HACK: Misbehaviour occurs in go 1.5. May have to re-visit this later.
// if s contains /vendor/ OR startsWith vendor/, then return everything after it.
const vendorStart = "vendor/"
const vendorInline = "/vendor/"
if i := strings.LastIndex(s, vendorInline); i >= 0 {
s = s[i+len(vendorInline):]
} else if strings.HasPrefix(s, vendorStart) {
s = s[len(vendorStart):]
}
return s
}
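A minimal sketch of a table test for stripVendor (it would live in a hypothetical gen_test.go; the input paths below are made up):
func TestStripVendor(t *testing.T) {
	cases := map[string]string{
		"myapp/vendor/github.com/ugorji/go/codec": "github.com/ugorji/go/codec",
		"vendor/github.com/ugorji/go/codec":       "github.com/ugorji/go/codec",
		"github.com/ugorji/go/codec":              "github.com/ugorji/go/codec", // no vendor prefix: unchanged
	}
	for in, want := range cases {
		if got := stripVendor(in); got != want {
			t.Errorf("stripVendor(%q) = %q, want %q", in, got, want)
		}
	}
}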
func main() {
o := flag.String("o", "", "out file")
c := flag.String("c", genCodecPath, "codec path")
t := flag.String("t", "", "build tag to put in file")
r := flag.String("r", ".*", "regex for type name to match")
nr := flag.String("nr", "^$", "regex for type name to exclude")
rt := flag.String("rt", "", "tags for go run")
st := flag.String("st", "codec,json", "struct tag keys to introspect")
x := flag.Bool("x", false, "keep temp file")
_ = flag.Bool("u", false, "Allow unsafe use. ***IGNORED*** - kept for backwards compatibility: ")
d := flag.Int64("d", 0, "random identifier for use in generated code")
nx := flag.Bool("nx", false, "do not support extensions - support of extensions may cause extra allocation")
flag.Parse()
err := Generate(*o, *t, *c, *d, *rt, *st,
regexp.MustCompile(*r), regexp.MustCompile(*nr), !*x, *nx, flag.Args()...)
if err != nil {
fmt.Fprintf(os.Stderr, "codecgen error: %v\n", err)
os.Exit(1)
}
}

3
vendor/github.com/ugorji/go/codec/codecgen/z.go generated vendored Normal file
View file

@ -0,0 +1,3 @@
package main
const genCodecPath = "github.com/ugorji/go/codec"

View file

@ -16,10 +16,14 @@ import (
// Some tagging information for error messages.
const (
msgBadDesc = "Unrecognized descriptor byte"
msgBadDesc = "unrecognized descriptor byte"
msgDecCannotExpandArr = "cannot expand go array from %v to stream length: %v"
)
const decDefSliceCap = 8
const decDefChanCap = 64 // should be large, as cap cannot be expanded
const decScratchByteArrayLen = cacheLineSize - 8
var (
errstrOnlyMapOrArrayCanDecodeIntoStruct = "only encoded map or array can be decoded into a struct"
errstrCannotDecodeIntoNil = "cannot decode into nil"
@ -1237,7 +1241,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
// This way, the order can be kept (as order is lost with map).
ti := f.ti
if f.seq == seqTypeChan && ti.chandir&uint8(reflect.SendDir) == 0 {
d.errorf("receive-only channel cannot be used for sending byte(s)")
d.errorf("receive-only channel cannot be decoded")
}
dd := d.d
rtelem0 := ti.elem
@ -1356,14 +1360,17 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
if j == 0 && (f.seq == seqTypeSlice || f.seq == seqTypeChan) && rv.IsNil() {
if hasLen {
rvlen = decInferLen(containerLenS, d.h.MaxInitLen, rtelem0Size)
} else if f.seq == seqTypeSlice {
rvlen = decDefSliceCap
} else {
rvlen = 8
rvlen = decDefChanCap
}
if rvCanset {
if f.seq == seqTypeSlice {
rv = reflect.MakeSlice(ti.rt, rvlen, rvlen)
rvChanged = true
} else { // chan
// xdebugf(">>>>>> haslen = %v, make chan of type '%v' with length: %v", hasLen, ti.rt, rvlen)
rv = reflect.MakeChan(ti.rt, rvlen)
rvChanged = true
}
@ -1385,6 +1392,7 @@ func (d *Decoder) kSlice(f *codecFnInfo, rv reflect.Value) {
fn = d.cf.get(rtelem, true, true)
}
d.decodeValue(rv9, fn, true)
// xdebugf(">>>> rv9 sent on %v during decode: %v, with len=%v, cap=%v", rv.Type(), rv9, rv.Len(), rv.Cap())
rv.Send(rv9)
} else {
// if indefinite, etc, then expand the slice if necessary
@ -1734,7 +1742,7 @@ type decReaderSwitch struct {
esep bool // has elem separators
}
// TODO: Uncomment after mid-stack inlining enabled in go 1.10
// TODO: Uncomment after mid-stack inlining enabled in go 1.11
//
// func (z *decReaderSwitch) unreadn1() {
// if z.bytes {
@ -1800,8 +1808,6 @@ type decReaderSwitch struct {
// return z.ri.readUntil(in, stop)
// }
const decScratchByteArrayLen = cacheLineSize - 8
// A Decoder reads and decodes an object from an input stream in the codec format.
type Decoder struct {
panicHdl
@ -2002,9 +2008,7 @@ func (d *Decoder) naked() *decNaked {
// Note: we allow nil values in the stream anywhere except for map keys.
// A nil value in the encoded stream where a map key is expected is treated as an error.
func (d *Decoder) Decode(v interface{}) (err error) {
// need to call defer directly, else it seems the recover is not fully handled
defer panicToErrs2(d, &d.err, &err)
defer d.alwaysAtEnd()
defer d.deferred(&err)
d.MustDecode(v)
return
}
@ -2025,11 +2029,15 @@ func (d *Decoder) MustDecode(v interface{}) {
// xprintf(">>>>>>>> >>>>>>>> num decFns: %v\n", d.cf.sn)
}
// // this is not a smart swallow, as it allocates objects and does unnecessary work.
// func (d *Decoder) swallowViaHammer() {
// var blank interface{}
// d.decodeValueNoFn(reflect.ValueOf(&blank).Elem())
// }
func (d *Decoder) deferred(err1 *error) {
d.alwaysAtEnd()
if recoverPanicToErr {
if x := recover(); x != nil {
panicValToErr(d, x, err1)
panicValToErr(d, x, &d.err)
}
}
}
func (d *Decoder) alwaysAtEnd() {
if d.n != nil {
@ -2040,6 +2048,12 @@ func (d *Decoder) alwaysAtEnd() {
d.codecFnPooler.alwaysAtEnd()
}
// // this is not a smart swallow, as it allocates objects and does unnecessary work.
// func (d *Decoder) swallowViaHammer() {
// var blank interface{}
// d.decodeValueNoFn(reflect.ValueOf(&blank).Elem())
// }
func (d *Decoder) swallow() {
// smarter decode that just swallows the content
dd := d.d
@ -2368,6 +2382,10 @@ func (d *Decoder) wrapErrstr(v interface{}, err *error) {
*err = fmt.Errorf("%s decode error [pos %d]: %v", d.hh.Name(), d.r.numread(), v)
}
func (d *Decoder) NumBytesRead() int {
return d.r.numread()
}
// --------------------------------------------------
// decSliceHelper assists when decoding into a slice, from a map or an array in the stream.

View file

@ -103,7 +103,15 @@ type EncodeOptions struct {
// if > 0, we use a smart buffer internally for performance purposes.
WriterBufferSize int
// Encode a struct as an array, and not as a map
// ChanRecvTimeout is the timeout used when selecting from a chan.
//
// Configuring this controls how we receive from a chan during the encoding process.
// - If ==0, we only consume the elements currently available in the chan.
// - If <0, we consume until the chan is closed.
// - If >0, we consume until this timeout.
ChanRecvTimeout time.Duration
// StructToArray specifies to encode a struct as an array, and not as a map
StructToArray bool
// Canonical representation means that encoding a value will always result in the same
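An illustrative caller-side sketch of the ChanRecvTimeout option documented above (not part of encode.go; it only assumes the public API imported below):
package main

import (
	"bytes"
	"fmt"
	"time"

	"github.com/ugorji/go/codec"
)

func main() {
	ch := make(chan int, 4)
	ch <- 1
	ch <- 2
	ch <- 3

	var h codec.JsonHandle
	h.ChanRecvTimeout = 5 * time.Millisecond // >0: drain available values, then stop at the timeout

	var buf bytes.Buffer
	if err := codec.NewEncoder(&buf, &h).Encode(ch); err != nil {
		panic(err)
	}
	fmt.Println(buf.String()) // expect something like [1,2,3]
}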
@ -219,7 +227,9 @@ func (z *ioEncWriter) writen2(b1, b2 byte) {
func (z *ioEncWriter) atEndOfEncode() {
if z.fw != nil {
z.fw.Flush()
if err := z.fw.Flush(); err != nil {
panic(err)
}
}
}
@ -312,18 +322,19 @@ func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
}
}
if f.seq == seqTypeChan && ti.chandir&uint8(reflect.RecvDir) == 0 {
e.errorf("send-only channel cannot be used for receiving byte(s)")
e.errorf("send-only channel cannot be encoded")
}
elemsep := e.esep
l := rv.Len()
rtelem := ti.elem
rtelemIsByte := uint8TypId == rt2id(rtelem) // NOT rtelem.Kind() == reflect.Uint8
var l int
// if a slice, array or chan of bytes, treat specially
if rtelemIsByte {
switch f.seq {
case seqTypeSlice:
ee.EncodeStringBytes(cRAW, rv.Bytes())
case seqTypeArray:
l = rv.Len()
if rv.CanAddr() {
ee.EncodeStringBytes(cRAW, rv.Slice(0, l).Bytes())
} else {
@ -337,24 +348,89 @@ func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
ee.EncodeStringBytes(cRAW, bs)
}
case seqTypeChan:
bs := e.b[:0]
// do not use range, so that the number of elements encoded
// does not change, and encoding does not hang waiting on someone to close chan.
// for b := range rv2i(rv).(<-chan byte) { bs = append(bs, b) }
// ch := rv2i(rv).(<-chan byte) // fix error - that this is a chan byte, not a <-chan byte.
if rv.IsNil() {
ee.EncodeNil()
break
}
bs := e.b[:0]
irv := rv2i(rv)
ch, ok := irv.(<-chan byte)
if !ok {
ch = irv.(chan byte)
}
for i := 0; i < l; i++ {
bs = append(bs, <-ch)
L1:
switch timeout := e.h.ChanRecvTimeout; {
case timeout == 0: // only consume available
for {
select {
case b := <-ch:
bs = append(bs, b)
default:
break L1
}
}
case timeout > 0: // consume until timeout
tt := time.NewTimer(timeout)
for {
select {
case b := <-ch:
bs = append(bs, b)
case <-tt.C:
// close(tt.C)
break L1
}
}
default: // consume until close
for b := range ch {
bs = append(bs, b)
}
}
ee.EncodeStringBytes(cRAW, bs)
}
return
}
// if chan, consume chan into a slice, and work off that slice.
var rvcs reflect.Value
if f.seq == seqTypeChan {
rvcs = reflect.Zero(reflect.SliceOf(rtelem))
timeout := e.h.ChanRecvTimeout
if timeout < 0 { // consume until close
for {
recv, recvOk := rv.Recv()
if !recvOk {
break
}
rvcs = reflect.Append(rvcs, recv)
}
} else {
cases := make([]reflect.SelectCase, 2)
cases[0] = reflect.SelectCase{Dir: reflect.SelectRecv, Chan: rv}
if timeout == 0 {
cases[1] = reflect.SelectCase{Dir: reflect.SelectDefault}
} else {
tt := time.NewTimer(timeout)
cases[1] = reflect.SelectCase{Dir: reflect.SelectRecv, Chan: reflect.ValueOf(tt.C)}
}
for {
chosen, recv, recvOk := reflect.Select(cases)
if chosen == 1 || !recvOk {
break
}
rvcs = reflect.Append(rvcs, recv)
}
}
rv = rvcs // TODO: ensure this doesn't mess up anywhere that rv of kind chan is expected
}
l = rv.Len()
if ti.mbs {
if l%2 == 1 {
e.errorf("mapBySlice requires even slice length, but got %v", l)
@ -388,15 +464,7 @@ func (e *Encoder) kSlice(f *codecFnInfo, rv reflect.Value) {
ee.WriteArrayElem()
}
}
if f.seq == seqTypeChan {
if rv2, ok2 := rv.Recv(); ok2 {
e.encodeValue(rv2, fn, true)
} else {
ee.EncodeNil() // WE HAVE TO DO SOMETHING, so nil if nothing received.
}
} else {
e.encodeValue(rv.Index(j), fn, true)
}
e.encodeValue(rv.Index(j), fn, true)
}
}
@ -835,7 +903,7 @@ type encWriterSwitch struct {
isas bool // whether e.as != nil
}
// // TODO: Uncomment after mid-stack inlining enabled in go 1.10
// // TODO: Uncomment after mid-stack inlining enabled in go 1.11
// func (z *encWriterSwitch) writeb(s []byte) {
// if z.wx {
@ -995,9 +1063,12 @@ func (e *Encoder) ResetBytes(out *[]byte) {
// Encode writes an object into a stream.
//
// Encoding can be configured via the struct tag for the fields.
// The "codec" key in struct field's tag value is the key name,
// The key (in the struct tags) that we look at is configurable.
//
// By default, we look up the "codec" key in the struct field's tags,
// and fall back to the "json" key if "codec" is absent.
// That key in struct field's tag value is the key name,
// followed by an optional comma and options.
// Note that the "json" key is used in the absence of the "codec" key.
//
// To set an option on all fields (e.g. omitempty on all fields), you
// can create a field called _struct, and set flags on it. The options
@ -1073,8 +1144,7 @@ func (e *Encoder) ResetBytes(out *[]byte) {
// Some formats support symbols (e.g. binc) and will properly encode the string
// only once in the stream, and use a tag to refer to it thereafter.
func (e *Encoder) Encode(v interface{}) (err error) {
defer panicToErrs2(e, &e.err, &err)
defer e.alwaysAtEnd()
defer e.deferred(&err)
e.MustEncode(v)
return
}
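To make the struct-tag conventions from the Encode documentation above concrete, a hypothetical tagged type might look like this (field names are made up):
type Person struct {
	_struct bool   `codec:",omitempty"`    // options on the _struct field apply to all fields
	Name    string `codec:"name"`          // the "codec" key takes precedence
	Email   string `json:"email"`          // the "json" key is used when "codec" is absent
	Age     int    `codec:"age,omitempty"` // per-field option after the comma
	note    string // unexported fields are not encoded
}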
@ -1091,6 +1161,16 @@ func (e *Encoder) MustEncode(v interface{}) {
e.alwaysAtEnd()
}
func (e *Encoder) deferred(err1 *error) {
e.alwaysAtEnd()
if recoverPanicToErr {
if x := recover(); x != nil {
panicValToErr(e, x, err1)
panicValToErr(e, x, &e.err)
}
}
}
// func (e *Encoder) alwaysAtEnd() {
// e.codecFnPooler.alwaysAtEnd()
// }

View file

@ -9,13 +9,14 @@ if {{var "l"}} == 0 {
} else if len({{var "v"}}) != 0 {
{{var "v"}} = {{var "v"}}[:0]
{{var "c"}} = true
} {{end}} {{if isChan }}if {{var "v"}} == nil {
} {{else if isChan }}if {{var "v"}} == nil {
{{var "v"}} = make({{ .CTyp }}, 0)
{{var "c"}} = true
} {{end}}
} else {
{{var "hl"}} := {{var "l"}} > 0
var {{var "rl"}} int; _ = {{var "rl"}}
var {{var "rl"}} int
_ = {{var "rl"}}
{{if isSlice }} if {{var "hl"}} {
if {{var "l"}} > cap({{var "v"}}) {
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
@ -33,25 +34,26 @@ if {{var "l"}} == 0 {
var {{var "j"}} int
// var {{var "dn"}} bool
for ; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
{{if not isArray}} if {{var "j"}} == 0 && len({{var "v"}}) == 0 {
{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
if {{var "hl"}} {
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
} else {
{{var "rl"}} = 8
{{var "rl"}} = {{if isSlice}}8{{else if isChan}}64{{end}}
}
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
{{var "c"}} = true
}{{end}}
{{var "h"}}.ElemContainerState({{var "j"}})
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}
{{if isChan}}{{ $x := printf "%[1]vv%[2]v" .TempVar .Rand }}var {{var $x}} {{ .Typ }}
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */}}
{{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
{{ decLineVar $x }}
{{var "v"}} <- {{ $x }}
{{else}}
// if indefinite, etc, then expand the slice if necessary
// println(">>>> sending ", {{ $x }}, " into ", {{var "v"}}) // TODO: remove this
{{else}}{{/* // if indefinite, etc, then expand the slice if necessary */}}
var {{var "db"}} bool
if {{var "j"}} >= len({{var "v"}}) {
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }}); {{var "c"}} = true
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
{{var "c"}} = true
{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
{{end}}
}
@ -74,4 +76,3 @@ if {{var "l"}} == 0 {
{{if not isArray }}if {{var "c"}} {
*{{ .Varname }} = {{var "v"}}
}{{end}}

27
vendor/github.com/ugorji/go/codec/gen-enc-chan.go.tmpl generated vendored Normal file
View file

@ -0,0 +1,27 @@
{{.Label}}:
switch timeout{{.Sfx}} := z.EncBasicHandle().ChanRecvTimeout; {
case timeout{{.Sfx}} == 0: // only consume available
for {
select {
case b{{.Sfx}} := <-{{.Chan}}:
{{ .Slice }} = append({{.Slice}}, b{{.Sfx}})
default:
break {{.Label}}
}
}
case timeout{{.Sfx}} > 0: // consume until timeout
tt{{.Sfx}} := time.NewTimer(timeout{{.Sfx}})
for {
select {
case b{{.Sfx}} := <-{{.Chan}}:
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
case <-tt{{.Sfx}}.C:
// close(tt.C)
break {{.Label}}
}
}
default: // consume until close
for b{{.Sfx}} := range {{.Chan}} {
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
}
}

View file

@ -64,13 +64,14 @@ if {{var "l"}} == 0 {
} else if len({{var "v"}}) != 0 {
{{var "v"}} = {{var "v"}}[:0]
{{var "c"}} = true
} {{end}} {{if isChan }}if {{var "v"}} == nil {
} {{else if isChan }}if {{var "v"}} == nil {
{{var "v"}} = make({{ .CTyp }}, 0)
{{var "c"}} = true
} {{end}}
} else {
{{var "hl"}} := {{var "l"}} > 0
var {{var "rl"}} int; _ = {{var "rl"}}
var {{var "rl"}} int
_ = {{var "rl"}}
{{if isSlice }} if {{var "hl"}} {
if {{var "l"}} > cap({{var "v"}}) {
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
@ -88,25 +89,26 @@ if {{var "l"}} == 0 {
var {{var "j"}} int
// var {{var "dn"}} bool
for ; ({{var "hl"}} && {{var "j"}} < {{var "l"}}) || !({{var "hl"}} || r.CheckBreak()); {{var "j"}}++ {
{{if not isArray}} if {{var "j"}} == 0 && len({{var "v"}}) == 0 {
{{if not isArray}} if {{var "j"}} == 0 && {{var "v"}} == nil {
if {{var "hl"}} {
{{var "rl"}} = z.DecInferLen({{var "l"}}, z.DecBasicHandle().MaxInitLen, {{ .Size }})
} else {
{{var "rl"}} = 8
{{var "rl"}} = {{if isSlice}}8{{else if isChan}}64{{end}}
}
{{var "v"}} = make([]{{ .Typ }}, {{var "rl"}})
{{var "v"}} = make({{if isSlice}}[]{{ .Typ }}{{else if isChan}}{{.CTyp}}{{end}}, {{var "rl"}})
{{var "c"}} = true
}{{end}}
{{var "h"}}.ElemContainerState({{var "j"}})
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}
{{if isChan}}{{ $x := printf "%[1]vv%[2]v" .TempVar .Rand }}var {{var $x}} {{ .Typ }}
{{/* {{var "dn"}} = r.TryDecodeAsNil() */}}{{/* commented out, as decLineVar handles this already each time */}}
{{if isChan}}{{ $x := printf "%[1]vvcx%[2]v" .TempVar .Rand }}var {{$x}} {{ .Typ }}
{{ decLineVar $x }}
{{var "v"}} <- {{ $x }}
{{else}}
// if indefinite, etc, then expand the slice if necessary
// println(">>>> sending ", {{ $x }}, " into ", {{var "v"}}) // TODO: remove this
{{else}}{{/* // if indefinite, etc, then expand the slice if necessary */}}
var {{var "db"}} bool
if {{var "j"}} >= len({{var "v"}}) {
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }}); {{var "c"}} = true
{{if isSlice }} {{var "v"}} = append({{var "v"}}, {{ zero }})
{{var "c"}} = true
{{else}} z.DecArrayCannotExpand(len(v), {{var "j"}}+1); {{var "db"}} = true
{{end}}
}
@ -129,5 +131,34 @@ if {{var "l"}} == 0 {
{{if not isArray }}if {{var "c"}} {
*{{ .Varname }} = {{var "v"}}
}{{end}}
`
const genEncChanTmpl = `
{{.Label}}:
switch timeout{{.Sfx}} := z.EncBasicHandle().ChanRecvTimeout; {
case timeout{{.Sfx}} == 0: // only consume available
for {
select {
case b{{.Sfx}} := <-{{.Chan}}:
{{ .Slice }} = append({{.Slice}}, b{{.Sfx}})
default:
break {{.Label}}
}
}
case timeout{{.Sfx}} > 0: // consume until timeout
tt{{.Sfx}} := time.NewTimer(timeout{{.Sfx}})
for {
select {
case b{{.Sfx}} := <-{{.Chan}}:
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
case <-tt{{.Sfx}}.C:
// close(tt.C)
break {{.Label}}
}
}
default: // consume until close
for b{{.Sfx}} := range {{.Chan}} {
{{.Slice}} = append({{.Slice}}, b{{.Sfx}})
}
}
`

View file

@ -542,7 +542,6 @@ func (x *genRunner) selfer(encode bool) {
if encode {
x.line(") CodecEncodeSelf(e *" + x.cpfx + "Encoder) {")
x.genRequiredMethodVars(true)
// x.enc(genTopLevelVarName, t)
x.encVar(genTopLevelVarName, t)
} else {
x.line(") CodecDecodeSelf(d *" + x.cpfx + "Decoder) {")
@ -649,7 +648,7 @@ func (x *genRunner) encVar(varname string, t reflect.Type) {
case reflect.Ptr:
telem := t.Elem()
tek := telem.Kind()
if tek == reflect.Array || (tek == reflect.Struct && t != timeTyp) {
if tek == reflect.Array || (tek == reflect.Struct && telem != timeTyp) {
x.enc(varname, genNonPtr(t))
break
}
@ -1083,28 +1082,49 @@ func (x *genRunner) encStruct(varname string, rtid uintptr, t reflect.Type) {
}
func (x *genRunner) encListFallback(varname string, t reflect.Type) {
elemBytes := t.Elem().Kind() == reflect.Uint8
if t.AssignableTo(uint8SliceTyp) {
x.linef("r.EncodeStringBytes(codecSelferCcRAW%s, []byte(%s))", x.xs, varname)
return
}
if t.Kind() == reflect.Array && t.Elem().Kind() == reflect.Uint8 {
if t.Kind() == reflect.Array && elemBytes {
x.linef("r.EncodeStringBytes(codecSelferCcRAW%s, ((*[%d]byte)(%s))[:])", x.xs, t.Len(), varname)
return
}
i := x.varsfx()
g := genTempVarPfx
x.line("r.WriteArrayStart(len(" + varname + "))")
if t.Kind() == reflect.Chan {
x.linef("for %si%s, %si2%s := 0, len(%s); %si%s < %si2%s; %si%s++ {", g, i, g, i, varname, g, i, g, i, g, i)
x.line("r.WriteArrayElem()")
x.linef("%sv%s := <-%s", g, i, varname)
} else {
x.linef("for _, %sv%s := range %s {", genTempVarPfx, i, varname)
x.line("r.WriteArrayElem()")
type ts struct {
Label, Chan, Slice, Sfx string
}
tm, err := template.New("").Parse(genEncChanTmpl)
if err != nil {
panic(err)
}
x.linef("if %s == nil { r.EncodeNil() } else { ", varname)
x.linef("var sch%s []%s", i, x.genTypeName(t.Elem()))
err = tm.Execute(x.w, &ts{"Lsch" + i, varname, "sch" + i, i})
if err != nil {
panic(err)
}
// x.linef("%s = sch%s", varname, i)
if elemBytes {
x.linef("r.EncodeStringBytes(codecSelferCcRAW%s, []byte(%s))", x.xs, "sch"+i)
x.line("}")
return
}
varname = "sch" + i
}
x.line("r.WriteArrayStart(len(" + varname + "))")
x.linef("for _, %sv%s := range %s {", genTempVarPfx, i, varname)
x.line("r.WriteArrayElem()")
x.encVar(genTempVarPfx+"v"+i, t.Elem())
x.line("}")
x.line("r.WriteArrayEnd()")
if t.Kind() == reflect.Chan {
x.line("}")
}
}
func (x *genRunner) encMapFallback(varname string, t reflect.Type) {

View file

@ -391,6 +391,10 @@ var immutableKindsSet = [32]bool{
// Any type which implements Selfer will be able to encode or decode itself.
// Consequently, during (en|de)code, this takes precedence over
// (text|binary)(M|Unm)arshal or extension support.
//
// Note: *the first set of bytes of any value MUST NOT represent nil in the format*.
// This is because, during each decode, we first check whether the next set of bytes
// represent nil, and if so, we just set the value to nil.
type Selfer interface {
CodecEncodeSelf(*Encoder)
CodecDecodeSelf(*Decoder)
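A hand-written sketch of a type satisfying Selfer (the type is hypothetical, and generated implementations use internal helpers rather than recursive MustEncode/MustDecode calls, so treat this purely as an illustration of the method set):
type Temperature struct{ Celsius float64 }

func (t *Temperature) CodecEncodeSelf(e *Encoder) {
	// encode as a bare float64; per the note above, the leading bytes must not represent nil
	e.MustEncode(t.Celsius)
}

func (t *Temperature) CodecDecodeSelf(d *Decoder) {
	d.MustDecode(&t.Celsius)
}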
@ -1543,6 +1547,8 @@ func isEmptyStruct(v reflect.Value, tinfos *TypeInfos, deref, checkStruct bool)
// }
func panicToErr(h errstrDecorator, err *error) {
// Note: This method MUST be called directly from defer i.e. defer panicToErr ...
// else it seems the recover is not fully handled
if recoverPanicToErr {
if x := recover(); x != nil {
// fmt.Printf("panic'ing with: %v\n", x)
@ -1552,15 +1558,6 @@ func panicToErr(h errstrDecorator, err *error) {
}
}
func panicToErrs2(h errstrDecorator, err1, err2 *error) {
if recoverPanicToErr {
if x := recover(); x != nil {
panicValToErr(h, x, err1)
panicValToErr(h, x, err2)
}
}
}
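The "MUST be called directly from defer" note above follows from how recover works in Go: it cancels a panic only when called directly by a deferred function. A small standalone illustration, unrelated to this package:
func safeDiv(a, b int) (q int, err error) {
	defer func() {
		// recover is called directly inside the deferred function, so it catches
		// the panic; wrapping it one call deeper would return nil instead.
		if r := recover(); r != nil {
			err = fmt.Errorf("recovered: %v", r)
		}
	}()
	return a / b, nil
}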
func panicValToErr(h errstrDecorator, v interface{}, err *error) {
switch xerr := v.(type) {
case nil:

View file

@ -157,7 +157,8 @@ func isEmptyValue(v reflect.Value, tinfos *TypeInfos, deref, checkStruct bool) b
}
return isnil
case reflect.Ptr:
isnil := urv.ptr == nil
// isnil := urv.ptr == nil (not sufficient, as a pointer value encodes the type)
isnil := urv.ptr == nil || *(*unsafe.Pointer)(urv.ptr) == nil
if deref {
if isnil {
return true
@ -175,25 +176,31 @@ func isEmptyValue(v reflect.Value, tinfos *TypeInfos, deref, checkStruct bool) b
// --------------------------
// atomicTypeInfoSlice contains length and pointer to the array for a slice.
// It is expected to be 2 words.
//
// Previously, we atomically loaded and stored the length and array pointer separately,
// which could lead to some races.
// We now just atomically store and load the pointer to the value directly.
type atomicTypeInfoSlice struct { // expected to be 2 words
l int // length of the data array (must be first in struct, for 64-bit alignment necessary for 386)
v unsafe.Pointer // data array - Pointer (not uintptr) to maintain GC reference
l int64 // length of the data array
}
func (x *atomicTypeInfoSlice) load() []rtid2ti {
l := int(atomic.LoadInt64(&x.l))
if l == 0 {
xp := unsafe.Pointer(x)
x2 := *(*atomicTypeInfoSlice)(atomic.LoadPointer(&xp))
if x2.l == 0 {
return nil
}
return *(*[]rtid2ti)(unsafe.Pointer(&unsafeSlice{Data: atomic.LoadPointer(&x.v), Len: l, Cap: l}))
// return (*[]rtid2ti)(atomic.LoadPointer(&x.v))
return *(*[]rtid2ti)(unsafe.Pointer(&unsafeSlice{Data: x2.v, Len: x2.l, Cap: x2.l}))
}
func (x *atomicTypeInfoSlice) store(p []rtid2ti) {
s := (*unsafeSlice)(unsafe.Pointer(&p))
atomic.StorePointer(&x.v, s.Data)
atomic.StoreInt64(&x.l, int64(s.Len))
// atomic.StorePointer(&x.v, unsafe.Pointer(p))
xp := unsafe.Pointer(x)
atomic.StorePointer(&xp, unsafe.Pointer(&atomicTypeInfoSlice{l: s.Len, v: s.Data}))
}
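The same publish-a-whole-snapshot idea, sketched with the standard library's atomic.Value instead of the unsafe pointer juggling above (the names here are made up):
type tiSnapshot struct{ entries []rtid2ti }

var currentTis atomic.Value // holds *tiSnapshot

func loadTis() []rtid2ti {
	s, _ := currentTis.Load().(*tiSnapshot)
	if s == nil {
		return nil
	}
	return s.entries
}

func storeTis(entries []rtid2ti) {
	// a single atomic pointer swap publishes length and data together,
	// so readers never observe a mismatched pair
	currentTis.Store(&tiSnapshot{entries: entries})
}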
// --------------------------

View file

@ -606,7 +606,7 @@ func (d *jsonDecDriver) ReadMapStart() int {
}
const xc uint8 = '{'
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read map - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
d.c = containerMapStart
@ -619,7 +619,7 @@ func (d *jsonDecDriver) ReadArrayStart() int {
}
const xc uint8 = '['
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read array - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
d.c = containerArrayStart
@ -638,9 +638,10 @@ func (d *jsonDecDriver) CheckBreak() bool {
// - ReadArrayElem would become:
// readContainerState(containerArrayElem, ',', d.c != containerArrayStart)
//
// However, until mid-stack inlining (go 1.10?) comes, supporting inlining of
// oneliners, we explicitly write them all 5 out to elide the extra func call.
// TODO: For Go 1.10, if inlined, consider consolidating these.
// However, until mid-stack inlining comes in go1.11 which supports inlining of
// one-liners, we explicitly write them all 5 out to elide the extra func call.
//
// TODO: For Go 1.11, if inlined, consider consolidating these.
func (d *jsonDecDriver) ReadArrayElem() {
const xc uint8 = ','
@ -649,7 +650,7 @@ func (d *jsonDecDriver) ReadArrayElem() {
}
if d.c != containerArrayStart {
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read array element - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
}
@ -662,7 +663,7 @@ func (d *jsonDecDriver) ReadArrayEnd() {
d.tok = d.r.skip(&jsonCharWhitespaceSet)
}
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read array end - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
d.c = containerArrayEnd
@ -675,7 +676,7 @@ func (d *jsonDecDriver) ReadMapElemKey() {
}
if d.c != containerMapStart {
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read map key - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
}
@ -688,7 +689,7 @@ func (d *jsonDecDriver) ReadMapElemValue() {
d.tok = d.r.skip(&jsonCharWhitespaceSet)
}
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read map value - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
d.c = containerMapValue
@ -700,7 +701,7 @@ func (d *jsonDecDriver) ReadMapEnd() {
d.tok = d.r.skip(&jsonCharWhitespaceSet)
}
if d.tok != xc {
d.d.errorf("expect char '%c' but got char '%c'", xc, d.tok)
d.d.errorf("read map end - expect char '%c' but got char '%c'", xc, d.tok)
}
d.tok = 0
d.c = containerMapEnd
@ -1267,7 +1268,7 @@ type JsonHandle struct {
// If not configured, raw bytes are encoded to/from base64 text.
RawBytesExt InterfaceExt
_ [3]uint64 // padding
_ [2]uint64 // padding
}
// Name returns the name of the handle: json

View file

@ -38224,7 +38224,6 @@ func (x codecSelfer19781) dectestMammoth2Basic(v *testMammoth2Basic, d *Decoder)
yyh1.ElemContainerState(yyj1)
// if indefinite, etc, then expand the slice if necessary
var yydb1 bool
if yyj1 >= len(yyv1) {
z.DecArrayCannotExpand(len(v), yyj1+1)
@ -38372,7 +38371,7 @@ func (x codecSelfer19781) decSliceTestMammoth2(v *[]TestMammoth2, d *Decoder) {
var yyj1 int
// var yydn1 bool
for ; (yyhl1 && yyj1 < yyl1) || !(yyhl1 || r.CheckBreak()); yyj1++ {
if yyj1 == 0 && len(yyv1) == 0 {
if yyj1 == 0 && yyv1 == nil {
if yyhl1 {
yyrl1 = z.DecInferLen(yyl1, z.DecBasicHandle().MaxInitLen, 4880)
} else {
@ -38383,7 +38382,6 @@ func (x codecSelfer19781) decSliceTestMammoth2(v *[]TestMammoth2, d *Decoder) {
}
yyh1.ElemContainerState(yyj1)
// if indefinite, etc, then expand the slice if necessary
var yydb1 bool
if yyj1 >= len(yyv1) {
yyv1 = append(yyv1, TestMammoth2{})
@ -38414,7 +38412,6 @@ func (x codecSelfer19781) decSliceTestMammoth2(v *[]TestMammoth2, d *Decoder) {
if yyc1 {
*v = yyv1
}
}
func (x codecSelfer19781) encArray4int64(v *[4]int64, e *Encoder) {
@ -38452,7 +38449,6 @@ func (x codecSelfer19781) decArray4int64(v *[4]int64, d *Decoder) {
yyh1.ElemContainerState(yyj1)
// if indefinite, etc, then expand the slice if necessary
var yydb1 bool
if yyj1 >= len(yyv1) {
z.DecArrayCannotExpand(len(v), yyj1+1)

View file

@ -82,6 +82,86 @@ const (
var mpTimeExtTag int8 = -1
var mpTimeExtTagU = uint8(mpTimeExtTag)
// var mpdesc = map[byte]string{
// mpPosFixNumMin: "PosFixNumMin",
// mpPosFixNumMax: "PosFixNumMax",
// mpFixMapMin: "FixMapMin",
// mpFixMapMax: "FixMapMax",
// mpFixArrayMin: "FixArrayMin",
// mpFixArrayMax: "FixArrayMax",
// mpFixStrMin: "FixStrMin",
// mpFixStrMax: "FixStrMax",
// mpNil: "Nil",
// mpFalse: "False",
// mpTrue: "True",
// mpFloat: "Float",
// mpDouble: "Double",
// mpUint8: "Uint8",
// mpUint16: "Uint16",
// mpUint32: "Uint32",
// mpUint64: "Uint64",
// mpInt8: "Int8",
// mpInt16: "Int16",
// mpInt32: "Int32",
// mpInt64: "Int64",
// mpBin8: "Bin8",
// mpBin16: "Bin16",
// mpBin32: "Bin32",
// mpExt8: "Ext8",
// mpExt16: "Ext16",
// mpExt32: "Ext32",
// mpFixExt1: "FixExt1",
// mpFixExt2: "FixExt2",
// mpFixExt4: "FixExt4",
// mpFixExt8: "FixExt8",
// mpFixExt16: "FixExt16",
// mpStr8: "Str8",
// mpStr16: "Str16",
// mpStr32: "Str32",
// mpArray16: "Array16",
// mpArray32: "Array32",
// mpMap16: "Map16",
// mpMap32: "Map32",
// mpNegFixNumMin: "NegFixNumMin",
// mpNegFixNumMax: "NegFixNumMax",
// }
func mpdesc(bd byte) string {
switch bd {
case mpNil:
return "nil"
case mpFalse:
return "false"
case mpTrue:
return "true"
case mpFloat, mpDouble:
return "float"
case mpUint8, mpUint16, mpUint32, mpUint64:
return "uint"
case mpInt8, mpInt16, mpInt32, mpInt64:
return "int"
default:
switch {
case bd >= mpPosFixNumMin && bd <= mpPosFixNumMax:
return "int"
case bd >= mpNegFixNumMin && bd <= mpNegFixNumMax:
return "int"
case bd == mpStr8, bd == mpStr16, bd == mpStr32, bd >= mpFixStrMin && bd <= mpFixStrMax:
return "string|bytes"
case bd == mpBin8, bd == mpBin16, bd == mpBin32:
return "bytes"
case bd == mpArray16, bd == mpArray32, bd >= mpFixArrayMin && bd <= mpFixArrayMax:
return "array"
case bd == mpMap16, bd == mpMap32, bd >= mpFixMapMin && bd <= mpFixMapMax:
return "map"
case bd >= mpFixExt1 && bd <= mpFixExt16, bd >= mpExt8 && bd <= mpExt32:
return "ext"
default:
return "unknown"
}
}
}
// MsgpackSpecRpcMultiArgs is a special type which signifies to the MsgpackSpecRpcCodec
// that the backend RPC service takes multiple arguments, which have been arranged
// in sequence in the slice.
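A hypothetical client-side sketch of sending several positional arguments as one MsgpackSpecRpcMultiArgs value (the service name and argument values are made up; it assumes the standard net/rpc client wired to this package's msgpack codec):
func callWithMultiArgs(conn io.ReadWriteCloser, h *MsgpackHandle) (reply string, err error) {
	client := rpc.NewClientWithCodec(MsgpackSpecRpc.ClientCodec(conn, h))
	defer client.Close()

	args := MsgpackSpecRpcMultiArgs{"user-42", 7, true}
	err = client.Call("UserService.Describe", args, &reply)
	return
}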
@ -442,7 +522,7 @@ func (d *msgpackDecDriver) DecodeNaked() {
n.l = d.r.readx(clen)
}
default:
d.d.errorf("Nil-Deciphered DecodeValue: %s: hex: %x, dec: %d", msgBadDesc, bd, bd)
d.d.errorf("cannot infer value: %s: Ox%x/%d/%s", msgBadDesc, bd, bd, mpdesc(bd))
}
}
if !decodeFurther {
@ -484,7 +564,7 @@ func (d *msgpackDecDriver) DecodeInt64() (i int64) {
case d.bd >= mpNegFixNumMin && d.bd <= mpNegFixNumMax:
i = int64(int8(d.bd))
default:
d.d.errorf("Unhandled single-byte unsigned integer value: %s: %x", msgBadDesc, d.bd)
d.d.errorf("cannot decode signed integer: %s: %x/%s", msgBadDesc, d.bd, mpdesc(d.bd))
return
}
}
@ -510,28 +590,28 @@ func (d *msgpackDecDriver) DecodeUint64() (ui uint64) {
if i := int64(int8(d.r.readn1())); i >= 0 {
ui = uint64(i)
} else {
d.d.errorf("Assigning negative signed value: %v, to unsigned type", i)
d.d.errorf("assigning negative signed value: %v, to unsigned type", i)
return
}
case mpInt16:
if i := int64(int16(bigen.Uint16(d.r.readx(2)))); i >= 0 {
ui = uint64(i)
} else {
d.d.errorf("Assigning negative signed value: %v, to unsigned type", i)
d.d.errorf("assigning negative signed value: %v, to unsigned type", i)
return
}
case mpInt32:
if i := int64(int32(bigen.Uint32(d.r.readx(4)))); i >= 0 {
ui = uint64(i)
} else {
d.d.errorf("Assigning negative signed value: %v, to unsigned type", i)
d.d.errorf("assigning negative signed value: %v, to unsigned type", i)
return
}
case mpInt64:
if i := int64(bigen.Uint64(d.r.readx(8))); i >= 0 {
ui = uint64(i)
} else {
d.d.errorf("Assigning negative signed value: %v, to unsigned type", i)
d.d.errorf("assigning negative signed value: %v, to unsigned type", i)
return
}
default:
@ -539,10 +619,10 @@ func (d *msgpackDecDriver) DecodeUint64() (ui uint64) {
case d.bd >= mpPosFixNumMin && d.bd <= mpPosFixNumMax:
ui = uint64(d.bd)
case d.bd >= mpNegFixNumMin && d.bd <= mpNegFixNumMax:
d.d.errorf("Assigning negative signed value: %v, to unsigned type", int(d.bd))
d.d.errorf("assigning negative signed value: %v, to unsigned type", int(d.bd))
return
default:
d.d.errorf("Unhandled single-byte unsigned integer value: %s: %x", msgBadDesc, d.bd)
d.d.errorf("cannot decode unsigned integer: %s: %x/%s", msgBadDesc, d.bd, mpdesc(d.bd))
return
}
}
@ -576,7 +656,7 @@ func (d *msgpackDecDriver) DecodeBool() (b bool) {
} else if d.bd == mpTrue || d.bd == 1 {
b = true
} else {
d.d.errorf("Invalid single-byte value for bool: %s: %x", msgBadDesc, d.bd)
d.d.errorf("cannot decode bool: %s: %x/%s", msgBadDesc, d.bd, mpdesc(d.bd))
return
}
d.bdRead = false
@ -699,7 +779,7 @@ func (d *msgpackDecDriver) readContainerLen(ct msgpackContainerType) (clen int)
} else if (ct.bFixMin & bd) == ct.bFixMin {
clen = int(ct.bFixMin ^ bd)
} else {
d.d.errorf("readContainerLen: %s: hex: %x, decimal: %d", msgBadDesc, bd, bd)
d.d.errorf("cannot read container length: %s: hex: %x, decimal: %d", msgBadDesc, bd, bd)
return
}
d.bdRead = false
@ -800,7 +880,7 @@ func (d *msgpackDecDriver) decodeTime(clen int) (t time.Time) {
func (d *msgpackDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) (realxtag uint64) {
if xtag > 0xff {
d.d.errorf("decodeExt: tag must be <= 0xff; got: %v", xtag)
d.d.errorf("ext: tag must be <= 0xff; got: %v", xtag)
return
}
realxtag1, xbs := d.decodeExtV(ext != nil, uint8(xtag))
@ -829,7 +909,7 @@ func (d *msgpackDecDriver) decodeExtV(verifyTag bool, tag byte) (xtag byte, xbs
clen := d.readExtLen()
xtag = d.r.readn1()
if verifyTag && xtag != tag {
d.d.errorf("Wrong extension tag. Got %b. Expecting: %v", xtag, tag)
d.d.errorf("wrong extension tag - got %b, expecting %v", xtag, tag)
return
}
xbs = d.r.readx(clen)
@ -865,7 +945,7 @@ type MsgpackHandle struct {
binaryEncodingType
noElemSeparators
_ [1]uint64 // padding
// _ [1]uint64 // padding
}
// Name returns the name of the handle: msgpack
@ -970,13 +1050,13 @@ func (c *msgpackSpecRpcCodec) parseCustomHeader(expectTypeByte byte, msgid *uint
var b = ba[0]
if b != fia {
err = fmt.Errorf("Unexpected value for array descriptor: Expecting %v. Received %v", fia, b)
err = fmt.Errorf("not array - %s %x/%s", msgBadDesc, b, mpdesc(b))
} else {
err = c.read(&b)
if err == nil {
if b != expectTypeByte {
err = fmt.Errorf("Unexpected byte descriptor. Expecting %v; Received %v",
expectTypeByte, b)
err = fmt.Errorf("%s - expecting %v but got %x/%s",
msgBadDesc, expectTypeByte, b, mpdesc(b))
} else {
err = c.read(msgid)
if err == nil {

View file

@ -1,219 +0,0 @@
// Copyright (c) 2012-2018 Ugorji Nwoke. All rights reserved.
// Use of this source code is governed by a MIT license found in the LICENSE file.
// +build ignore
package codec
import (
"math/rand"
"time"
)
// NoopHandle returns a no-op handle. It basically does nothing.
// It is only useful for benchmarking, as it gives an idea of the
// overhead from the codec framework.
//
// LIBRARY USERS: *** DO NOT USE ***
func NoopHandle(slen int) *noopHandle {
h := noopHandle{}
h.rand = rand.New(rand.NewSource(time.Now().UnixNano()))
h.B = make([][]byte, slen)
h.S = make([]string, slen)
for i := 0; i < len(h.S); i++ {
b := make([]byte, i+1)
for j := 0; j < len(b); j++ {
b[j] = 'a' + byte(i)
}
h.B[i] = b
h.S[i] = string(b)
}
return &h
}
// noopHandle does nothing.
// It is used to simulate the overhead of the codec framework.
type noopHandle struct {
BasicHandle
binaryEncodingType
noopDrv // noopDrv is unexported here, so we can get a copy of it when needed.
}
type noopDrv struct {
d *Decoder
e *Encoder
i int
S []string
B [][]byte
mks []bool // stack. if map (true), else if array (false)
mk bool // top of stack. what container are we on? map or array?
ct valueType // last response for IsContainerType.
cb int // counter for ContainerType
rand *rand.Rand
}
func (h *noopDrv) r(v int) int { return h.rand.Intn(v) }
func (h *noopDrv) m(v int) int { h.i++; return h.i % v }
func (h *noopDrv) newEncDriver(e *Encoder) encDriver { h.e = e; return h }
func (h *noopDrv) newDecDriver(d *Decoder) decDriver { h.d = d; return h }
func (h *noopDrv) reset() {}
func (h *noopDrv) uncacheRead() {}
// --- encDriver
// stack functions (for map and array)
func (h *noopDrv) start(b bool) {
// println("start", len(h.mks)+1)
h.mks = append(h.mks, b)
h.mk = b
}
func (h *noopDrv) end() {
// println("end: ", len(h.mks)-1)
h.mks = h.mks[:len(h.mks)-1]
if len(h.mks) > 0 {
h.mk = h.mks[len(h.mks)-1]
} else {
h.mk = false
}
}
func (h *noopDrv) EncodeBuiltin(rt uintptr, v interface{}) {}
func (h *noopDrv) EncodeNil() {}
func (h *noopDrv) EncodeInt(i int64) {}
func (h *noopDrv) EncodeUint(i uint64) {}
func (h *noopDrv) EncodeBool(b bool) {}
func (h *noopDrv) EncodeFloat32(f float32) {}
func (h *noopDrv) EncodeFloat64(f float64) {}
func (h *noopDrv) EncodeRawExt(re *RawExt, e *Encoder) {}
func (h *noopDrv) EncodeArrayStart(length int) { h.start(true) }
func (h *noopDrv) EncodeMapStart(length int) { h.start(false) }
func (h *noopDrv) EncodeEnd() { h.end() }
func (h *noopDrv) EncodeString(c charEncoding, v string) {}
// func (h *noopDrv) EncodeSymbol(v string) {}
func (h *noopDrv) EncodeStringBytes(c charEncoding, v []byte) {}
func (h *noopDrv) EncodeExt(rv interface{}, xtag uint64, ext Ext, e *Encoder) {}
// ---- decDriver
func (h *noopDrv) initReadNext() {}
func (h *noopDrv) CheckBreak() bool { return false }
func (h *noopDrv) IsBuiltinType(rt uintptr) bool { return false }
func (h *noopDrv) DecodeBuiltin(rt uintptr, v interface{}) {}
func (h *noopDrv) DecodeInt(bitsize uint8) (i int64) { return int64(h.m(15)) }
func (h *noopDrv) DecodeUint(bitsize uint8) (ui uint64) { return uint64(h.m(35)) }
func (h *noopDrv) DecodeFloat(chkOverflow32 bool) (f float64) { return float64(h.m(95)) }
func (h *noopDrv) DecodeBool() (b bool) { return h.m(2) == 0 }
func (h *noopDrv) DecodeString() (s string) { return h.S[h.m(8)] }
func (h *noopDrv) DecodeStringAsBytes() []byte { return h.DecodeBytes(nil, true) }
func (h *noopDrv) DecodeBytes(bs []byte, zerocopy bool) []byte { return h.B[h.m(len(h.B))] }
func (h *noopDrv) ReadEnd() { h.end() }
// toggle map/slice
func (h *noopDrv) ReadMapStart() int { h.start(true); return h.m(10) }
func (h *noopDrv) ReadArrayStart() int { h.start(false); return h.m(10) }
func (h *noopDrv) ContainerType() (vt valueType) {
// return h.m(2) == 0
// handle kStruct, which will bomb if it calls this and
// doesn't get back a map or array.
// consequently, if the return value is not map or array,
// reset it to one of them based on h.m(7) % 2
// for kstruct: at least one out of every 2 times,
// return one of valueTypeMap or Array (else kstruct bombs)
// however, every 10th time it is called, we just return something else.
var vals = [...]valueType{valueTypeArray, valueTypeMap}
// ------------ TAKE ------------
// if h.cb%2 == 0 {
// if h.ct == valueTypeMap || h.ct == valueTypeArray {
// } else {
// h.ct = vals[h.m(2)]
// }
// } else if h.cb%5 == 0 {
// h.ct = valueType(h.m(8))
// } else {
// h.ct = vals[h.m(2)]
// }
// ------------ TAKE ------------
// if h.cb%16 == 0 {
// h.ct = valueType(h.cb % 8)
// } else {
// h.ct = vals[h.cb%2]
// }
h.ct = vals[h.cb%2]
h.cb++
return h.ct
// if h.ct == valueTypeNil || h.ct == valueTypeString || h.ct == valueTypeBytes {
// return h.ct
// }
// return valueTypeUnset
// TODO: may need to tweak this so it works.
// if h.ct == valueTypeMap && vt == valueTypeArray ||
// h.ct == valueTypeArray && vt == valueTypeMap {
// h.cb = !h.cb
// h.ct = vt
// return h.cb
// }
// // go in a loop and check it.
// h.ct = vt
// h.cb = h.m(7) == 0
// return h.cb
}
func (h *noopDrv) TryDecodeAsNil() bool {
if h.mk {
return false
} else {
return h.m(8) == 0
}
}
func (h *noopDrv) DecodeExt(rv interface{}, xtag uint64, ext Ext) uint64 {
return 0
}
func (h *noopDrv) DecodeNaked() {
// use h.r (random) not h.m() because h.m() could cause the same value to be given.
var sk int
if h.mk {
// if mapkey, do not support values of nil OR bytes, array, map or rawext
sk = h.r(7) + 1
} else {
sk = h.r(12)
}
n := &h.d.n
switch sk {
case 0:
n.v = valueTypeNil
case 1:
n.v, n.b = valueTypeBool, false
case 2:
n.v, n.b = valueTypeBool, true
case 3:
n.v, n.i = valueTypeInt, h.DecodeInt(64)
case 4:
n.v, n.u = valueTypeUint, h.DecodeUint(64)
case 5:
n.v, n.f = valueTypeFloat, h.DecodeFloat(true)
case 6:
n.v, n.f = valueTypeFloat, h.DecodeFloat(false)
case 7:
n.v, n.s = valueTypeString, h.DecodeString()
case 8:
n.v, n.l = valueTypeBytes, h.B[h.m(len(h.B))]
case 9:
n.v = valueTypeArray
case 10:
n.v = valueTypeMap
default:
n.v = valueTypeExt
n.u = h.DecodeUint(64)
n.l = h.B[h.m(len(h.B))]
}
h.ct = n.v
return
}

View file

@ -104,7 +104,7 @@ func (c *rpcCodec) write(obj1, obj2 interface{}, writeObj2 bool) (err error) {
if err == nil {
err = c.f.Flush()
} else {
c.f.Flush()
_ = c.f.Flush() // swallow flush error, so we maintain prior error on write
}
}
return
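The branch above keeps the first error and deliberately discards the flush error once a write has already failed. A standalone hedged sketch of the same "keep the first error" pattern, using only a bufio.Writer; the helper name is illustrative and not part of this package's API:

// writeAndFlush is an illustrative helper, not the codec's API.
// Assumes import "bufio".
func writeAndFlush(bw *bufio.Writer, p []byte) error {
	_, err := bw.Write(p)
	if err == nil {
		// no prior error: a flush failure becomes the reported error
		return bw.Flush()
	}
	// a write error already occurred: flush best-effort, report the write error
	_ = bw.Flush()
	return err
}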
@ -144,15 +144,6 @@ func (c *rpcCodec) Close() error {
}
c.clsmu.Lock()
c.cls = true
// var fErr error
// if c.f != nil {
// fErr = c.f.Flush()
// }
// _ = fErr
// c.clsErr = c.c.Close()
// if c.clsErr == nil && fErr != nil {
// c.clsErr = fErr
// }
c.clsErr = c.c.Close()
c.clsmu.Unlock()
return c.clsErr

View file

@ -290,7 +290,7 @@ func (d *simpleDecDriver) decCheckInteger() (ui uint64, neg bool) {
ui = uint64(bigen.Uint64(d.r.readx(8)))
neg = true
default:
d.d.errorf("Integer only valid from pos/neg integer1..8. Invalid descriptor: %v", d.bd)
d.d.errorf("integer only valid from pos/neg integer1..8. Invalid descriptor: %v", d.bd)
return
}
// don't do this check, because callers may only want the unsigned value.
@ -314,7 +314,7 @@ func (d *simpleDecDriver) DecodeInt64() (i int64) {
func (d *simpleDecDriver) DecodeUint64() (ui uint64) {
ui, neg := d.decCheckInteger()
if neg {
d.d.errorf("Assigning negative signed value to unsigned type")
d.d.errorf("assigning negative signed value to unsigned type")
return
}
d.bdRead = false
@ -333,7 +333,7 @@ func (d *simpleDecDriver) DecodeFloat64() (f float64) {
if d.bd >= simpleVdPosInt && d.bd <= simpleVdNegInt+3 {
f = float64(d.DecodeInt64())
} else {
d.d.errorf("Float only valid from float32/64: Invalid descriptor: %v", d.bd)
d.d.errorf("float only valid from float32/64: Invalid descriptor: %v", d.bd)
return
}
}
@ -350,7 +350,7 @@ func (d *simpleDecDriver) DecodeBool() (b bool) {
b = true
} else if d.bd == simpleVdFalse {
} else {
d.d.errorf("Invalid single-byte value for bool: %s: %x", msgBadDesc, d.bd)
d.d.errorf("cannot decode bool - %s: %x", msgBadDesc, d.bd)
return
}
d.bdRead = false
@ -418,7 +418,7 @@ func (d *simpleDecDriver) decLen() int {
}
return int(ui)
}
d.d.errorf("decLen: Cannot read length: bd%%8 must be in range 0..4. Got: %d", d.bd%8)
d.d.errorf("cannot read length: bd%%8 must be in range 0..4. Got: %d", d.bd%8)
return -1
}
@ -482,7 +482,7 @@ func (d *simpleDecDriver) DecodeTime() (t time.Time) {
func (d *simpleDecDriver) DecodeExt(rv interface{}, xtag uint64, ext Ext) (realxtag uint64) {
if xtag > 0xff {
d.d.errorf("decodeExt: tag must be <= 0xff; got: %v", xtag)
d.d.errorf("ext: tag must be <= 0xff; got: %v", xtag)
return
}
realxtag1, xbs := d.decodeExtV(ext != nil, uint8(xtag))
@ -506,7 +506,7 @@ func (d *simpleDecDriver) decodeExtV(verifyTag bool, tag byte) (xtag byte, xbs [
l := d.decLen()
xtag = d.r.readn1()
if verifyTag && xtag != tag {
d.d.errorf("Wrong extension tag. Got %b. Expecting: %v", xtag, tag)
d.d.errorf("wrong extension tag. Got %b. Expecting: %v", xtag, tag)
return
}
xbs = d.r.readx(l)
@ -514,7 +514,7 @@ func (d *simpleDecDriver) decodeExtV(verifyTag bool, tag byte) (xtag byte, xbs [
simpleVdByteArray + 2, simpleVdByteArray + 3, simpleVdByteArray + 4:
xbs = d.DecodeBytes(nil, true)
default:
d.d.errorf("Invalid descriptor - expecting extensions/bytearray, got: 0x%x", d.bd)
d.d.errorf("ext - %s - expecting extensions/bytearray, got: 0x%x", msgBadDesc, d.bd)
return
}
d.bdRead = false
@ -579,7 +579,7 @@ func (d *simpleDecDriver) DecodeNaked() {
n.v = valueTypeMap
decodeFurther = true
default:
d.d.errorf("decodeNaked: Unrecognized d.bd: 0x%x", d.bd)
d.d.errorf("cannot infer value - %s 0x%x", msgBadDesc, d.bd)
}
if !decodeFurther {
@ -616,7 +616,7 @@ type SimpleHandle struct {
// EncZeroValuesAsNil says to encode zero values for numbers, bool, string, etc as nil
EncZeroValuesAsNil bool
_ [1]uint64 // padding
// _ [1]uint64 // padding
}
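For context, EncZeroValuesAsNil is simply set on the handle before encoding. A brief hedged sketch under that assumption; the struct type and function name are illustrative:

// Illustrative only. Assumes import "bytes" and this codec package.
func encodeZeroAsNilExample() ([]byte, error) {
	var sh codec.SimpleHandle
	sh.EncZeroValuesAsNil = true // zero-valued fields are written as nil

	type point struct{ X, Y int }
	var buf bytes.Buffer
	enc := codec.NewEncoder(&buf, &sh)
	err := enc.Encode(point{}) // X and Y are zero, so both encode as nil
	return buf.Bytes(), err
}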
// Name returns the name of the handle: simple

View file

@ -5,7 +5,12 @@
package codec
import "time"
import (
"strings"
"time"
)
const teststrucflexChanCap = 64
// This file contains values used by tests alone.
// This is where we may try out different things,
@ -40,10 +45,6 @@ type SstructbigMapBySlice struct {
Sptr *Sstructbig
}
type Sinterface interface {
Noop()
}
// small struct for testing that codecgen works for unexported types
type tLowerFirstLetter struct {
I int
@ -62,6 +63,7 @@ type AnonInTestStrucIntf struct {
Ms map[string]interface{}
Nintf interface{} //don't set this, so we can test for nil
T time.Time
Tptr *time.Time
}
var testWRepeated512 wrapBytes
@ -79,6 +81,8 @@ type TestStrucFlex struct {
_struct struct{} `codec:",omitempty"` //set omitempty for every field
TestStrucCommon
Chstr chan string
Mis map[int]string
Mbu64 map[bool]struct{}
Miwu64s map[int]wrapUint64Slice
@ -108,8 +112,21 @@ type TestStrucFlex struct {
Nteststruc *TestStrucFlex
}
func emptyTestStrucFlex() *TestStrucFlex {
var ts TestStrucFlex
// we initialize the chan and start draining it, so that decoding into it does not block (there is otherwise no consumer)
ts.Chstr = make(chan string, teststrucflexChanCap)
go func() {
for range ts.Chstr {
}
}() // drain it
return &ts
}
func newTestStrucFlex(depth, n int, bench, useInterface, useStringKeyOnly bool) (ts *TestStrucFlex) {
ts = &TestStrucFlex{
Chstr: make(chan string, teststrucflexChanCap),
Miwu64s: map[int]wrapUint64Slice{
5: []wrapUint64{1, 2, 3, 4, 5},
3: []wrapUint64{1, 2, 3},
@ -156,6 +173,11 @@ func newTestStrucFlex(depth, n int, bench, useInterface, useStringKeyOnly bool)
ArrStrUi64T: [4]stringUint64T{{"4", 4}, {"3", 3}, {"2", 2}, {"1", 1}},
}
numChanSend := cap(ts.Chstr) / 4 // 16
for i := 0; i < numChanSend; i++ {
ts.Chstr <- strings.Repeat("A", i+1)
}
ts.Ui64slicearray = []*[4]uint64{&ts.Ui64array, &ts.Ui64array}
if useInterface {

View file

@ -229,6 +229,11 @@ func testCodecGroup(t *testing.T) {
t.Run("TestMsgpackScalars", TestMsgpackScalars)
t.Run("TestBincScalars", TestBincScalars)
t.Run("TestSimpleScalars", TestSimpleScalars)
t.Run("TestJsonOmitempty", TestJsonOmitempty)
t.Run("TestCborOmitempty", TestCborOmitempty)
t.Run("TestMsgpackOmitempty", TestMsgpackOmitempty)
t.Run("TestBincOmitempty", TestBincOmitempty)
t.Run("TestSimpleOmitempty", TestSimpleOmitempty)
t.Run("TestJsonIntfMapping", TestJsonIntfMapping)
t.Run("TestCborIntfMapping", TestCborIntfMapping)
t.Run("TestMsgpackIntfMapping", TestMsgpackIntfMapping)
@ -265,6 +270,7 @@ func testJsonGroup(t *testing.T) {
t.Run("TestJsonUintToInt", TestJsonUintToInt)
t.Run("TestJsonDifferentMapOrSliceType", TestJsonDifferentMapOrSliceType)
t.Run("TestJsonScalars", TestJsonScalars)
t.Run("TestJsonOmitempty", TestJsonOmitempty)
t.Run("TestJsonIntfMapping", TestJsonIntfMapping)
}
@ -289,6 +295,8 @@ func testBincGroup(t *testing.T) {
t.Run("TestBincUintToInt", TestBincUintToInt)
t.Run("TestBincDifferentMapOrSliceType", TestBincDifferentMapOrSliceType)
t.Run("TestBincScalars", TestBincScalars)
t.Run("TestBincOmitempty", TestBincOmitempty)
t.Run("TestBincIntfMapping", TestBincIntfMapping)
}
func testCborGroup(t *testing.T) {
@ -313,7 +321,8 @@ func testCborGroup(t *testing.T) {
t.Run("TestCborUintToInt", TestCborUintToInt)
t.Run("TestCborDifferentMapOrSliceType", TestCborDifferentMapOrSliceType)
t.Run("TestCborScalars", TestCborScalars)
t.Run("TestCborOmitempty", TestCborOmitempty)
t.Run("TestCborIntfMapping", TestCborIntfMapping)
t.Run("TestCborHalfFloat", TestCborHalfFloat)
}
@ -337,6 +346,8 @@ func testMsgpackGroup(t *testing.T) {
t.Run("TestMsgpackUintToInt", TestMsgpackUintToInt)
t.Run("TestMsgpackDifferentMapOrSliceType", TestMsgpackDifferentMapOrSliceType)
t.Run("TestMsgpackScalars", TestMsgpackScalars)
t.Run("TestMsgpackOmitempty", TestMsgpackOmitempty)
t.Run("TestMsgpackIntfMapping", TestMsgpackIntfMapping)
}
func testSimpleGroup(t *testing.T) {
@ -358,6 +369,8 @@ func testSimpleGroup(t *testing.T) {
t.Run("TestSimpleUintToInt", TestSimpleUintToInt)
t.Run("TestSimpleDifferentMapOrSliceType", TestSimpleDifferentMapOrSliceType)
t.Run("TestSimpleScalars", TestSimpleScalars)
t.Run("TestSimpleOmitempty", TestSimpleOmitempty)
t.Run("TestSimpleIntfMapping", TestSimpleIntfMapping)
}
func testSimpleMammothGroup(t *testing.T) {