Dep helper (#2151)

* Add dep task to update go dependencies

* Update go dependencies
This commit is contained in:
Manuel Alejandro de Brito Fontes 2018-09-29 19:47:07 -03:00 committed by Miek Gieben
parent 8f8b81f56b
commit 0e8977761d
764 changed files with 172 additions and 267451 deletions

View file

@ -1,32 +0,0 @@
package jmespath
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestValidPrecompiledExpressionSearches verifies that a compiled
// expression can be evaluated against a document via Search.
func TestValidPrecompiledExpressionSearches(t *testing.T) {
	assert := assert.New(t)
	data := map[string]interface{}{"foo": "bar"}
	precompiled, err := Compile("foo")
	assert.Nil(err)
	result, err := precompiled.Search(data)
	assert.Nil(err)
	assert.Equal("bar", result)
}
func TestInvalidPrecompileErrors(t *testing.T) {
assert := assert.New(t)
_, err := Compile("not a valid expression")
assert.NotNil(err)
}
// TestInvalidMustCompilePanics verifies that MustCompile panics on a
// malformed expression rather than returning an error.
func TestInvalidMustCompilePanics(t *testing.T) {
	defer func() {
		// recover() is non-nil only if MustCompile panicked.
		assert.NotNil(t, recover())
	}()
	MustCompile("not a valid expression")
}

View file

@ -1,123 +0,0 @@
package jmespath
import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
)
// TestSuite is one suite from a compliance JSON file: a shared input
// document (Given) plus the test cases evaluated against it.
type TestSuite struct {
Given interface{}
TestCases []TestCase `json:"cases"`
Comment string
}
// TestCase is a single compliance check: an expression and either the
// expected Result or, for negative cases, a non-empty Error string.
type TestCase struct {
Comment string
Expression string
Result interface{}
Error string
}
// whiteListed is the set of compliance files that are actually run;
// files in the compliance directory not listed here are skipped.
var whiteListed = []string{
"compliance/basic.json",
"compliance/current.json",
"compliance/escape.json",
"compliance/filters.json",
"compliance/functions.json",
"compliance/identifiers.json",
"compliance/indices.json",
"compliance/literal.json",
"compliance/multiselect.json",
"compliance/ormatch.json",
"compliance/pipe.json",
"compliance/slice.json",
"compliance/syntax.json",
"compliance/unicode.json",
"compliance/wildcard.json",
"compliance/boolean.json",
}
// allowed reports whether path is one of the whitelisted compliance files.
func allowed(path string) bool {
	for i := range whiteListed {
		if whiteListed[i] == path {
			return true
		}
	}
	return false
}
// TestCompliance walks the compliance directory and runs every
// whitelisted compliance file as a test suite.
func TestCompliance(t *testing.T) {
	assert := assert.New(t)
	var complianceFiles []string
	err := filepath.Walk("compliance", func(path string, _ os.FileInfo, walkErr error) error {
		// Propagate walk errors instead of silently discarding them
		// (the original ignored this parameter with _).
		if walkErr != nil {
			return walkErr
		}
		if allowed(path) {
			complianceFiles = append(complianceFiles, path)
		}
		return nil
	})
	if assert.Nil(err) {
		for _, filename := range complianceFiles {
			runComplianceTest(assert, filename)
		}
	}
}
// runComplianceTest loads one compliance JSON file and runs every test
// suite it contains.
func runComplianceTest(assert *assert.Assertions, filename string) {
	data, err := ioutil.ReadFile(filename)
	if !assert.Nil(err) {
		return
	}
	var testSuites []TestSuite
	if !assert.Nil(json.Unmarshal(data, &testSuites)) {
		return
	}
	for _, suite := range testSuites {
		runTestSuite(assert, suite, filename)
	}
}
// runTestSuite dispatches each case in the suite: cases carrying an
// expected error go to the syntax runner, the rest to the result runner.
func runTestSuite(assert *assert.Assertions, testsuite TestSuite, filename string) {
	for _, tc := range testsuite.TestCases {
		// A non-empty Error marks a case that must fail to evaluate.
		if tc.Error == "" {
			runTestCase(assert, testsuite.Given, tc, filename)
			continue
		}
		runSyntaxTestCase(assert, testsuite.Given, tc, filename)
	}
}
// runSyntaxTestCase asserts that evaluating the expression fails, as
// required for cases that carry an expected error.
func runSyntaxTestCase(assert *assert.Assertions, given interface{}, testcase TestCase, filename string) {
	msg := fmt.Sprintf("Expression: %s", testcase.Expression)
	_, err := Search(testcase.Expression, given)
	assert.NotNil(err, msg)
}
// runTestCase lexes, parses, and evaluates the expression, reporting a
// failure at the first stage that errors, and finally compares the
// evaluation result against the expected value.
func runTestCase(assert *assert.Assertions, given interface{}, testcase TestCase, filename string) {
	if _, err := NewLexer().tokenize(testcase.Expression); err != nil {
		assert.Fail(fmt.Sprintf("(%s) Could not lex expression: %s -- %s", filename, testcase.Expression, err.Error()))
		return
	}
	if _, err := NewParser().Parse(testcase.Expression); err != nil {
		assert.Fail(fmt.Sprintf("(%s) Could not parse expression: %s -- %s", filename, testcase.Expression, err.Error()))
		return
	}
	actual, err := Search(testcase.Expression, given)
	exprMsg := fmt.Sprintf("Expression: %s", testcase.Expression)
	if assert.Nil(err, exprMsg) {
		assert.Equal(testcase.Result, actual, exprMsg)
	}
}

View file

@ -1,221 +0,0 @@
package jmespath
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
// scalars is a simple two-field struct used to exercise field lookups.
type scalars struct {
Foo string
Bar string
}
// sliceType exercises slices of struct values (B) and pointers (C).
type sliceType struct {
A string
B []scalars
C []*scalars
}
// benchmarkStruct is a single-field struct for field-access benchmarks.
type benchmarkStruct struct {
Fooasdfasdfasdfasdf string
}
// benchmarkNested, with nestedA/B/C below, forms a four-level struct
// chain for nested field-access benchmarks.
type benchmarkNested struct {
Fooasdfasdfasdfasdf nestedA
}
type nestedA struct {
Fooasdfasdfasdfasdf nestedB
}
type nestedB struct {
Fooasdfasdfasdfasdf nestedC
}
type nestedC struct {
Fooasdfasdfasdfasdf string
}
// nestedSlice wraps a slice of sliceType for flatten/projection tests.
type nestedSlice struct {
A []sliceType
}
// TestCanSupportEmptyInterface verifies key lookup in a plain
// map[string]interface{} document.
func TestCanSupportEmptyInterface(t *testing.T) {
	assert := assert.New(t)
	doc := map[string]interface{}{"foo": "bar"}
	result, err := Search("foo", doc)
	assert.Nil(err)
	assert.Equal("bar", result)
}
// TestCanSupportUserDefinedStructsValue verifies field lookup on a
// struct passed by value.
func TestCanSupportUserDefinedStructsValue(t *testing.T) {
	assert := assert.New(t)
	result, err := Search("Foo", scalars{Foo: "one", Bar: "bar"})
	assert.Nil(err)
	assert.Equal("one", result)
}
// TestCanSupportUserDefinedStructsRef verifies field lookup on a
// struct passed by pointer.
func TestCanSupportUserDefinedStructsRef(t *testing.T) {
	assert := assert.New(t)
	result, err := Search("Foo", &scalars{Foo: "one", Bar: "bar"})
	assert.Nil(err)
	assert.Equal("one", result)
}
// TestCanSupportStructWithSliceAll verifies the flatten projection
// B[].Foo over a slice-of-structs field.
func TestCanSupportStructWithSliceAll(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		B: []scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("B[].Foo", data)
	assert.Nil(err)
	assert.Equal([]interface{}{"f1", "correct"}, result)
}
// TestCanSupportStructWithSlicingExpression verifies the full-range
// slice projection B[:].Foo over a slice-of-structs field.
func TestCanSupportStructWithSlicingExpression(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		B: []scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("B[:].Foo", data)
	assert.Nil(err)
	assert.Equal([]interface{}{"f1", "correct"}, result)
}
// TestCanSupportStructWithFilterProjection verifies a filter
// projection with a constant-true predicate over a struct slice.
func TestCanSupportStructWithFilterProjection(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		B: []scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("B[? `true` ].Foo", data)
	assert.Nil(err)
	assert.Equal([]interface{}{"f1", "correct"}, result)
}
// TestCanSupportStructWithSlice verifies negative indexing (B[-1])
// into a slice-of-structs field.
func TestCanSupportStructWithSlice(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		B: []scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("B[-1].Foo", data)
	assert.Nil(err)
	assert.Equal("correct", result)
}
// TestCanSupportStructWithOrExpressions verifies that || falls through
// to the right operand when the left one is nil.
func TestCanSupportStructWithOrExpressions(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{A: "foo", C: nil}
	// C is nil, so "C || A" should yield A.
	result, err := Search("C || A", data)
	assert.Nil(err)
	assert.Equal("foo", result)
}
// TestCanSupportStructWithSlicePointer verifies negative indexing into
// a slice of struct pointers.
func TestCanSupportStructWithSlicePointer(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		C: []*scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("C[-1].Foo", data)
	assert.Nil(err)
	assert.Equal("correct", result)
}
// TestWillAutomaticallyCapitalizeFieldNames verifies that a
// lower-cased name in the expression still resolves to the exported
// (capitalized) struct field.
func TestWillAutomaticallyCapitalizeFieldNames(t *testing.T) {
	assert := assert.New(t)
	s := scalars{Foo: "one", Bar: "bar"}
	// "foo" (not "Foo") should map to the Foo field of scalars.
	result, err := Search("foo", &s)
	assert.Nil(err)
	assert.Equal("one", result)
}
// TestCanSupportStructWithSliceLowerCased verifies lower-cased field
// names combined with negative slice indexing.
func TestCanSupportStructWithSliceLowerCased(t *testing.T) {
	assert := assert.New(t)
	data := sliceType{
		A: "foo",
		B: []scalars{{"f1", "b1"}, {"correct", "b2"}},
	}
	result, err := Search("b[-1].foo", data)
	assert.Nil(err)
	assert.Equal("correct", result)
}
// TestCanSupportStructWithNestedPointers verifies that traversing
// through a nil struct pointer yields nil rather than an error.
func TestCanSupportStructWithNestedPointers(t *testing.T) {
	assert := assert.New(t)
	input := struct{ A *struct{ B int } }{}
	result, err := Search("A.B", input)
	assert.Nil(err)
	assert.Nil(result)
}
// TestCanSupportFlattenNestedSlice verifies the double flatten
// A[].B[].Foo over nested slices of structs.
func TestCanSupportFlattenNestedSlice(t *testing.T) {
	assert := assert.New(t)
	outer := []sliceType{
		{B: []scalars{{Foo: "f1a"}, {Foo: "f1b"}}},
		{B: []scalars{{Foo: "f2a"}, {Foo: "f2b"}}},
	}
	result, err := Search("A[].B[].Foo", nestedSlice{A: outer})
	assert.Nil(err)
	assert.Equal([]interface{}{"f1a", "f1b", "f2a", "f2b"}, result)
}
// TestCanSupportFlattenNestedEmptySlice verifies that empty inner
// slices are skipped by the flatten projection.
func TestCanSupportFlattenNestedEmptySlice(t *testing.T) {
	assert := assert.New(t)
	data := nestedSlice{A: []sliceType{{}, {B: []scalars{{Foo: "a"}}}}}
	result, err := Search("A[].B[].Foo", data)
	assert.Nil(err)
	assert.Equal([]interface{}{"a"}, result)
}
// TestCanSupportProjectionsWithStructs verifies the wildcard
// projection A[*].A over a slice of structs.
func TestCanSupportProjectionsWithStructs(t *testing.T) {
	assert := assert.New(t)
	data := nestedSlice{A: []sliceType{{A: "first"}, {A: "second"}, {A: "third"}}}
	result, err := Search("A[*].A", data)
	assert.Nil(err)
	assert.Equal([]interface{}{"first", "second", "third"}, result)
}
// TestCanSupportSliceOfStructsWithFunctions verifies that built-in
// functions such as length() work on a slice of user-defined structs.
func TestCanSupportSliceOfStructsWithFunctions(t *testing.T) {
	assert := assert.New(t)
	data := []scalars{{"a1", "b1"}, {"a2", "b2"}}
	result, err := Search("length(@)", data)
	assert.Nil(err)
	// testify's Equal takes (expected, actual); the original reversed
	// them, which garbles the failure message. Also dropped the
	// redundant element type from the composite literal (gofmt -s).
	assert.Equal(2.0, result.(float64))
}
// BenchmarkInterpretSingleFieldStruct measures interpreting a single
// field access against a one-field struct.
func BenchmarkInterpretSingleFieldStruct(b *testing.B) {
	intr := newInterpreter()
	ast, _ := NewParser().Parse("fooasdfasdfasdfasdf")
	data := benchmarkStruct{"foobarbazqux"}
	for n := 0; n < b.N; n++ {
		intr.Execute(ast, &data)
	}
}
// BenchmarkInterpretNestedStruct measures interpreting a four-level
// nested field access against a nested struct chain.
func BenchmarkInterpretNestedStruct(b *testing.B) {
	intr := newInterpreter()
	ast, _ := NewParser().Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
	data := benchmarkNested{nestedA{nestedB{nestedC{"foobarbazqux"}}}}
	for n := 0; n < b.N; n++ {
		intr.Execute(ast, &data)
	}
}
// BenchmarkInterpretNestedMaps measures interpreting a four-level
// nested field access against maps decoded from JSON.
func BenchmarkInterpretNestedMaps(b *testing.B) {
	jsonData := []byte(`{"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": {"fooasdfasdfasdfasdf": "foobarbazqux"}}}}`)
	var data interface{}
	// Fail fast instead of silently benchmarking against nil data
	// (the original ignored the Unmarshal error).
	if err := json.Unmarshal(jsonData, &data); err != nil {
		b.Fatal(err)
	}
	intr := newInterpreter()
	parser := NewParser()
	ast, _ := parser.Parse("fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf.fooasdfasdfasdfasdf")
	for i := 0; i < b.N; i++ {
		intr.Execute(ast, data)
	}
}

View file

@ -1,161 +0,0 @@
package jmespath
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
// lexingTests maps expressions to the exact token streams the lexer
// must produce. Each token is {type, value, position, length}; the
// trailing tEOF token is appended at assertion time in TestCanLexTokens.
var lexingTests = []struct {
expression string
expected []token
}{
{"*", []token{{tStar, "*", 0, 1}}},
{".", []token{{tDot, ".", 0, 1}}},
{"[?", []token{{tFilter, "[?", 0, 2}}},
{"[]", []token{{tFlatten, "[]", 0, 2}}},
{"(", []token{{tLparen, "(", 0, 1}}},
{")", []token{{tRparen, ")", 0, 1}}},
{"[", []token{{tLbracket, "[", 0, 1}}},
{"]", []token{{tRbracket, "]", 0, 1}}},
{"{", []token{{tLbrace, "{", 0, 1}}},
{"}", []token{{tRbrace, "}", 0, 1}}},
{"||", []token{{tOr, "||", 0, 2}}},
{"|", []token{{tPipe, "|", 0, 1}}},
{"29", []token{{tNumber, "29", 0, 2}}},
{"2", []token{{tNumber, "2", 0, 1}}},
{"0", []token{{tNumber, "0", 0, 1}}},
{"-20", []token{{tNumber, "-20", 0, 3}}},
{"foo", []token{{tUnquotedIdentifier, "foo", 0, 3}}},
{`"bar"`, []token{{tQuotedIdentifier, "bar", 0, 3}}},
// Escaping the delimiter
{`"bar\"baz"`, []token{{tQuotedIdentifier, `bar"baz`, 0, 7}}},
{",", []token{{tComma, ",", 0, 1}}},
{":", []token{{tColon, ":", 0, 1}}},
{"<", []token{{tLT, "<", 0, 1}}},
{"<=", []token{{tLTE, "<=", 0, 2}}},
{">", []token{{tGT, ">", 0, 1}}},
{">=", []token{{tGTE, ">=", 0, 2}}},
{"==", []token{{tEQ, "==", 0, 2}}},
{"!=", []token{{tNE, "!=", 0, 2}}},
{"`[0, 1, 2]`", []token{{tJSONLiteral, "[0, 1, 2]", 1, 9}}},
{"'foo'", []token{{tStringLiteral, "foo", 1, 3}}},
{"'a'", []token{{tStringLiteral, "a", 1, 1}}},
{`'foo\'bar'`, []token{{tStringLiteral, "foo'bar", 1, 7}}},
{"@", []token{{tCurrent, "@", 0, 1}}},
{"&", []token{{tExpref, "&", 0, 1}}},
// Quoted identifier unicode escape sequences
{`"\u2713"`, []token{{tQuotedIdentifier, "✓", 0, 3}}},
{`"\\"`, []token{{tQuotedIdentifier, `\`, 0, 1}}},
{"`\"foo\"`", []token{{tJSONLiteral, "\"foo\"", 1, 5}}},
// Combinations of tokens.
{"foo.bar", []token{
{tUnquotedIdentifier, "foo", 0, 3},
{tDot, ".", 3, 1},
{tUnquotedIdentifier, "bar", 4, 3},
}},
{"foo[0]", []token{
{tUnquotedIdentifier, "foo", 0, 3},
{tLbracket, "[", 3, 1},
{tNumber, "0", 4, 1},
{tRbracket, "]", 5, 1},
}},
{"foo[?a<b]", []token{
{tUnquotedIdentifier, "foo", 0, 3},
{tFilter, "[?", 3, 2},
{tUnquotedIdentifier, "a", 5, 1},
{tLT, "<", 6, 1},
{tUnquotedIdentifier, "b", 7, 1},
{tRbracket, "]", 8, 1},
}},
}
// TestCanLexTokens runs the lexer over every entry in lexingTests and
// compares the produced stream, element by element, against the
// expected tokens plus a trailing tEOF token.
func TestCanLexTokens(t *testing.T) {
	assert := assert.New(t)
	lexer := NewLexer()
	for _, tt := range lexingTests {
		tokens, err := lexer.tokenize(tt.expression)
		if assert.Nil(err) {
			errMsg := fmt.Sprintf("Mismatch expected number of tokens: (expected: %s, actual: %s)",
				tt.expected, tokens)
			// The lexer always emits a final EOF token.
			tt.expected = append(tt.expected, token{tEOF, "", len(tt.expression), 0})
			if assert.Equal(len(tt.expected), len(tokens), errMsg) {
				// Loop variable renamed from "token" to "tok": the
				// original shadowed the token type used just above.
				for i, tok := range tokens {
					assert.Equal(tt.expected[i], tok, "Token not equal")
				}
			}
		}
	}
}
// lexingErrorTests pairs malformed expressions with a short
// description of the lexer error each should produce.
var lexingErrorTests = []struct {
expression string
msg string
}{
{"'foo", "Missing closing single quote"},
{"[?foo==bar?]", "Unknown char '?'"},
}
// TestLexingErrors asserts that each malformed expression in
// lexingErrorTests produces a lexer error.
func TestLexingErrors(t *testing.T) {
	assert := assert.New(t)
	lexer := NewLexer()
	for _, tt := range lexingErrorTests {
		msg := fmt.Sprintf("Expected lexing error: %s", tt.msg)
		_, err := lexer.tokenize(tt.expression)
		assert.NotNil(err, msg)
	}
}
// Benchmark input expressions of increasing complexity, shared by the
// lexer benchmarks below and the parser benchmarks in parser_test.
var exprIdentifier = "abcdefghijklmnopqrstuvwxyz"
var exprSubexpr = "abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz"
var deeplyNested50 = "j49.j48.j47.j46.j45.j44.j43.j42.j41.j40.j39.j38.j37.j36.j35.j34.j33.j32.j31.j30.j29.j28.j27.j26.j25.j24.j23.j22.j21.j20.j19.j18.j17.j16.j15.j14.j13.j12.j11.j10.j9.j8.j7.j6.j5.j4.j3.j2.j1.j0"
var deeplyNested50Pipe = "j49|j48|j47|j46|j45|j44|j43|j42|j41|j40|j39|j38|j37|j36|j35|j34|j33|j32|j31|j30|j29|j28|j27|j26|j25|j24|j23|j22|j21|j20|j19|j18|j17|j16|j15|j14|j13|j12|j11|j10|j9|j8|j7|j6|j5|j4|j3|j2|j1|j0"
var deeplyNested50Index = "[49][48][47][46][45][44][43][42][41][40][39][38][37][36][35][34][33][32][31][30][29][28][27][26][25][24][23][22][21][20][19][18][17][16][15][14][13][12][11][10][9][8][7][6][5][4][3][2][1][0]"
var deepProjection104 = "a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*].a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*].a[*].b[*].c[*].d[*].e[*].f[*].g[*].h[*].i[*].j[*].k[*].l[*].m[*].n[*].o[*].p[*].q[*].r[*].s[*].t[*].u[*].v[*].w[*].x[*].y[*].z[*]"
var exprQuotedIdentifier = `"abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz"`
var quotedIdentifierEscapes = `"\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t\n\r\b\t"`
var rawStringLiteral = `'abcdefghijklmnopqrstuvwxyz.abcdefghijklmnopqrstuvwxyz'`
// The following benchmarks each lex one of the shared expressions
// above via runLexBenchmark.
func BenchmarkLexIdentifier(b *testing.B) {
runLexBenchmark(b, exprIdentifier)
}
func BenchmarkLexSubexpression(b *testing.B) {
runLexBenchmark(b, exprSubexpr)
}
func BenchmarkLexDeeplyNested50(b *testing.B) {
runLexBenchmark(b, deeplyNested50)
}
func BenchmarkLexDeepNested50Pipe(b *testing.B) {
runLexBenchmark(b, deeplyNested50Pipe)
}
func BenchmarkLexDeepNested50Index(b *testing.B) {
runLexBenchmark(b, deeplyNested50Index)
}
func BenchmarkLexQuotedIdentifier(b *testing.B) {
runLexBenchmark(b, exprQuotedIdentifier)
}
func BenchmarkLexQuotedIdentifierEscapes(b *testing.B) {
runLexBenchmark(b, quotedIdentifierEscapes)
}
func BenchmarkLexRawStringLiteral(b *testing.B) {
runLexBenchmark(b, rawStringLiteral)
}
func BenchmarkLexDeepProjection104(b *testing.B) {
runLexBenchmark(b, deepProjection104)
}
// runLexBenchmark tokenizes the same expression b.N times; the token
// stream and any error are intentionally discarded.
func runLexBenchmark(b *testing.B, expression string) {
	lexer := NewLexer()
	for n := 0; n < b.N; n++ {
		_, _ = lexer.tokenize(expression)
	}
}

View file

@ -1,136 +0,0 @@
package jmespath
import (
"fmt"
"testing"
"github.com/stretchr/testify/assert"
)
// parsingErrorTests lists malformed expressions together with a short
// description of the parse error each should trigger; msg only appears
// in test failure output.
var parsingErrorTests = []struct {
	expression string
	msg        string
}{
	// Typo fix: "Incopmlete" -> "Incomplete" (matches the correctly
	// spelled entry further down).
	{"foo.", "Incomplete expression"},
	{"[foo", "Incomplete expression"},
	{"]", "Invalid"},
	{")", "Invalid"},
	{"}", "Invalid"},
	{"foo..bar", "Invalid"},
	{`foo."bar`, "Forwards lexer errors"},
	{`{foo: bar`, "Incomplete expression"},
	{`{foo bar}`, "Invalid"},
	{`[foo bar]`, "Invalid"},
	{`foo@`, "Invalid"},
	{`&&&&&&&&&&&&t(`, "Invalid"},
	{`[*][`, "Invalid"},
}
// TestParsingErrors asserts that each malformed expression in
// parsingErrorTests fails to parse.
func TestParsingErrors(t *testing.T) {
	assert := assert.New(t)
	parser := NewParser()
	for _, tt := range parsingErrorTests {
		msg := fmt.Sprintf("Expected parsing error: %s, for expression: %s", tt.msg, tt.expression)
		_, err := parser.Parse(tt.expression)
		assert.NotNil(err, msg)
	}
}
// prettyPrinted is the exact PrettyPrint output expected for
// "foo[*].bar.baz.qux"; compared verbatim in TestPrettyPrintedAST, so
// the raw string must not be reformatted.
var prettyPrinted = `ASTProjection {
children: {
ASTField {
value: "foo"
}
ASTSubexpression {
children: {
ASTSubexpression {
children: {
ASTField {
value: "bar"
}
ASTField {
value: "baz"
}
}
ASTField {
value: "qux"
}
}
}
`
// prettyPrintedCompNode is the exact PrettyPrint output expected for
// "a[?b<=c]"; compared verbatim in TestPrettyPrintedCompNode, so the
// raw string must not be reformatted.
var prettyPrintedCompNode = `ASTFilterProjection {
children: {
ASTField {
value: "a"
}
ASTIdentity {
}
ASTComparator {
value: tLTE
children: {
ASTField {
value: "b"
}
ASTField {
value: "c"
}
}
}
`
// TestPrettyPrintedAST checks PrettyPrint output for a projection AST.
func TestPrettyPrintedAST(t *testing.T) {
	assert := assert.New(t)
	parser := NewParser()
	parsed, err := parser.Parse("foo[*].bar.baz.qux")
	// The original discarded this error; a parse failure should fail
	// the test instead of panicking on a nil AST below.
	assert.Nil(err)
	// testify's Equal takes (expected, actual); the original reversed them.
	assert.Equal(prettyPrinted, parsed.PrettyPrint(0))
}
// TestPrettyPrintedCompNode checks PrettyPrint output for a filter
// projection containing a comparator node.
func TestPrettyPrintedCompNode(t *testing.T) {
	assert := assert.New(t)
	parser := NewParser()
	parsed, err := parser.Parse("a[?b<=c]")
	// The original discarded this error; fail loudly on a parse error.
	assert.Nil(err)
	// testify's Equal takes (expected, actual); the original reversed them.
	assert.Equal(prettyPrintedCompNode, parsed.PrettyPrint(0))
}
// The following benchmarks each parse one of the shared expressions
// (defined in lexer_test) via runParseBenchmark.
func BenchmarkParseIdentifier(b *testing.B) {
runParseBenchmark(b, exprIdentifier)
}
func BenchmarkParseSubexpression(b *testing.B) {
runParseBenchmark(b, exprSubexpr)
}
func BenchmarkParseDeeplyNested50(b *testing.B) {
runParseBenchmark(b, deeplyNested50)
}
func BenchmarkParseDeepNested50Pipe(b *testing.B) {
runParseBenchmark(b, deeplyNested50Pipe)
}
func BenchmarkParseDeepNested50Index(b *testing.B) {
runParseBenchmark(b, deeplyNested50Index)
}
func BenchmarkParseQuotedIdentifier(b *testing.B) {
runParseBenchmark(b, exprQuotedIdentifier)
}
func BenchmarkParseQuotedIdentifierEscapes(b *testing.B) {
runParseBenchmark(b, quotedIdentifierEscapes)
}
func BenchmarkParseRawStringLiteral(b *testing.B) {
runParseBenchmark(b, rawStringLiteral)
}
func BenchmarkParseDeepProjection104(b *testing.B) {
runParseBenchmark(b, deepProjection104)
}
// runParseBenchmark parses the same expression b.N times; the AST and
// any error are intentionally discarded.
func runParseBenchmark(b *testing.B, expression string) {
	parser := NewParser()
	for n := 0; n < b.N; n++ {
		_, _ = parser.Parse(expression)
	}
}

View file

@ -1,73 +0,0 @@
package jmespath
import (
"github.com/stretchr/testify/assert"
"testing"
)
// TestSlicePositiveStep checks slice() with start 0, stop 3, step 1:
// it should return the first three elements.
func TestSlicePositiveStep(t *testing.T) {
	assert := assert.New(t)
	input := []interface{}{0, 1, 2, 3, 4}
	params := []sliceParam{{0, true}, {3, true}, {1, true}}
	result, err := slice(input, params)
	assert.Nil(err)
	assert.Equal(input[:3], result)
}
// TestIsFalseJSONTypes verifies isFalse over the JSON falsy values:
// false, empty string, empty array, empty object, and null.
func TestIsFalseJSONTypes(t *testing.T) {
	assert := assert.New(t)
	var emptySlice []interface{}
	emptyMap := map[string]interface{}{}
	for _, v := range []interface{}{false, "", emptySlice, emptyMap, nil} {
		assert.True(isFalse(v))
	}
}
// TestIsFalseWithUserDefinedStructs verifies that a nil slice field is
// false, but a user-defined struct value itself never is — even when
// all of its fields are zero values.
func TestIsFalseWithUserDefinedStructs(t *testing.T) {
	assert := assert.New(t)
	type nilStructType struct {
		SliceOfPointers []*string
	}
	var nilStruct nilStructType
	assert.True(isFalse(nilStruct.SliceOfPointers))
	assert.False(isFalse(nilStruct))
}
// TestIsFalseWithNilInterface verifies that an interface holding a
// typed nil pointer is still treated as false.
func TestIsFalseWithNilInterface(t *testing.T) {
	assert := assert.New(t)
	var p *int
	var boxed interface{} = p
	assert.True(isFalse(boxed))
}
// TestIsFalseWithMapOfUserStructs verifies that an empty map with
// non-string keys and struct values is treated as false.
func TestIsFalseWithMapOfUserStructs(t *testing.T) {
	assert := assert.New(t)
	type foo struct {
		Bar string
		Baz string
	}
	assert.True(isFalse(map[int]foo{}))
}
// TestObjsEqual spot-checks objsEqual for both equal and unequal
// pairs, including nil and empty-slice edge cases.
func TestObjsEqual(t *testing.T) {
	assert := assert.New(t)
	assert.True(objsEqual("foo", "foo"))
	assert.True(objsEqual(20, 20))
	assert.True(objsEqual([]int{1, 2, 3}, []int{1, 2, 3}))
	assert.True(objsEqual(nil, nil))
	assert.False(objsEqual(nil, "foo"))
	assert.True(objsEqual([]int{}, []int{}))
	// An empty slice and nil are not considered equal.
	assert.False(objsEqual([]int{}, nil))
}