Merge pull request #215 from rrgilchrist/fix-repeat-paths
Fixed repeated paths not being matched, caused by incorrectly incrementing the data offset.
buger committed Nov 24, 2020
2 parents 49146d0 + 3980c9c commit 94ddf53
Showing 2 changed files with 13 additions and 9 deletions.
parser.go: 3 changes (1 addition, 2 deletions)
@@ -437,11 +437,10 @@ func EachKey(data []byte, cb func(int, []byte, ValueType, error), paths ...[]string) int {

                 match = pi

-                i++
                 pathsMatched++
                 pathFlags[pi] = true

-                v, dt, _, e := Get(data[i:])
+                v, dt, _, e := Get(data[i+1:])
                 cb(pi, v, dt, e)

                 if pathsMatched == len(paths) {
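Why this fixes repeated paths: inside EachKey, i is the offset into data shared by the whole scan, and the removed i++ advanced it permanently on every match. When the same path appeared more than once in paths, that extra increment left the scanner at a corrupted position, so later occurrences were never reported. The replacement applies the same one-byte adjustment only to the slice handed to Get, leaving i untouched. A minimal sketch of the two variants, paraphrased from the hunk above (the surrounding scan loop is omitted):

    // Before: mutated the shared scan offset, shifting the traversal
    // state for every path still to be matched.
    i++
    v, dt, _, e := Get(data[i:])

    // After: the +1 is local to this call; i is unchanged for the rest
    // of the scan, so a repeated path can still match.
    v, dt, _, e := Get(data[i+1:])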
parser_test.go: 19 changes (12 additions, 7 deletions)
@@ -1420,9 +1420,9 @@ func TestArrayEachWithWhiteSpace(t *testing.T) {
         keys []string
     }
     tests := []struct {
-        name string
-        args args
-        wantErr bool
+        name    string
+        args    args
+        wantErr bool
     }{
         {"Array with white space", args{[]byte(` ["AAA", "BBB", "CCC"]`), funcSuccess, []string{}}, false},
         {"Array with only one character after white space", args{[]byte(` 1`), funcError, []string{}}, true},
@@ -1675,8 +1675,9 @@ func TestEachKey(t *testing.T) {
{"arrInt", "[3]"},
{"arrInt", "[5]"}, // Should not find last key
{"nested"},
{"arr", "["}, // issue#177 Invalid arguments
{"a\n", "b\n"}, // issue#165
{"arr", "["}, // issue#177 Invalid arguments
{"a\n", "b\n"}, // issue#165
{"nested", "b"}, // Should find repeated key
}

keysFound := 0
@@ -1729,13 +1730,17 @@ func TestEachKey(t *testing.T) {
             if string(value) != "99" {
                 t.Error("Should find 10 key", string(value))
             }
+        case 12:
+            if string(value) != "2" {
+                t.Errorf("Should find 11 key")
+            }
         default:
             t.Errorf("Should find only 10 keys, got %v key", idx)
         }
     }, paths...)

-    if keysFound != 10 {
-        t.Errorf("Should find 10 keys: %d", keysFound)
+    if keysFound != 11 {
+        t.Errorf("Should find 11 keys: %d", keysFound)
     }
 }

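Seen from the caller's side, the new test exercises the same path given twice. A minimal reproduction sketch using the library's public EachKey API (the payload, paths, and expected output here are illustrative, not taken from the test suite):

    package main

    import (
        "fmt"

        "github.com/buger/jsonparser"
    )

    func main() {
        data := []byte(`{"nested": {"a": 1, "b": 2}}`)

        // The same path listed twice: before this commit only the first
        // occurrence fired the callback, because the first match corrupted
        // the shared scan offset.
        paths := [][]string{
            {"nested", "b"},
            {"nested", "b"},
        }

        jsonparser.EachKey(data, func(idx int, value []byte, vt jsonparser.ValueType, err error) {
            fmt.Printf("paths[%d] => %s\n", idx, value)
        }, paths...)
        // With the fix, both paths[0] and paths[1] report 2.
    }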
