Commit 5858425

Merge pull request #73 from caesarxuchao/move-should-add
"move should functionally equal to "remove" followed by "add"
evanphx committed Feb 3, 2019
2 parents 498b574 + bbf30d6 commit 5858425
Showing 3 changed files with 16 additions and 55 deletions.
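
To illustrate the behavior this merge targets, here is a minimal sketch against the package's public DecodePatch/Apply API. The document and expected result come from the move test cases in this diff; the applyOrDie helper is only a convenience for the sketch, not part of the library.

package main

import (
    "fmt"

    jsonpatch "github.com/evanphx/json-patch"
)

// applyOrDie decodes and applies a patch, panicking on error; real code
// should handle the errors instead.
func applyOrDie(doc, patch []byte) []byte {
    p, err := jsonpatch.DecodePatch(patch)
    if err != nil {
        panic(err)
    }
    out, err := p.Apply(doc)
    if err != nil {
        panic(err)
    }
    return out
}

func main() {
    doc := []byte(`{ "foo": [ "all", "grass", "cows", "eat" ] }`)

    // A single "move" operation...
    moved := applyOrDie(doc, []byte(`[
        { "op": "move", "from": "/foo/1", "path": "/foo/3" }
    ]`))

    // ...and the equivalent "remove" followed by an "add" of the removed value.
    removedThenAdded := applyOrDie(doc, []byte(`[
        { "op": "remove", "path": "/foo/1" },
        { "op": "add", "path": "/foo/3", "value": "grass" }
    ]`))

    // Both should yield {"foo":["all","cows","eat","grass"]}.
    fmt.Println(string(moved))
    fmt.Println(string(removedThenAdded))
}
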
README.md (8 changes: 0 additions & 8 deletions)
@@ -34,14 +34,6 @@ go get -u github.com/evanphx/json-patch
   functionality can be disabled by setting `jsonpatch.SupportNegativeIndices =
   false`.

-* There is a global configuration variable `jsonpatch.ArraySizeLimit`, which
-  limits the length of any array the patched object can have. It defaults to 0,
-  which means there is no limit.
-
-* There is a global configuration variable `jsonpatch.ArraySizeAdditionLimit`,
-  which limits the increase of array length caused by each operation. It
-  defaults to 0, which means there is no limit.
-
 * There is a global configuration variable `jsonpatch.AccumulatedCopySizeLimit`,
   which limits the total size increase in bytes caused by "copy" operations in a
   patch. It defaults to 0, which means there is no limit.
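
For reference, a minimal sketch (not taken from the repository) of setting the remaining package-level knobs described in this README section before applying a patch; the 1 MiB limit and the sample copy patch are illustrative values only.

package main

import (
    "fmt"

    jsonpatch "github.com/evanphx/json-patch"
)

func main() {
    // These are plain package globals, so set them once at startup rather
    // than concurrently with patch application.
    jsonpatch.SupportNegativeIndices = true      // the default
    jsonpatch.AccumulatedCopySizeLimit = 1 << 20 // illustrative: cap "copy" growth at 1 MiB

    patch, err := jsonpatch.DecodePatch([]byte(`[ { "op": "copy", "from": "/a", "path": "/b" } ]`))
    if err != nil {
        panic(err)
    }

    out, err := patch.Apply([]byte(`{ "a": "value" }`))
    if err != nil {
        panic(err) // would also fire if the copy size limit were exceeded
    }
    fmt.Println(string(out)) // document with "b" copied from "a"
}
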
patch.go (43 changes: 4 additions & 39 deletions)
@@ -19,8 +19,6 @@ var (
     // allowing negative indices to mean indices starting at the end of an array.
     // Default to true.
     SupportNegativeIndices bool = true
-    ArraySizeLimit int = 0
-    ArraySizeAdditionLimit int = 0
     // AccumulatedCopySizeLimit limits the total size increase in bytes caused by
     // "copy" operations in a patch.
     AccumulatedCopySizeLimit int64 = 0
@@ -368,44 +366,14 @@ func (d *partialDoc) remove(key string) error {
     return nil
 }

+// set should only be used to implement the "replace" operation, so "key" must
+// be an already existing index in "d".
 func (d *partialArray) set(key string, val *lazyNode) error {
-    if key == "-" {
-        *d = append(*d, val)
-        return nil
-    }
-
     idx, err := strconv.Atoi(key)
     if err != nil {
         return err
     }
-
-    sz := len(*d)
-
-    if diff := idx + 1 - sz; ArraySizeAdditionLimit > 0 && diff > ArraySizeAdditionLimit {
-        return fmt.Errorf("Unable to increase the array size by %d, the limit is %d", diff, ArraySizeAdditionLimit)
-    }
-
-    if idx+1 > sz {
-        sz = idx + 1
-    }
-
-    if ArraySizeLimit > 0 && sz > ArraySizeLimit {
-        return NewArraySizeError(ArraySizeLimit, sz)
-    }
-
-    ary := make([]*lazyNode, sz)
-
-    cur := *d
-
-    copy(ary, cur)
-
-    if idx >= len(ary) {
-        return fmt.Errorf("Unable to access invalid index: %d", idx)
-    }
-
-    ary[idx] = val
-
-    *d = ary
+    (*d)[idx] = val
     return nil
 }

@@ -421,9 +389,6 @@ func (d *partialArray) add(key string, val *lazyNode) error {
     }

     sz := len(*d) + 1
-    if ArraySizeLimit > 0 && sz > ArraySizeLimit {
-        return fmt.Errorf("Unable to create array of size %d, limit is %d", sz, ArraySizeLimit)
-    }

     ary := make([]*lazyNode, sz)

@@ -565,7 +530,7 @@ func (p Patch) move(doc *container, op operation) error {
         return fmt.Errorf("jsonpatch move operation does not apply: doc is missing destination path: %s", path)
     }

-    return con.set(key, val)
+    return con.add(key, val)
 }

 func (p Patch) test(doc *container, op operation) error {
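
The reason set no longer works for move: once the source element is removed, every later index shifts left, so the destination must be treated as an insertion point rather than an existing slot to overwrite. A standalone sketch with plain string slices (not the library's lazyNode types), using the document and patch from the test case added below in patch_test.go:

package main

import "fmt"

func main() {
    // Document { "foo": [ "all", "grass", "cows", "eat" ] } and the patch
    // [ { "op": "move", "from": "/foo/1", "path": "/foo/2" } ].
    foo := []string{"all", "grass", "cows", "eat"}

    // "move" step 1: remove the element at the "from" index.
    val := foo[1]
    foo = append(foo[:1], foo[2:]...) // ["all", "cows", "eat"]

    // "move" step 2 with "add" semantics: insert the value at the target
    // index, shifting later elements right (what RFC 6902 and this commit do).
    i := 2
    foo = append(foo[:i], append([]string{val}, foo[i:]...)...)
    fmt.Println(foo) // [all cows grass eat]

    // With the previous "set" semantics the value would instead overwrite
    // index 2 of the shortened slice, producing [all cows grass] and
    // silently dropping "eat".
}
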
patch_test.go (20 changes: 12 additions & 8 deletions)
@@ -121,6 +121,11 @@ var Cases = []Case{
         `[ { "op": "move", "from": "/foo/1", "path": "/foo/3" } ]`,
         `{ "foo": [ "all", "cows", "eat", "grass" ] }`,
     },
+    {
+        `{ "foo": [ "all", "grass", "cows", "eat" ] }`,
+        `[ { "op": "move", "from": "/foo/1", "path": "/foo/2" } ]`,
+        `{ "foo": [ "all", "cows", "grass", "eat" ] }`,
+    },
     {
         `{ "foo": "bar" }`,
         `[ { "op": "add", "path": "/child", "value": { "grandchild": { } } } ]`,
@@ -322,25 +327,24 @@ var BadCases = []BadCase{
         `[ { "op": "copy", "path": "/foo/-", "from": "/foo/1" },
           { "op": "copy", "path": "/foo/-", "from": "/foo/1" }]`,
     },
+    // Can't move into an index greater than or equal to the size of the array
+    {
+        `{ "foo": [ "all", "grass", "cows", "eat" ] }`,
+        `[ { "op": "move", "from": "/foo/1", "path": "/foo/4" } ]`,
+    },
 }

 // This is not thread safe, so we cannot run patch tests in parallel.
-func configureGlobals(arraySizeLimit, arraySizeAdditionLimit int, accumulatedCopySizeLimit int64) func() {
-    oldArraySizeLimit := ArraySizeLimit
-    oldArraySizeAdditionLimit := ArraySizeAdditionLimit
+func configureGlobals(accumulatedCopySizeLimit int64) func() {
     oldAccumulatedCopySizeLimit := AccumulatedCopySizeLimit
-    ArraySizeLimit = arraySizeLimit
-    ArraySizeAdditionLimit = arraySizeAdditionLimit
     AccumulatedCopySizeLimit = accumulatedCopySizeLimit
     return func() {
-        ArraySizeLimit = oldArraySizeLimit
-        ArraySizeAdditionLimit = oldArraySizeAdditionLimit
         AccumulatedCopySizeLimit = oldAccumulatedCopySizeLimit
     }
 }

 func TestAllCases(t *testing.T) {
-    defer configureGlobals(1000, 10, int64(100))()
+    defer configureGlobals(int64(100))()
     for _, c := range Cases {
         out, err := applyPatch(c.doc, c.patch)
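
The new BadCase pins down the boundary condition: after /foo/1 is removed the array holds three elements, so the highest legal insertion index is 3 and a move to /foo/4 must be rejected. A small sketch of exercising that through the public API; only the presence of an error is checked, not its exact message.

package main

import (
    "fmt"

    jsonpatch "github.com/evanphx/json-patch"
)

func main() {
    patch, err := jsonpatch.DecodePatch([]byte(`[
        { "op": "move", "from": "/foo/1", "path": "/foo/4" }
    ]`))
    if err != nil {
        panic(err)
    }

    // Once "grass" is removed, only indices 0..3 are valid for the "add"
    // half of the move, so applying this patch should return an error.
    _, err = patch.Apply([]byte(`{ "foo": [ "all", "grass", "cows", "eat" ] }`))
    fmt.Println(err != nil) // expected: true
}
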
