diff --git a/typed/merge.go b/typed/merge.go
index e5d8ec0e..fa227ac4 100644
--- a/typed/merge.go
+++ b/typed/merge.go
@@ -180,14 +180,18 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
 	}
 	out := make([]interface{}, 0, outLen)
 
-	rhsOrder, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
+	rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs, false)
 	errs = append(errs, rhsErrs...)
-	lhsOrder, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
+	lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs, true)
 	errs = append(errs, lhsErrs...)
 
+	if len(errs) != 0 {
+		return errs
+	}
+
 	sharedOrder := make([]*fieldpath.PathElement, 0, rLen)
-	for i := range rhsOrder {
-		pe := &rhsOrder[i]
+	for i := range rhsPEs {
+		pe := &rhsPEs[i]
 		if _, ok := observedLHS.Get(*pe); ok {
 			sharedOrder = append(sharedOrder, pe)
 		}
@@ -199,13 +203,15 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
 		sharedOrder = sharedOrder[1:]
 	}
 
-	lLen, rLen = len(lhsOrder), len(rhsOrder)
+	mergedRHS := fieldpath.MakePathElementMap(len(rhsPEs))
+	lLen, rLen = len(lhsPEs), len(rhsPEs)
 	for lI, rI := 0, 0; lI < lLen || rI < rLen; {
 		if lI < lLen && rI < rLen {
-			pe := lhsOrder[lI]
-			if pe.Equals(rhsOrder[rI]) {
+			pe := lhsPEs[lI]
+			if pe.Equals(rhsPEs[rI]) {
 				// merge LHS & RHS items
-				lChild, _ := observedLHS.Get(pe)
+				mergedRHS.Insert(pe, struct{}{})
+				lChild, _ := observedLHS.Get(pe) // may be nil if the PE is duplicated.
 				rChild, _ := observedRHS.Get(pe)
 				mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
 				errs = append(errs, errs...)
@@ -222,17 +228,17 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
 			}
 			continue
 		}
-			if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsOrder[lI]) {
+			if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsPEs[lI]) {
 				// shared item, but not the one we want in this round
 				lI++
 				continue
 			}
 		}
 		if lI < lLen {
-			pe := lhsOrder[lI]
+			pe := lhsPEs[lI]
 			if _, ok := observedRHS.Get(pe); !ok {
-				// take LHS item
-				lChild, _ := observedLHS.Get(pe)
+				// take LHS item using At to make sure we get the right item (observed may not contain the right item).
+				lChild := lhs.AtUsing(w.allocator, lI)
 				mergeOut, errs := w.mergeListItem(t, pe, lChild, nil)
 				errs = append(errs, errs...)
 				if mergeOut != nil {
@@ -240,12 +246,16 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
 				}
 				lI++
 				continue
+			} else if _, ok := mergedRHS.Get(pe); ok {
+				// we've already merged it with RHS, we don't want to duplicate it, skip it.
+				lI++
 			}
 		}
 		if rI < rLen {
 			// Take the RHS item, merge with matching LHS item if possible
-			pe := rhsOrder[rI]
-			lChild, _ := observedLHS.Get(pe) // may be nil
+			pe := rhsPEs[rI]
+			mergedRHS.Insert(pe, struct{}{})
+			lChild, _ := observedLHS.Get(pe) // may be nil if absent or duplicated.
 			rChild, _ := observedRHS.Get(pe)
 			mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
 			errs = append(errs, errs...)
@@ -272,7 +282,7 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
 	return errs
 }
 
-func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
+func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List, allowDuplicates bool) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
 	var errs ValidationErrors
 	length := 0
 	if list != nil {
@@ -290,11 +300,15 @@ func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) (
 			// this element.
 			continue
 		}
-		if _, found := observed.Get(pe); found {
+		if _, found := observed.Get(pe); found && !allowDuplicates {
 			errs = append(errs, errorf("duplicate entries for key %v", pe.String())...)
 			continue
+		} else if !found {
+			observed.Insert(pe, child)
+		} else {
+			// Duplicated items are not merged with the new value, make them nil.
+			observed.Insert(pe, value.NewValueInterface(nil))
 		}
-		observed.Insert(pe, child)
 		pes = append(pes, pe)
 	}
 	return pes, observed, errs
diff --git a/typed/merge_test.go b/typed/merge_test.go
index 63745cdb..1e8562f1 100644
--- a/typed/merge_test.go
+++ b/typed/merge_test.go
@@ -328,6 +328,34 @@ var mergeCases = []mergeTestCase{{
 		`{"setStr":["a","b","c","d","e","f","g","h","i","j"]}`,
 		`{"setStr":["1","b","2","h","3","e","4","k","l"]}`,
 		`{"setStr":["a","1","b","c","d","f","g","2","h","i","j","3","e","4","k","l"]}`,
+	}, { // We have a duplicate in LHS
+		`{"setStr":["a","b","b"]}`,
+		`{"setStr":["c"]}`,
+		`{"setStr":["a","b","b","c"]}`,
+	}, { // We have a duplicate in LHS.
+		`{"setStr":["a","b","b"]}`,
+		`{"setStr":["b"]}`,
+		`{"setStr":["a","b"]}`,
+	}, { // We have a duplicate in LHS.
+		`{"setStr":["a","b","b"]}`,
+		`{"setStr":["a"]}`,
+		`{"setStr":["a","b","b"]}`,
+	}, { // We have a duplicate in LHS.
+		`{"setStr":["a","b","c","d","e","c"]}`,
+		`{"setStr":["1","b","2","e","d"]}`,
+		`{"setStr":["a","1","b","c","2","e","c","d"]}`,
+	}, { // We have a duplicate in LHS, also present in RHS, keep only one.
+		`{"setStr":["a","b","c","d","e","c"]}`,
+		`{"setStr":["1","b","2","c","e","d"]}`,
+		`{"setStr":["a","1","b","2","c","e","d"]}`,
+	}, { // We have 2 duplicates in LHS, one is replaced.
+ `{"setStr":["a","a","b","b"]}`, + `{"setStr":["b","c","d"]}`, + `{"setStr":["a","a","b","c","d"]}`, + }, { // We have 2 duplicates in LHS, and nothing on the right + `{"setStr":["a","a","b","b"]}`, + `{"setStr":[]}`, + `{"setStr":["a","a","b","b"]}`, }, { `{"setBool":[true]}`, `{"setBool":[false]}`, @@ -336,6 +364,22 @@ var mergeCases = []mergeTestCase{{ `{"setNumeric":[1,2,3.14159]}`, `{"setNumeric":[1,2,3]}`, `{"setNumeric":[1,2,3.14159,3]}`, + }, { + `{"setStr":["c","a","g","f","c","a"]}`, + `{"setStr":["c","f","a","g"]}`, + `{"setStr":["c","f","a","g"]}`, + }, { + `{"setNumeric":[1,2,3.14159,1,2]}`, + `{"setNumeric":[1,2,3]}`, + `{"setNumeric":[1,2,3.14159,3]}`, + }, { + `{"setBool":[true,false,true]}`, + `{"setBool":[false]}`, + `{"setBool":[true,false,true]}`, + }, { + `{"setBool":[true,false,true]}`, + `{"setBool":[true]}`, + `{"setBool":[true, false]}`, }, }, }, { @@ -423,6 +467,18 @@ var mergeCases = []mergeTestCase{{ `{"atomicList":["a","a","a"]}`, `{"atomicList":["a","a"]}`, `{"atomicList":["a","a"]}`, + }, { + `{"list":[{"key":"a","id":1,"bv":true},{"key":"b","id":2},{"key":"a","id":1,"bv":false,"nv":2}]}`, + `{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`, + `{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`, + }, { + `{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`, + `{"list":[]}`, + `{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`, + }, { + `{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`, + `{}`, + `{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`, }}, }} @@ -437,18 +493,16 @@ func (tt mergeTestCase) test(t *testing.T) { t.Run(fmt.Sprintf("%v-valid-%v", tt.name, i), func(t *testing.T) { t.Parallel() pt := parser.Type(tt.rootTypeName) - - lhs, err := pt.FromYAML(triplet.lhs) + // Former object can have duplicates in sets. + lhs, err := pt.FromYAML(triplet.lhs, typed.AllowDuplicates) if err != nil { t.Fatalf("unable to parser/validate lhs yaml: %v\n%v", err, triplet.lhs) } - rhs, err := pt.FromYAML(triplet.rhs) if err != nil { t.Fatalf("unable to parser/validate rhs yaml: %v\n%v", err, triplet.rhs) } - - out, err := pt.FromYAML(triplet.out) + out, err := pt.FromYAML(triplet.out, typed.AllowDuplicates) if err != nil { t.Fatalf("unable to parser/validate out yaml: %v\n%v", err, triplet.out) }