Skip to content

Commit c9d20e7

Browse files
committed
typed: Allow duplicates when we merge
The goal is to ignore duplicates if they are not included in the partial object, and to replace all of the occurrences if they are in the partial object.
1 parent f8c7b27 commit c9d20e7

File tree

2 files changed

+76
-11
lines changed

2 files changed

+76
-11
lines changed

typed/merge.go

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -180,11 +180,15 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
180180
}
181181
out := make([]interface{}, 0, outLen)
182182

183-
rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
183+
rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs, false)
184184
errs = append(errs, rhsErrs...)
185-
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
185+
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs, true)
186186
errs = append(errs, lhsErrs...)
187187

188+
if len(errs) != 0 {
189+
return errs
190+
}
191+
188192
sharedOrder := make([]*fieldpath.PathElement, 0, rLen)
189193
for i := range rhsPEs {
190194
pe := &rhsPEs[i]
@@ -199,12 +203,14 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
199203
sharedOrder = sharedOrder[1:]
200204
}
201205

206+
mergedRHS := fieldpath.MakePathElementMap(len(rhsPEs))
202207
lLen, rLen = len(lhsPEs), len(rhsPEs)
203208
for lI, rI := 0, 0; lI < lLen || rI < rLen; {
204209
if lI < lLen && rI < rLen {
205210
pe := lhsPEs[lI]
206211
if pe.Equals(rhsPEs[rI]) {
207212
// merge LHS & RHS items
213+
mergedRHS.Insert(pe, struct{}{})
208214
lChild, _ := observedLHS.Get(pe)
209215
rChild, _ := observedRHS.Get(pe)
210216
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
@@ -232,19 +238,23 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
232238
pe := lhsPEs[lI]
233239
if _, ok := observedRHS.Get(pe); !ok {
234240
// take LHS item
235-
lChild, _ := observedLHS.Get(pe)
241+
lChild := lhs.AtUsing(w.allocator, lI)
236242
mergeOut, errs := w.mergeListItem(t, pe, lChild, nil)
237243
errs = append(errs, errs...)
238244
if mergeOut != nil {
239245
out = append(out, *mergeOut)
240246
}
241247
lI++
242248
continue
249+
} else if _, ok := mergedRHS.Get(pe); ok {
250+
// we've already merged it with RHS, we don't want to duplicate it, skip it.
251+
lI++
243252
}
244253
}
245254
if rI < rLen {
246255
// Take the RHS item, merge with matching LHS item if possible
247256
pe := rhsPEs[rI]
257+
mergedRHS.Insert(pe, struct{}{})
248258
lChild, _ := observedLHS.Get(pe) // may be nil
249259
rChild, _ := observedRHS.Get(pe)
250260
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
@@ -272,7 +282,7 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
272282
return errs
273283
}
274284

275-
func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
285+
func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List, allowDuplicates bool) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
276286
var errs ValidationErrors
277287
length := 0
278288
if list != nil {
@@ -290,11 +300,12 @@ func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) (
290300
// this element.
291301
continue
292302
}
293-
if _, found := observed.Get(pe); found {
303+
if _, found := observed.Get(pe); found && !allowDuplicates {
294304
errs = append(errs, errorf("duplicate entries for key %v", pe.String())...)
295305
continue
306+
} else if !found {
307+
observed.Insert(pe, child)
296308
}
297-
observed.Insert(pe, child)
298309
pes = append(pes, pe)
299310
}
300311
return pes, observed, errs

typed/merge_test.go

Lines changed: 59 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -328,6 +328,34 @@ var mergeCases = []mergeTestCase{{
328328
`{"setStr":["a","b","c","d","e","f","g","h","i","j"]}`,
329329
`{"setStr":["1","b","2","h","3","e","4","k","l"]}`,
330330
`{"setStr":["a","1","b","c","d","f","g","2","h","i","j","3","e","4","k","l"]}`,
331+
}, { // We have a duplicate in LHS
332+
`{"setStr":["a","b","b"]}`,
333+
`{"setStr":["c"]}`,
334+
`{"setStr":["a","b","b","c"]}`,
335+
}, { // We have a duplicate in LHS.
336+
`{"setStr":["a","b","b"]}`,
337+
`{"setStr":["b"]}`,
338+
`{"setStr":["a","b"]}`,
339+
}, { // We have a duplicate in LHS.
340+
`{"setStr":["a","b","b"]}`,
341+
`{"setStr":["a"]}`,
342+
`{"setStr":["a","b","b"]}`,
343+
}, { // We have a duplicate in LHS.
344+
`{"setStr":["a","b","c","d","e","c"]}`,
345+
`{"setStr":["1","b","2","e","d"]}`,
346+
`{"setStr":["a","1","b","c","2","e","c","d"]}`,
347+
}, { // We have a duplicate in LHS, also present in RHS, keep only one.
348+
`{"setStr":["a","b","c","d","e","c"]}`,
349+
`{"setStr":["1","b","2","c","e","d"]}`,
350+
`{"setStr":["a","1","b","2","c","e","d"]}`,
351+
}, { // We have 2 duplicates in LHS, one is replaced.
352+
`{"setStr":["a","a","b","b"]}`,
353+
`{"setStr":["b","c","d"]}`,
354+
`{"setStr":["a","a","b","c","d"]}`,
355+
}, { // We have 2 duplicates in LHS, and nothing on the right
356+
`{"setStr":["a","a","b","b"]}`,
357+
`{"setStr":[]}`,
358+
`{"setStr":["a","a","b","b"]}`,
331359
}, {
332360
`{"setBool":[true]}`,
333361
`{"setBool":[false]}`,
@@ -336,6 +364,22 @@ var mergeCases = []mergeTestCase{{
336364
`{"setNumeric":[1,2,3.14159]}`,
337365
`{"setNumeric":[1,2,3]}`,
338366
`{"setNumeric":[1,2,3.14159,3]}`,
367+
}, {
368+
`{"setStr":["c","a","g","f","c","a"]}`,
369+
`{"setStr":["c","f","a","g"]}`,
370+
`{"setStr":["c","f","a","g"]}`,
371+
}, {
372+
`{"setNumeric":[1,2,3.14159,1,2]}`,
373+
`{"setNumeric":[1,2,3]}`,
374+
`{"setNumeric":[1,2,3.14159,3]}`,
375+
}, {
376+
`{"setBool":[true,false,true]}`,
377+
`{"setBool":[false]}`,
378+
`{"setBool":[true,false,true]}`,
379+
}, {
380+
`{"setBool":[true,false,true]}`,
381+
`{"setBool":[true]}`,
382+
`{"setBool":[true, false]}`,
339383
},
340384
},
341385
}, {
@@ -423,6 +467,18 @@ var mergeCases = []mergeTestCase{{
423467
`{"atomicList":["a","a","a"]}`,
424468
`{"atomicList":["a","a"]}`,
425469
`{"atomicList":["a","a"]}`,
470+
}, {
471+
`{"list":[{"key":"a","id":1,"bv":true},{"key":"b","id":2},{"key":"a","id":1,"bv":false,"nv":2}]}`,
472+
`{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
473+
`{"list":[{"key":"a","id":1,"bv":true,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
474+
}, {
475+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
476+
`{"list":[]}`,
477+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
478+
}, {
479+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
480+
`{}`,
481+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
426482
}},
427483
}}
428484

@@ -437,18 +493,16 @@ func (tt mergeTestCase) test(t *testing.T) {
437493
t.Run(fmt.Sprintf("%v-valid-%v", tt.name, i), func(t *testing.T) {
438494
t.Parallel()
439495
pt := parser.Type(tt.rootTypeName)
440-
441-
lhs, err := pt.FromYAML(triplet.lhs)
496+
// Former object can have duplicates in sets.
497+
lhs, err := pt.FromYAML(triplet.lhs, typed.AllowDuplicates)
442498
if err != nil {
443499
t.Fatalf("unable to parser/validate lhs yaml: %v\n%v", err, triplet.lhs)
444500
}
445-
446501
rhs, err := pt.FromYAML(triplet.rhs)
447502
if err != nil {
448503
t.Fatalf("unable to parser/validate rhs yaml: %v\n%v", err, triplet.rhs)
449504
}
450-
451-
out, err := pt.FromYAML(triplet.out)
505+
out, err := pt.FromYAML(triplet.out, typed.AllowDuplicates)
452506
if err != nil {
453507
t.Fatalf("unable to parser/validate out yaml: %v\n%v", err, triplet.out)
454508
}

0 commit comments

Comments
 (0)