Skip to content

Commit 9c99f49

Browse files
authored
Merge pull request #253 from apelisse/merge-duplicates
typed: Allow duplicates when we merge
2 parents 1233172 + 4daa91c commit 9c99f49

File tree

2 files changed

+90
-22
lines changed

2 files changed

+90
-22
lines changed

Diff for: typed/merge.go

+31-17
Original file line numberDiff line numberDiff line change
@@ -180,14 +180,18 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
180180
}
181181
out := make([]interface{}, 0, outLen)
182182

183-
rhsOrder, observedRHS, rhsErrs := w.indexListPathElements(t, rhs)
183+
rhsPEs, observedRHS, rhsErrs := w.indexListPathElements(t, rhs, false)
184184
errs = append(errs, rhsErrs...)
185-
lhsOrder, observedLHS, lhsErrs := w.indexListPathElements(t, lhs)
185+
lhsPEs, observedLHS, lhsErrs := w.indexListPathElements(t, lhs, true)
186186
errs = append(errs, lhsErrs...)
187187

188+
if len(errs) != 0 {
189+
return errs
190+
}
191+
188192
sharedOrder := make([]*fieldpath.PathElement, 0, rLen)
189-
for i := range rhsOrder {
190-
pe := &rhsOrder[i]
193+
for i := range rhsPEs {
194+
pe := &rhsPEs[i]
191195
if _, ok := observedLHS.Get(*pe); ok {
192196
sharedOrder = append(sharedOrder, pe)
193197
}
@@ -199,13 +203,15 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
199203
sharedOrder = sharedOrder[1:]
200204
}
201205

202-
lLen, rLen = len(lhsOrder), len(rhsOrder)
206+
mergedRHS := fieldpath.MakePathElementMap(len(rhsPEs))
207+
lLen, rLen = len(lhsPEs), len(rhsPEs)
203208
for lI, rI := 0, 0; lI < lLen || rI < rLen; {
204209
if lI < lLen && rI < rLen {
205-
pe := lhsOrder[lI]
206-
if pe.Equals(rhsOrder[rI]) {
210+
pe := lhsPEs[lI]
211+
if pe.Equals(rhsPEs[rI]) {
207212
// merge LHS & RHS items
208-
lChild, _ := observedLHS.Get(pe)
213+
mergedRHS.Insert(pe, struct{}{})
214+
lChild, _ := observedLHS.Get(pe) // may be nil if the PE is duplicated.
209215
rChild, _ := observedRHS.Get(pe)
210216
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
211217
errs = append(errs, errs...)
@@ -222,30 +228,34 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
222228
}
223229
continue
224230
}
225-
if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsOrder[lI]) {
231+
if _, ok := observedRHS.Get(pe); ok && nextShared != nil && !nextShared.Equals(lhsPEs[lI]) {
226232
// shared item, but not the one we want in this round
227233
lI++
228234
continue
229235
}
230236
}
231237
if lI < lLen {
232-
pe := lhsOrder[lI]
238+
pe := lhsPEs[lI]
233239
if _, ok := observedRHS.Get(pe); !ok {
234-
// take LHS item
235-
lChild, _ := observedLHS.Get(pe)
240+
// take LHS item using At to make sure we get the right item (observed may not contain the right item).
241+
lChild := lhs.AtUsing(w.allocator, lI)
236242
mergeOut, errs := w.mergeListItem(t, pe, lChild, nil)
237243
errs = append(errs, errs...)
238244
if mergeOut != nil {
239245
out = append(out, *mergeOut)
240246
}
241247
lI++
242248
continue
249+
} else if _, ok := mergedRHS.Get(pe); ok {
250+
// we've already merged it with RHS, we don't want to duplicate it, skip it.
251+
lI++
243252
}
244253
}
245254
if rI < rLen {
246255
// Take the RHS item, merge with matching LHS item if possible
247-
pe := rhsOrder[rI]
248-
lChild, _ := observedLHS.Get(pe) // may be nil
256+
pe := rhsPEs[rI]
257+
mergedRHS.Insert(pe, struct{}{})
258+
lChild, _ := observedLHS.Get(pe) // may be nil if absent or duplicated.
249259
rChild, _ := observedRHS.Get(pe)
250260
mergeOut, errs := w.mergeListItem(t, pe, lChild, rChild)
251261
errs = append(errs, errs...)
@@ -272,7 +282,7 @@ func (w *mergingWalker) visitListItems(t *schema.List, lhs, rhs value.List) (err
272282
return errs
273283
}
274284

275-
func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
285+
func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List, allowDuplicates bool) ([]fieldpath.PathElement, fieldpath.PathElementValueMap, ValidationErrors) {
276286
var errs ValidationErrors
277287
length := 0
278288
if list != nil {
@@ -290,11 +300,15 @@ func (w *mergingWalker) indexListPathElements(t *schema.List, list value.List) (
290300
// this element.
291301
continue
292302
}
293-
if _, found := observed.Get(pe); found {
303+
if _, found := observed.Get(pe); found && !allowDuplicates {
294304
errs = append(errs, errorf("duplicate entries for key %v", pe.String())...)
295305
continue
306+
} else if !found {
307+
observed.Insert(pe, child)
308+
} else {
309+
// Duplicated items are not merged with the new value, make them nil.
310+
observed.Insert(pe, value.NewValueInterface(nil))
296311
}
297-
observed.Insert(pe, child)
298312
pes = append(pes, pe)
299313
}
300314
return pes, observed, errs

Diff for: typed/merge_test.go

+59-5
Original file line numberDiff line numberDiff line change
@@ -328,6 +328,34 @@ var mergeCases = []mergeTestCase{{
328328
`{"setStr":["a","b","c","d","e","f","g","h","i","j"]}`,
329329
`{"setStr":["1","b","2","h","3","e","4","k","l"]}`,
330330
`{"setStr":["a","1","b","c","d","f","g","2","h","i","j","3","e","4","k","l"]}`,
331+
}, { // We have a duplicate in LHS
332+
`{"setStr":["a","b","b"]}`,
333+
`{"setStr":["c"]}`,
334+
`{"setStr":["a","b","b","c"]}`,
335+
}, { // We have a duplicate in LHS.
336+
`{"setStr":["a","b","b"]}`,
337+
`{"setStr":["b"]}`,
338+
`{"setStr":["a","b"]}`,
339+
}, { // We have a duplicate in LHS.
340+
`{"setStr":["a","b","b"]}`,
341+
`{"setStr":["a"]}`,
342+
`{"setStr":["a","b","b"]}`,
343+
}, { // We have a duplicate in LHS.
344+
`{"setStr":["a","b","c","d","e","c"]}`,
345+
`{"setStr":["1","b","2","e","d"]}`,
346+
`{"setStr":["a","1","b","c","2","e","c","d"]}`,
347+
}, { // We have a duplicate in LHS, also present in RHS, keep only one.
348+
`{"setStr":["a","b","c","d","e","c"]}`,
349+
`{"setStr":["1","b","2","c","e","d"]}`,
350+
`{"setStr":["a","1","b","2","c","e","d"]}`,
351+
}, { // We have 2 duplicates in LHS, one is replaced.
352+
`{"setStr":["a","a","b","b"]}`,
353+
`{"setStr":["b","c","d"]}`,
354+
`{"setStr":["a","a","b","c","d"]}`,
355+
}, { // We have 2 duplicates in LHS, and nothing on the right
356+
`{"setStr":["a","a","b","b"]}`,
357+
`{"setStr":[]}`,
358+
`{"setStr":["a","a","b","b"]}`,
331359
}, {
332360
`{"setBool":[true]}`,
333361
`{"setBool":[false]}`,
@@ -336,6 +364,22 @@ var mergeCases = []mergeTestCase{{
336364
`{"setNumeric":[1,2,3.14159]}`,
337365
`{"setNumeric":[1,2,3]}`,
338366
`{"setNumeric":[1,2,3.14159,3]}`,
367+
}, {
368+
`{"setStr":["c","a","g","f","c","a"]}`,
369+
`{"setStr":["c","f","a","g"]}`,
370+
`{"setStr":["c","f","a","g"]}`,
371+
}, {
372+
`{"setNumeric":[1,2,3.14159,1,2]}`,
373+
`{"setNumeric":[1,2,3]}`,
374+
`{"setNumeric":[1,2,3.14159,3]}`,
375+
}, {
376+
`{"setBool":[true,false,true]}`,
377+
`{"setBool":[false]}`,
378+
`{"setBool":[true,false,true]}`,
379+
}, {
380+
`{"setBool":[true,false,true]}`,
381+
`{"setBool":[true]}`,
382+
`{"setBool":[true, false]}`,
339383
},
340384
},
341385
}, {
@@ -423,6 +467,18 @@ var mergeCases = []mergeTestCase{{
423467
`{"atomicList":["a","a","a"]}`,
424468
`{"atomicList":["a","a"]}`,
425469
`{"atomicList":["a","a"]}`,
470+
}, {
471+
`{"list":[{"key":"a","id":1,"bv":true},{"key":"b","id":2},{"key":"a","id":1,"bv":false,"nv":2}]}`,
472+
`{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
473+
`{"list":[{"key":"a","id":1,"nv":3},{"key":"c","id":3},{"key":"b","id":2}]}`,
474+
}, {
475+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
476+
`{"list":[]}`,
477+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
478+
}, {
479+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
480+
`{}`,
481+
`{"list":[{"key":"a","id":1,"nv":1},{"key":"a","id":1,"nv":2}]}`,
426482
}},
427483
}}
428484

@@ -437,18 +493,16 @@ func (tt mergeTestCase) test(t *testing.T) {
437493
t.Run(fmt.Sprintf("%v-valid-%v", tt.name, i), func(t *testing.T) {
438494
t.Parallel()
439495
pt := parser.Type(tt.rootTypeName)
440-
441-
lhs, err := pt.FromYAML(triplet.lhs)
496+
// Former object can have duplicates in sets.
497+
lhs, err := pt.FromYAML(triplet.lhs, typed.AllowDuplicates)
442498
if err != nil {
443499
t.Fatalf("unable to parser/validate lhs yaml: %v\n%v", err, triplet.lhs)
444500
}
445-
446501
rhs, err := pt.FromYAML(triplet.rhs)
447502
if err != nil {
448503
t.Fatalf("unable to parser/validate rhs yaml: %v\n%v", err, triplet.rhs)
449504
}
450-
451-
out, err := pt.FromYAML(triplet.out)
505+
out, err := pt.FromYAML(triplet.out, typed.AllowDuplicates)
452506
if err != nil {
453507
t.Fatalf("unable to parser/validate out yaml: %v\n%v", err, triplet.out)
454508
}

0 commit comments

Comments
 (0)