@@ -829,11 +829,33 @@ def test_groupby_math_bitshift() -> None:
         }
     )
 
+    left_manual = []
+    for lev, group in ds.groupby("level"):
+        shifter = shift.sel(level=lev)
+        left_manual.append(group << shifter)
+    left_actual = xr.concat(left_manual, dim="index").reset_coords(names="level")
+    assert_equal(left_expected, left_actual)
+
     left_actual = (ds.groupby("level") << shift).reset_coords(names="level")
     assert_equal(left_expected, left_actual)
 
+    right_expected = Dataset(
+        {
+            "x": ("index", [0, 0, 2, 2]),
+            "y": ("index", [-1, -1, -2, -2]),
+            "level": ("index", [0, 0, 4, 4]),
+            "index": [0, 1, 2, 3],
+        }
+    )
+    right_manual = []
+    for lev, group in left_expected.groupby("level"):
+        shifter = shift.sel(level=lev)
+        right_manual.append(group >> shifter)
+    right_actual = xr.concat(right_manual, dim="index").reset_coords(names="level")
+    assert_equal(right_expected, right_actual)
+
     right_actual = (left_expected.groupby("level") >> shift).reset_coords(names="level")
-    assert_equal(ds, right_actual)
+    assert_equal(right_expected, right_actual)
 
 
 @pytest.mark.parametrize("use_flox", [True, False])
@@ -1302,8 +1324,15 @@ def test_groupby_math_not_aligned(self):
         expected = DataArray([10, 11, np.nan, np.nan], array.coords)
         assert_identical(expected, actual)
 
+        # regression test for #7797
+        other = array.groupby("b").sum()
+        actual = array.sel(x=[0, 1]).groupby("b") - other
+        expected = DataArray([-1, 0], {"b": ("x", [0, 0]), "x": [0, 1]}, dims="x")
+        assert_identical(expected, actual)
+
         other = DataArray([10], coords={"c": 123, "b": [0]}, dims="b")
         actual = array.groupby("b") + other
+        expected = DataArray([10, 11, np.nan, np.nan], array.coords)
         expected.coords["c"] = (["x"], [123] * 2 + [np.nan] * 2)
         assert_identical(expected, actual)
 
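
A standalone sketch (not part of the diff) of the behaviour these new regression lines check: grouped arithmetic where the left-hand side contains only a subset of the groups present in the reduced operand. The `array` constructed here is illustrative; the actual fixture is defined earlier in the test class, outside this hunk.

```python
import xarray as xr

# group b=0 covers x=[0, 1]; group b=1 covers x=[2, 3]
array = xr.DataArray(range(4), {"b": ("x", [0, 0, 1, 1]), "x": [0, 1, 2, 3]}, dims="x")
sums = array.groupby("b").sum()  # per-group sums over x

# selecting x=[0, 1] drops group b=1 entirely; the grouped op should
# subtract only the b=0 sum (which is 1) from each remaining element
subset = array.sel(x=[0, 1])
anom = subset.groupby("b") - sums
print(anom.values)  # 0 - 1 and 1 - 1, i.e. -1 and 0
```

The regression test above pins down exactly these expected values for the subset case.
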
@@ -2289,3 +2318,20 @@ def test_resample_cumsum(method: str, expected_array: list[float]) -> None:
     actual = getattr(ds.foo.resample(time="3M"), method)(dim="time")
     expected.coords["time"] = ds.time
     assert_identical(expected.drop_vars(["time"]).foo, actual)
+
+
+def test_groupby_binary_op_regression() -> None:
+    # regression test for #7797
+    # monthly timeseries that should return "zero anomalies" everywhere
+    time = xr.date_range("2023-01-01", "2023-12-31", freq="MS")
+    data = np.linspace(-1, 1, 12)
+    x = xr.DataArray(data, coords={"time": time})
+    clim = xr.DataArray(data, coords={"month": np.arange(1, 13, 1)})
+
+    # seems to give the correct result if we use the full x, but not with a slice
+    x_slice = x.sel(time=["2023-04-01"])
+
+    # two typical ways of computing anomalies
+    anom_gb = x_slice.groupby("time.month") - clim
+
+    assert_identical(xr.zeros_like(anom_gb), anom_gb)
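
For context on the "two typical ways of computing anomalies" comment: the grouped subtraction in the new test should agree with explicitly selecting the climatology value for each timestamp's month. A minimal sketch of that equivalence (an illustration, not part of the PR), reusing the test's variable names:

```python
import numpy as np
import xarray as xr

time = xr.date_range("2023-01-01", "2023-12-31", freq="MS")
data = np.linspace(-1, 1, 12)
x = xr.DataArray(data, coords={"time": time})
clim = xr.DataArray(data, coords={"month": np.arange(1, 13, 1)})
x_slice = x.sel(time=["2023-04-01"])

# way 1: group the single-month slice by month, then subtract the climatology
anom_gb = x_slice.groupby("time.month") - clim

# way 2: pointwise-select the climatology at each timestamp's month
anom_sel = x_slice - clim.sel(month=x_slice.time.dt.month)

# both anomalies are zero because clim was built from x itself
np.testing.assert_allclose(anom_gb.values, 0.0)
np.testing.assert_allclose(anom_sel.values, 0.0)
```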