@@ -1676,8 +1676,8 @@ def dot(self, other: AnyArrayLike | DataFrame) -> DataFrame | Series:
             if len(common) > len(self.columns) or len(common) > len(other.index):
                 raise ValueError("matrices are not aligned")
 
-            left = self.reindex(columns=common, copy=False)
-            right = other.reindex(index=common, copy=False)
+            left = self.reindex(columns=common)
+            right = other.reindex(index=common)
             lvals = left.values
             rvals = right._values
         else:
@@ -3800,27 +3800,6 @@ def _iter_column_arrays(self) -> Iterator[ArrayLike]:
         for i in range(len(self.columns)):
             yield self._get_column_array(i)
 
-    def _getitem_nocopy(self, key: list):
-        """
-        Behaves like __getitem__, but returns a view in cases where __getitem__
-        would make a copy.
-        """
-        # TODO(CoW): can be removed if/when we are always Copy-on-Write
-        indexer = self.columns._get_indexer_strict(key, "columns")[1]
-        new_axis = self.columns[indexer]
-
-        new_mgr = self._mgr.reindex_indexer(
-            new_axis,
-            indexer,
-            axis=0,
-            allow_dups=True,
-            copy=False,
-            only_slice=True,
-        )
-        result = self._constructor_from_mgr(new_mgr, axes=new_mgr.axes)
-        result = result.__finalize__(self)
-        return result
-
     def __getitem__(self, key):
         check_dict_or_set_indexers(key)
         key = lib.item_from_zerodim(key)
@@ -3911,7 +3890,7 @@ def _getitem_bool_array(self, key):
         key = check_bool_indexer(self.index, key)
 
         if key.all():
-            return self.copy(deep=None)
+            return self.copy(deep=False)
 
         indexer = key.nonzero()[0]
         return self.take(indexer, axis=0)
@@ -4774,7 +4753,7 @@ def predicate(arr: ArrayLike) -> bool:
 
             return True
 
-        mgr = self._mgr._get_data_subset(predicate).copy(deep=None)
+        mgr = self._mgr._get_data_subset(predicate).copy(deep=False)
         return self._constructor_from_mgr(mgr, axes=mgr.axes).__finalize__(self)
 
     def insert(
@@ -4919,7 +4898,7 @@ def assign(self, **kwargs) -> DataFrame:
         Portland    17.0    62.6  290.15
         Berkeley    25.0    77.0  298.15
         """
-        data = self.copy(deep=None)
+        data = self.copy(deep=False)
 
         for k, v in kwargs.items():
             data[k] = com.apply_if_callable(v, data)
@@ -4996,7 +4975,6 @@ def _reindex_multi(self, axes: dict[str, Index], fill_value) -> DataFrame:
         else:
             return self._reindex_with_indexers(
                 {0: [new_index, row_indexer], 1: [new_columns, col_indexer]},
-                copy=False,
                 fill_value=fill_value,
             )
 
@@ -5038,7 +5016,7 @@ def set_axis(
         axis: Axis = 0,
         copy: bool | None = None,
     ) -> DataFrame:
-        return super().set_axis(labels, axis=axis, copy=copy)
+        return super().set_axis(labels, axis=axis)
 
     @doc(
         NDFrame.reindex,
@@ -5065,7 +5043,6 @@ def reindex(
             columns=columns,
             axis=axis,
             method=method,
-            copy=copy,
             level=level,
             fill_value=fill_value,
             limit=limit,
@@ -5463,7 +5440,6 @@ def rename(
             index=index,
             columns=columns,
             axis=axis,
-            copy=copy,
             inplace=inplace,
             level=level,
             errors=errors,
@@ -5534,7 +5510,7 @@ def _replace_columnwise(
         DataFrame or None
         """
         # Operate column-wise
-        res = self if inplace else self.copy(deep=None)
+        res = self if inplace else self.copy(deep=False)
         ax = self.columns
 
         for i, ax_value in enumerate(ax):
@@ -5823,8 +5799,7 @@ def set_index(
         if inplace:
             frame = self
         else:
-            # GH 49473 Use "lazy copy" with Copy-on-Write
-            frame = self.copy(deep=None)
+            frame = self.copy(deep=False)
 
         arrays: list[Index] = []
         names: list[Hashable] = []
@@ -6114,7 +6089,7 @@ class max type
         if inplace:
             new_obj = self
         else:
-            new_obj = self.copy(deep=None)
+            new_obj = self.copy(deep=False)
         if allow_duplicates is not lib.no_default:
             allow_duplicates = validate_bool_kwarg(allow_duplicates, "allow_duplicates")
 
@@ -6386,7 +6361,7 @@ def dropna(
             raise ValueError(f"invalid how option: {how}")
 
         if np.all(mask):
-            result = self.copy(deep=None)
+            result = self.copy(deep=False)
         else:
             result = self.loc(axis=axis)[mask]
 
@@ -6515,7 +6490,7 @@ def drop_duplicates(
         4  Indomie  pack     5.0
         """
         if self.empty:
-            return self.copy(deep=None)
+            return self.copy(deep=False)
 
         inplace = validate_bool_kwarg(inplace, "inplace")
         ignore_index = validate_bool_kwarg(ignore_index, "ignore_index")
@@ -6631,7 +6606,7 @@ def duplicated(
 
         def f(vals) -> tuple[np.ndarray, int]:
             labels, shape = algorithms.factorize(vals, size_hint=len(self))
-            return labels.astype("i8", copy=False), len(shape)
+            return labels.astype("i8"), len(shape)
 
         if subset is None:
             # https://github.com/pandas-dev/pandas/issues/28770
@@ -6914,7 +6889,7 @@ def sort_values(
             if inplace:
                 return self._update_inplace(self)
             else:
-                return self.copy(deep=None)
+                return self.copy(deep=False)
 
         if is_range_indexer(indexer, len(indexer)):
             result = self.copy(deep=False)
@@ -7570,7 +7545,7 @@ def nsmallest(
         ),
     )
     def swaplevel(self, i: Axis = -2, j: Axis = -1, axis: Axis = 0) -> DataFrame:
-        result = self.copy(deep=None)
+        result = self.copy(deep=False)
 
         axis = self._get_axis_number(axis)
 
@@ -7630,7 +7605,7 @@ class diet
         if not isinstance(self._get_axis(axis), MultiIndex):  # pragma: no cover
             raise TypeError("Can only reorder levels on a hierarchical axis.")
 
-        result = self.copy(deep=None)
+        result = self.copy(deep=False)
 
         if axis == 0:
             assert isinstance(result.index, MultiIndex)
@@ -7933,9 +7908,7 @@ def to_series(right):
         if flex is not None and isinstance(right, DataFrame):
             if not left._indexed_same(right):
                 if flex:
-                    left, right = left.align(
-                        right, join="outer", level=level, copy=False
-                    )
+                    left, right = left.align(right, join="outer", level=level)
                 else:
                     raise ValueError(
                         "Can only compare identically-labeled (both index and columns) "
@@ -7948,7 +7921,7 @@ def to_series(right):
                 if not left.axes[axis].equals(right.index):
                     raise ValueError(
                         "Operands are not aligned. Do "
-                        "`left, right = left.align(right, axis=1, copy=False)` "
+                        "`left, right = left.align(right, axis=1)` "
                         "before operating."
                     )
 
@@ -7957,7 +7930,6 @@ def to_series(right):
                 join="outer",
                 axis=axis,
                 level=level,
-                copy=False,
             )
             right = left._maybe_align_series_as_frame(right, axis)
 
@@ -8467,7 +8439,7 @@ def combine(
         """
        other_idxlen = len(other.index)  # save for compare
 
-        this, other = self.align(other, copy=False)
+        this, other = self.align(other)
         new_index = this.index
 
         if other.empty and len(new_index) == len(self.index):
@@ -8507,15 +8479,15 @@ def combine(
                 # try to promote series, which is all NaN, as other_dtype.
                 new_dtype = other_dtype
                 try:
-                    series = series.astype(new_dtype, copy=False)
+                    series = series.astype(new_dtype)
                 except ValueError:
                     # e.g. new_dtype is integer types
                     pass
             else:
                 # if we have different dtypes, possibly promote
                 new_dtype = find_common_type([this_dtype, other_dtype])
-                series = series.astype(new_dtype, copy=False)
-                other_series = other_series.astype(new_dtype, copy=False)
+                series = series.astype(new_dtype)
+                other_series = other_series.astype(new_dtype)
 
             arr = func(series, other_series)
             if isinstance(new_dtype, np.dtype):
@@ -9567,7 +9539,7 @@ def explode(
             result.index = default_index(len(result))
         else:
             result.index = self.index.take(result.index)
-        result = result.reindex(columns=self.columns, copy=False)
+        result = result.reindex(columns=self.columns)
 
         return result.__finalize__(self, method="explode")
 
@@ -10263,9 +10235,7 @@ def _append(
                 row_df = other.to_frame().T
                 # infer_objects is needed for
                 # test_append_empty_frame_to_series_with_dateutil_tz
-                other = row_df.infer_objects(copy=False).rename_axis(
-                    index.names, copy=False
-                )
+                other = row_df.infer_objects().rename_axis(index.names)
         elif isinstance(other, list):
             if not other:
                 pass
@@ -10509,7 +10479,7 @@ def join(
                     res = concat(
                         frames, axis=1, join="outer", verify_integrity=True, sort=sort
                     )
-                    return res.reindex(self.index, copy=False)
+                    return res.reindex(self.index)
                 else:
                     return concat(
                         frames, axis=1, join=how, verify_integrity=True, sort=sort
@@ -10559,7 +10529,6 @@ def merge(
             right_index=right_index,
             sort=sort,
             suffixes=suffixes,
-            copy=copy,
             indicator=indicator,
             validate=validate,
         )
@@ -11024,7 +10993,7 @@ def corrwith(
 
         if numeric_only:
             other = other._get_numeric_data()
-        left, right = this.align(other, join="inner", copy=False)
+        left, right = this.align(other, join="inner")
 
         if axis == 1:
             left = left.T
@@ -11161,7 +11130,7 @@ def count(self, axis: Axis = 0, numeric_only: bool = False):
         else:
             result = notna(frame).sum(axis=axis)
 
-        return result.astype("int64", copy=False).__finalize__(self, method="count")
+        return result.astype("int64").__finalize__(self, method="count")
 
     def _reduce(
         self,
@@ -11225,7 +11194,7 @@ def _get_data() -> DataFrame:
             if axis is None:
                 dtype = find_common_type([arr.dtype for arr in df._mgr.arrays])
                 if isinstance(dtype, ExtensionDtype):
-                    df = df.astype(dtype, copy=False)
+                    df = df.astype(dtype)
                 arr = concat_compat(list(df._iter_column_arrays()))
                 return arr._reduce(name, skipna=skipna, keepdims=False, **kwds)
             return func(df.values)
@@ -11257,7 +11226,7 @@ def _get_data() -> DataFrame:
             # be equivalent to transposing the original frame and aggregating
             # with axis=0.
             name = {"argmax": "idxmax", "argmin": "idxmin"}.get(name, name)
-            df = df.astype(dtype, copy=False)
+            df = df.astype(dtype)
             arr = concat_compat(list(df._iter_column_arrays()))
             nrows, ncols = df.shape
             row_index = np.tile(np.arange(nrows), ncols)
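
Every hunk above follows the same Copy-on-Write cleanup: the now no-op copy keyword is dropped from internal reindex/align/astype calls, and lazy copy(deep=None) collapses to copy(deep=False). A minimal sketch of the user-facing semantics this relies on, assuming Copy-on-Write is active (the default in pandas 3.x; on 2.x it can be enabled with pd.options.mode.copy_on_write = True):

    import pandas as pd

    df = pd.DataFrame({"a": [1, 2, 3]})

    # A shallow copy shares the underlying data lazily; nothing is copied yet.
    view = df.copy(deep=False)

    # Writing to the shallow copy triggers a copy of the touched column,
    # so the original frame is left unchanged.
    view.loc[0, "a"] = 99
    print(df.loc[0, "a"])    # 1
    print(view.loc[0, "a"])  # 99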