diff --git a/pandas/core/indexing.py b/pandas/core/indexing.py
index b31973de5bca0..b86293e78a80d 100755
--- a/pandas/core/indexing.py
+++ b/pandas/core/indexing.py
@@ -232,7 +232,7 @@ def _has_valid_tuple(self, key: Tuple):
             except ValueError:
                 raise ValueError(
                     "Location based indexing can only have "
-                    "[{types}] types".format(types=self._valid_types)
+                    f"[{self._valid_types}] types"
                 )
 
     def _is_nested_tuple_indexer(self, tup: Tuple) -> bool:
@@ -286,7 +286,7 @@ def _has_valid_positional_setitem_indexer(self, indexer) -> bool:
         bool
         """
         if isinstance(indexer, dict):
-            raise IndexError("{0} cannot enlarge its target object".format(self.name))
+            raise IndexError(f"{self.name} cannot enlarge its target object")
         else:
             if not isinstance(indexer, tuple):
                 indexer = _tuplify(self.ndim, indexer)
@@ -300,13 +300,10 @@ def _has_valid_positional_setitem_indexer(self, indexer) -> bool:
                 elif is_integer(i):
                     if i >= len(ax):
                         raise IndexError(
-                            "{name} cannot enlarge its target "
-                            "object".format(name=self.name)
+                            f"{self.name} cannot enlarge its target object"
                         )
                 elif isinstance(i, dict):
-                    raise IndexError(
-                        "{name} cannot enlarge its target object".format(name=self.name)
-                    )
+                    raise IndexError(f"{self.name} cannot enlarge its target object")
 
         return True
 
@@ -1166,17 +1163,14 @@ def _validate_read_indexer(
 
         if missing:
             if missing == len(indexer):
-                raise KeyError(
-                    "None of [{key}] are in the [{axis}]".format(
-                        key=key, axis=self.obj._get_axis_name(axis)
-                    )
-                )
+                axis_name = self.obj._get_axis_name(axis)
+                raise KeyError(f"None of [{key}] are in the [{axis_name}]")
 
             # We (temporarily) allow for some missing keys with .loc, except in
             # some cases (e.g. setting) in which "raise_missing" will be False
             if not (self.name == "loc" and not raise_missing):
                 not_found = list(set(key) - set(ax))
-                raise KeyError("{} not in index".format(not_found))
+                raise KeyError(f"{not_found} not in index")
 
             # we skip the warning on Categorical/Interval
             # as this check is actually done (check for
@@ -1905,18 +1899,13 @@ def _validate_key(self, key, axis: int):
 
             # check that the key has a numeric dtype
             if not is_numeric_dtype(arr.dtype):
-                raise IndexError(
-                    ".iloc requires numeric indexers, got {arr}".format(arr=arr)
-                )
+                raise IndexError(f".iloc requires numeric indexers, got {arr}")
 
             # check that the key does not exceed the maximum size of the index
             if len(arr) and (arr.max() >= len_axis or arr.min() < -len_axis):
                 raise IndexError("positional indexers are out-of-bounds")
         else:
-            raise ValueError(
-                "Can only index by location with "
-                "a [{types}]".format(types=self._valid_types)
-            )
+            raise ValueError(f"Can only index by location with a [{self._valid_types}]")
 
     def _has_valid_setitem_indexer(self, indexer):
         self._has_valid_positional_setitem_indexer(indexer)
@@ -2063,10 +2052,7 @@ def _convert_to_indexer(self, obj, axis: int, raise_missing: bool = False):
             self._validate_key(obj, axis)
             return obj
         except ValueError:
-            raise ValueError(
-                "Can only index by location with "
-                "a [{types}]".format(types=self._valid_types)
-            )
+            raise ValueError(f"Can only index by location with a [{self._valid_types}]")
 
 
 class _ScalarAccessIndexer(_NDFrameIndexerBase):
@@ -2327,7 +2313,7 @@ def check_bool_indexer(index: Index, key) -> np.ndarray:
         # GH26658
         if len(result) != len(index):
             raise IndexError(
-                "Item wrong length {} instead of {}.".format(len(result), len(index))
+                f"Item wrong length {len(result)} instead of {len(index)}."
             )
 
     return result
diff --git a/pandas/core/internals/blocks.py b/pandas/core/internals/blocks.py
index 610a39a05148b..eb5b5181d894d 100644
--- a/pandas/core/internals/blocks.py
+++ b/pandas/core/internals/blocks.py
@@ -115,8 +115,8 @@ def __init__(self, values, placement, ndim=None):
 
         if self._validate_ndim and self.ndim and len(self.mgr_locs) != len(self.values):
             raise ValueError(
-                "Wrong number of items passed {val}, placement implies "
-                "{mgr}".format(val=len(self.values), mgr=len(self.mgr_locs))
+                f"Wrong number of items passed {len(self.values)}, "
+                f"placement implies {len(self.mgr_locs)}"
             )
 
     def _check_ndim(self, values, ndim):
@@ -144,9 +144,10 @@ def _check_ndim(self, values, ndim):
             ndim = values.ndim
 
         if self._validate_ndim and values.ndim != ndim:
-            msg = "Wrong number of dimensions. values.ndim != ndim [{} != {}]"
-            raise ValueError(msg.format(values.ndim, ndim))
-
+            raise ValueError(
+                "Wrong number of dimensions. "
+                f"values.ndim != ndim [{values.ndim} != {ndim}]"
+            )
         return ndim
 
     @property
@@ -184,7 +185,7 @@ def is_categorical_astype(self, dtype):
         if dtype is Categorical or dtype is CategoricalDtype:
             # this is a pd.Categorical, but is not
             # a valid type for astypeing
-            raise TypeError("invalid type {0} for astype".format(dtype))
+            raise TypeError(f"invalid type {dtype} for astype")
 
         elif is_categorical_dtype(dtype):
             return True
@@ -264,18 +265,14 @@ def __repr__(self) -> str:
         name = type(self).__name__
         if self._is_single_block:
 
-            result = "{name}: {len} dtype: {dtype}".format(
-                name=name, len=len(self), dtype=self.dtype
-            )
+            result = f"{name}: {len(self)} dtype: {self.dtype}"
 
         else:
 
             shape = " x ".join(pprint_thing(s) for s in self.shape)
-            result = "{name}: {index}, {shape}, dtype: {dtype}".format(
-                name=name,
-                index=pprint_thing(self.mgr_locs.indexer),
-                shape=shape,
-                dtype=self.dtype,
+            result = (
+                f"{name}: {pprint_thing(self.mgr_locs.indexer)}, "
+                f"{shape}, dtype: {self.dtype}"
             )
 
         return result
@@ -329,7 +326,7 @@ def ftype(self):
             dtype = self.dtype.subtype
         else:
             dtype = self.dtype
-        return "{dtype}:{ftype}".format(dtype=dtype, ftype=self._ftype)
+        return f"{dtype}:{self._ftype}"
 
     def merge(self, other):
         return _merge_blocks([self, other])
@@ -544,15 +541,15 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"):
 
         if errors not in errors_legal_values:
             invalid_arg = (
-                "Expected value of kwarg 'errors' to be one of {}. "
-                "Supplied value is '{}'".format(list(errors_legal_values), errors)
+                "Expected value of kwarg 'errors' to be one of "
+                f"{list(errors_legal_values)}. Supplied value is '{errors}'"
            )
            raise ValueError(invalid_arg)
 
        if inspect.isclass(dtype) and issubclass(dtype, ExtensionDtype):
            msg = (
-                "Expected an instance of {}, but got the class instead. "
-                "Try instantiating 'dtype'.".format(dtype.__name__)
+                f"Expected an instance of {dtype.__name__}, "
+                "but got the class instead. Try instantiating 'dtype'."
             )
             raise TypeError(msg)
 
@@ -613,15 +610,9 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"):
 
         if newb.is_numeric and self.is_numeric:
             if newb.shape != self.shape:
                 raise TypeError(
-                    "cannot set astype for copy = [{copy}] for dtype "
-                    "({dtype} [{shape}]) to different shape "
-                    "({newb_dtype} [{newb_shape}])".format(
-                        copy=copy,
-                        dtype=self.dtype.name,
-                        shape=self.shape,
-                        newb_dtype=newb.dtype.name,
-                        newb_shape=newb.shape,
-                    )
+                    f"cannot set astype for copy = [{copy}] for dtype "
+                    f"({self.dtype.name} [{self.shape}]) to different shape "
+                    f"({newb.dtype.name} [{newb.shape}])"
                 )
         return newb
@@ -658,7 +649,7 @@ def to_native_types(self, slicer=None, na_rep="nan", quoting=None, **kwargs):
 
         if not self.is_object and not quoting:
             itemsize = writers.word_len(na_rep)
-            values = values.astype("<U{size}".format(size=itemsize))
+            values = values.astype(f"<U{itemsize}")
diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py
--- a/pandas/core/internals/managers.py
+++ b/pandas/core/internals/managers.py
@@ ... @@ def __repr__(self) -> str:
         output = type(self).__name__
         for i, ax in enumerate(self.axes):
             if i == 0:
-                output += "\nItems: {ax}".format(ax=ax)
+                output += f"\nItems: {ax}"
             else:
-                output += "\nAxis {i}: {ax}".format(i=i, ax=ax)
+                output += f"\nAxis {i}: {ax}"
 
         for block in self.blocks:
-            output += "\n{block}".format(block=pprint_thing(block))
+            output += f"\n{pprint_thing(block)}"
         return output
 
     def _verify_integrity(self):
@@ -336,8 +336,8 @@ def _verify_integrity(self):
         if len(self.items) != tot_items:
             raise AssertionError(
                 "Number of manager items must equal union of "
-                "block items\n# manager items: {0}, # "
-                "tot_items: {1}".format(len(self.items), tot_items)
+                f"block items\n# manager items: {len(self.items)}, # "
+                f"tot_items: {tot_items}"
             )
 
     def apply(self, f: str, filter=None, **kwargs):
@@ -1140,7 +1140,7 @@ def insert(self, loc: int, item, value, allow_duplicates: bool = False):
         """
         if not allow_duplicates and item in self.items:
             # Should this be a different kind of error??
-            raise ValueError("cannot insert {}, already exists".format(item))
+            raise ValueError(f"cannot insert {item}, already exists")
 
         if not isinstance(loc, int):
             raise TypeError("loc must be int")
@@ -1661,9 +1661,7 @@ def construction_error(tot_items, block_shape, axes, e=None):
         raise e
     if block_shape[0] == 0:
         raise ValueError("Empty data passed with indices specified.")
-    raise ValueError(
-        "Shape of passed values is {0}, indices imply {1}".format(passed, implied)
-    )
+    raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")
 
 
 # -----------------------------------------------------------------------
@@ -1899,10 +1897,10 @@ def _compare_or_regex_search(a, b, regex=False):
         type_names = [type(a).__name__, type(b).__name__]
 
         if is_a_array:
-            type_names[0] = "ndarray(dtype={dtype})".format(dtype=a.dtype)
+            type_names[0] = f"ndarray(dtype={a.dtype})"
 
         if is_b_array:
-            type_names[1] = "ndarray(dtype={dtype})".format(dtype=b.dtype)
+            type_names[1] = f"ndarray(dtype={b.dtype})"
 
         raise TypeError(
             f"Cannot compare types {repr(type_names[0])} and {repr(type_names[1])}"
diff --git a/pandas/core/resample.py b/pandas/core/resample.py
index 67f06ea7bea6a..2294c846e81c7 100644
--- a/pandas/core/resample.py
+++ b/pandas/core/resample.py
@@ -90,13 +90,11 @@ def __str__(self) -> str:
         Provide a nice str repr of our rolling object.
""" attrs = ( - "{k}={v}".format(k=k, v=getattr(self.groupby, k)) + f"{k}={getattr(self.groupby, k)}" for k in self._attributes if getattr(self.groupby, k, None) is not None ) - return "{klass} [{attrs}]".format( - klass=type(self).__name__, attrs=", ".join(attrs) - ) + return f"{type(self).__name__} [{', '.join(attrs)}]" def __getattr__(self, attr): if attr in self._internal_names_set: @@ -1188,8 +1186,8 @@ def _downsample(self, how, **kwargs): return self.asfreq() raise IncompatibleFrequency( - "Frequency {} cannot be resampled to {}, as they are not " - "sub or super periods".format(ax.freq, self.freq) + f"Frequency {ax.freq} cannot be resampled to {self.freq}, " + "as they are not sub or super periods" ) def _upsample(self, method, limit=None, fill_value=None): @@ -1333,11 +1331,11 @@ def __init__( # Check for correctness of the keyword arguments which would # otherwise silently use the default if misspelled if label not in {None, "left", "right"}: - raise ValueError("Unsupported value {} for `label`".format(label)) + raise ValueError(f"Unsupported value {label} for `label`") if closed not in {None, "left", "right"}: - raise ValueError("Unsupported value {} for `closed`".format(closed)) + raise ValueError(f"Unsupported value {closed} for `closed`") if convention not in {None, "start", "end", "e", "s"}: - raise ValueError("Unsupported value {} for `convention`".format(convention)) + raise ValueError(f"Unsupported value {convention} for `convention`") freq = to_offset(freq) @@ -1407,7 +1405,7 @@ def _get_resampler(self, obj, kind=None): raise TypeError( "Only valid with DatetimeIndex, " "TimedeltaIndex or PeriodIndex, " - "but got an instance of '{typ}'".format(typ=type(ax).__name__) + f"but got an instance of '{type(ax).__name__}'" ) def _get_grouper(self, obj, validate=True): @@ -1420,7 +1418,7 @@ def _get_time_bins(self, ax): if not isinstance(ax, DatetimeIndex): raise TypeError( "axis must be a DatetimeIndex, but got " - "an instance of {typ}".format(typ=type(ax).__name__) + f"an instance of {type(ax).__name__}" ) if len(ax) == 0: @@ -1496,7 +1494,7 @@ def _get_time_delta_bins(self, ax): if not isinstance(ax, TimedeltaIndex): raise TypeError( "axis must be a TimedeltaIndex, but got " - "an instance of {typ}".format(typ=type(ax).__name__) + f"an instance of {type(ax).__name__}" ) if not len(ax): @@ -1521,7 +1519,7 @@ def _get_time_period_bins(self, ax): if not isinstance(ax, DatetimeIndex): raise TypeError( "axis must be a DatetimeIndex, but got " - "an instance of {typ}".format(typ=type(ax).__name__) + f"an instance of {type(ax).__name__}" ) freq = self.freq @@ -1543,7 +1541,7 @@ def _get_period_bins(self, ax): if not isinstance(ax, PeriodIndex): raise TypeError( "axis must be a PeriodIndex, but got " - "an instance of {typ}".format(typ=type(ax).__name__) + f"an instance of {type(ax).__name__}" ) memb = ax.asfreq(self.freq, how=self.convention)