CLN: Old string formatting: .format() -> f"" #30328

Merged: 5 commits, Dec 20, 2019
Changes from 4 commits
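The mechanical rewrite applied throughout the diff below is the standard `.format()` -> f-string conversion. A minimal standalone sketch of the pattern (variable names here are illustrative, not taken from pandas):

```python
# Before: values are interpolated through str.format() with named placeholders.
valid_types = "integer, integer slice"
old_msg = "Can only index by location with a [{types}]".format(types=valid_types)

# After: the expression is interpolated in place with an f-string (Python 3.6+),
# keeping the value next to where it is used.
new_msg = f"Can only index by location with a [{valid_types}]"

assert old_msg == new_msg  # the rendered message is unchanged
```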
29 changes: 10 additions & 19 deletions pandas/core/indexing.py
@@ -232,7 +232,7 @@ def _has_valid_tuple(self, key: Tuple):
except ValueError:
raise ValueError(
"Location based indexing can only have "
"[{types}] types".format(types=self._valid_types)
f"[{self._valid_types}] types"
)

def _is_nested_tuple_indexer(self, tup: Tuple) -> bool:
@@ -286,7 +286,7 @@ def _has_valid_positional_setitem_indexer(self, indexer) -> bool:
bool
"""
if isinstance(indexer, dict):
raise IndexError("{0} cannot enlarge its target object".format(self.name))
raise IndexError(f"{self.name} cannot enlarge its target object")
else:
if not isinstance(indexer, tuple):
indexer = _tuplify(self.ndim, indexer)
@@ -300,13 +300,10 @@ def _has_valid_positional_setitem_indexer(self, indexer) -> bool:
elif is_integer(i):
if i >= len(ax):
raise IndexError(
"{name} cannot enlarge its target "
"object".format(name=self.name)
f"{self.name} cannot enlarge its target object"
)
elif isinstance(i, dict):
raise IndexError(
"{name} cannot enlarge its target object".format(name=self.name)
)
raise IndexError(f"{self.name} cannot enlarge its target object")

return True

@@ -1167,16 +1164,14 @@ def _validate_read_indexer(
if missing:
if missing == len(indexer):
raise KeyError(
"None of [{key}] are in the [{axis}]".format(
key=key, axis=self.obj._get_axis_name(axis)
)
f"None of [{key}] are in the [{self.obj._get_axis_name(axis)}]"
Member (review comment): nitpick: can you define axis_name = self.obj._get_axis_name(axis) on the previous line and use axis_name inside the fstring?

Contributor Author: Done.
)
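The suggestion above is simply to hoist the method call out of the f-string into a local variable before raising. A runnable sketch of the idea, using a stand-in class since the real code lives inside the pandas indexing machinery (names below are illustrative, not the merged pandas code):

```python
# Stand-in for the pandas indexer: bind the computed axis name to a local
# variable first, then interpolate the local inside the f-string.
class _Demo:
    def _get_axis_name(self, axis: int) -> str:
        return "columns" if axis == 1 else "index"

    def _raise_missing(self, key, axis: int) -> None:
        axis_name = self._get_axis_name(axis)  # hoisted out of the f-string
        raise KeyError(f"None of [{key}] are in the [{axis_name}]")


try:
    _Demo()._raise_missing(["a", "b"], axis=1)
except KeyError as exc:
    # KeyError reprs its message, so this prints it wrapped in quotes.
    print(exc)
```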

# We (temporarily) allow for some missing keys with .loc, except in
# some cases (e.g. setting) in which "raise_missing" will be False
if not (self.name == "loc" and not raise_missing):
not_found = list(set(key) - set(ax))
raise KeyError("{} not in index".format(not_found))
raise KeyError(f"{not_found} not in index")

# we skip the warning on Categorical/Interval
# as this check is actually done (check for
@@ -1905,17 +1900,14 @@ def _validate_key(self, key, axis: int):

# check that the key has a numeric dtype
if not is_numeric_dtype(arr.dtype):
raise IndexError(
".iloc requires numeric indexers, got {arr}".format(arr=arr)
)
raise IndexError(f".iloc requires numeric indexers, got {arr}")

# check that the key does not exceed the maximum size of the index
if len(arr) and (arr.max() >= len_axis or arr.min() < -len_axis):
raise IndexError("positional indexers are out-of-bounds")
else:
raise ValueError(
"Can only index by location with "
"a [{types}]".format(types=self._valid_types)
"Can only index by location with " f"a [{self._valid_types}]"
Member (review comment): extra space crept in, also down on 2059 (I'm going to stop pointing those out now)

Contributor Author (@baevpetr, Dec 18, 2019): My fault. Missed them. Working... Expected such things to be fixed by a linter.
)
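The "extra space" remark presumably refers to spacing at the seam of the implicit string concatenation in the line above: when a message is split between a plain literal and an f-string fragment, a stray or doubled space at the boundary is easy to introduce and easy to miss in review. A small sketch of why collapsing the pieces into a single f-string is the safer form (valid_types is a hypothetical stand-in for self._valid_types):

```python
valid_types = "integer, integer slice"

# Implicit concatenation: the space lives at the boundary between two literals,
# so an extra or missing space at the seam is hard to spot.
concatenated = "Can only index by location with " f"a [{valid_types}]"

# Single f-string: the whole message, including its spacing, reads in one piece.
single = f"Can only index by location with a [{valid_types}]"

assert concatenated == single
```

As for the author's wish for linter support: third-party flake8 plugins such as flake8-implicit-str-concat can flag implicitly concatenated literals, which is roughly the automated check being asked for here.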

def _has_valid_setitem_indexer(self, indexer):
@@ -2064,8 +2056,7 @@ def _convert_to_indexer(self, obj, axis: int, raise_missing: bool = False):
return obj
except ValueError:
raise ValueError(
"Can only index by location with "
"a [{types}]".format(types=self._valid_types)
"Can only index by location with " f"a [{self._valid_types}]"
)


@@ -2327,7 +2318,7 @@ def check_bool_indexer(index: Index, key) -> np.ndarray:
# GH26658
if len(result) != len(index):
raise IndexError(
"Item wrong length {} instead of {}.".format(len(result), len(index))
f"Item wrong length {len(result)} instead of {len(index)}."
)

return result
64 changes: 26 additions & 38 deletions pandas/core/internals/blocks.py
@@ -115,8 +115,8 @@ def __init__(self, values, placement, ndim=None):

if self._validate_ndim and self.ndim and len(self.mgr_locs) != len(self.values):
raise ValueError(
"Wrong number of items passed {val}, placement implies "
"{mgr}".format(val=len(self.values), mgr=len(self.mgr_locs))
f"Wrong number of items passed {len(self.values)}, "
f"placement implies {len(self.mgr_locs)}"
)

def _check_ndim(self, values, ndim):
@@ -144,9 +144,10 @@ def _check_ndim(self, values, ndim):
ndim = values.ndim

if self._validate_ndim and values.ndim != ndim:
msg = "Wrong number of dimensions. values.ndim != ndim [{} != {}]"
raise ValueError(msg.format(values.ndim, ndim))

raise ValueError(
"Wrong number of dimensions. "
f"values.ndim != ndim [{values.ndim} != {ndim}]"
)
return ndim

@property
@@ -184,7 +185,7 @@ def is_categorical_astype(self, dtype):
if dtype is Categorical or dtype is CategoricalDtype:
# this is a pd.Categorical, but is not
# a valid type for astypeing
raise TypeError("invalid type {0} for astype".format(dtype))
raise TypeError(f"invalid type {dtype} for astype")

elif is_categorical_dtype(dtype):
return True
@@ -264,18 +265,14 @@ def __repr__(self) -> str:
name = type(self).__name__
if self._is_single_block:

result = "{name}: {len} dtype: {dtype}".format(
name=name, len=len(self), dtype=self.dtype
)
result = f"{name}: {len(self)} dtype: {self.dtype}"

else:

shape = " x ".join(pprint_thing(s) for s in self.shape)
result = "{name}: {index}, {shape}, dtype: {dtype}".format(
name=name,
index=pprint_thing(self.mgr_locs.indexer),
shape=shape,
dtype=self.dtype,
result = (
f"{name}: {pprint_thing(self.mgr_locs.indexer)}, "
f"{shape}, dtype: {self.dtype}"
)

return result
@@ -329,7 +326,7 @@ def ftype(self):
dtype = self.dtype.subtype
else:
dtype = self.dtype
return "{dtype}:{ftype}".format(dtype=dtype, ftype=self._ftype)
return f"{dtype}:{self._ftype}"

def merge(self, other):
return _merge_blocks([self, other])
@@ -544,15 +541,15 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"):

if errors not in errors_legal_values:
invalid_arg = (
"Expected value of kwarg 'errors' to be one of {}. "
"Supplied value is '{}'".format(list(errors_legal_values), errors)
f"Expected value of kwarg 'errors' to be one of "
f"{list(errors_legal_values)}. Supplied value is '{errors}'"
)
raise ValueError(invalid_arg)

if inspect.isclass(dtype) and issubclass(dtype, ExtensionDtype):
msg = (
"Expected an instance of {}, but got the class instead. "
"Try instantiating 'dtype'.".format(dtype.__name__)
f"Expected an instance of {dtype.__name__}, "
f"but got the class instead. Try instantiating 'dtype'."
)
raise TypeError(msg)

@@ -613,15 +610,9 @@ def astype(self, dtype, copy: bool = False, errors: str = "raise"):
if newb.is_numeric and self.is_numeric:
if newb.shape != self.shape:
raise TypeError(
"cannot set astype for copy = [{copy}] for dtype "
"({dtype} [{shape}]) to different shape "
"({newb_dtype} [{newb_shape}])".format(
copy=copy,
dtype=self.dtype.name,
shape=self.shape,
newb_dtype=newb.dtype.name,
newb_shape=newb.shape,
)
f"cannot set astype for copy = [{copy}] for dtype "
f"({self.dtype.name} [{self.shape}]) to different shape "
f"({newb.dtype.name} [{newb.shape}])"
)
return newb

@@ -658,7 +649,7 @@ def to_native_types(self, slicer=None, na_rep="nan", quoting=None, **kwargs):

if not self.is_object and not quoting:
itemsize = writers.word_len(na_rep)
values = values.astype("<U{size}".format(size=itemsize))
values = values.astype(f"<U{itemsize}")
else:
values = np.array(values, dtype="object")

@@ -1045,8 +1036,7 @@ def coerce_to_target_dtype(self, other):
return self.astype(object)

raise AssertionError(
"possible recursion in "
"coerce_to_target_dtype: {} {}".format(self, other)
f"possible recursion in coerce_to_target_dtype: {self} {other}"
)

elif self.is_timedelta or is_timedelta64_dtype(dtype):
@@ -1056,8 +1046,7 @@ def coerce_to_target_dtype(self, other):
return self.astype(object)

raise AssertionError(
"possible recursion in "
"coerce_to_target_dtype: {} {}".format(self, other)
f"possible recursion in coerce_to_target_dtype: {self} {other}"
)

try:
@@ -1202,8 +1191,7 @@ def _interpolate(
if method in ("krogh", "piecewise_polynomial", "pchip"):
if not index.is_monotonic:
raise ValueError(
"{0} interpolation requires that the "
"index be monotonic.".format(method)
f"{method} interpolation requires that the index be monotonic."
)
# process 1-d slices in the axis direction

@@ -1585,15 +1573,15 @@ def iget(self, col):
if self.ndim == 2 and isinstance(col, tuple):
col, loc = col
if not com.is_null_slice(col) and col != 0:
raise IndexError("{0} only contains one item".format(self))
raise IndexError(f"{self} only contains one item")
elif isinstance(col, slice):
if col != slice(None):
raise NotImplementedError(col)
return self.values[[loc]]
return self.values[loc]
else:
if col != 0:
raise IndexError("{0} only contains one item".format(self))
raise IndexError(f"{self} only contains one item")
return self.values

def should_store(self, value):
@@ -2312,7 +2300,7 @@ def _slice(self, slicer):
if isinstance(slicer, tuple):
col, loc = slicer
if not com.is_null_slice(col) and col != 0:
raise IndexError("{0} only contains one item".format(self))
raise IndexError(f"{self} only contains one item")
return self.values[loc]
return self.values[slicer]

13 changes: 6 additions & 7 deletions pandas/core/internals/construction.py
@@ -167,8 +167,7 @@ def init_ndarray(values, index, columns, dtype=None, copy=False):
except Exception as orig:
# e.g. ValueError when trying to cast object dtype to float64
raise ValueError(
"failed to cast to '{dtype}' (Exception "
"was: {orig})".format(dtype=dtype, orig=orig)
f"failed to cast to '{dtype}' (Exception " f"was: {orig})"
) from orig

index, columns = _get_axes(*values.shape, index=index, columns=columns)
@@ -365,8 +364,8 @@ def extract_index(data):
if have_series:
if lengths[0] != len(index):
msg = (
"array length {length} does not match index "
"length {idx_len}".format(length=lengths[0], idx_len=len(index))
f"array length {lengths[0]} does not match index "
f"length {len(index)}"
)
raise ValueError(msg)
else:
@@ -401,7 +400,7 @@ def get_names_from_index(data):
if n is not None:
index[i] = n
else:
index[i] = "Unnamed {count}".format(count=count)
index[i] = f"Unnamed {count}"
count += 1

return index
@@ -571,8 +570,8 @@ def _convert_object_array(content, columns, coerce_float=False, dtype=None):
if len(columns) != len(content): # pragma: no cover
# caller's responsibility to check for this...
raise AssertionError(
"{col:d} columns passed, passed data had "
"{con} columns".format(col=len(columns), con=len(content))
f"{len(columns)} columns passed, passed data had "
f"{len(content)} columns"
)

# provide soft conversion of object dtypes
28 changes: 13 additions & 15 deletions pandas/core/internals/managers.py
@@ -132,8 +132,8 @@ def __init__(
for block in blocks:
if self.ndim != block.ndim:
raise AssertionError(
"Number of Block dimensions ({block}) must equal "
"number of axes ({self})".format(block=block.ndim, self=self.ndim)
f"Number of Block dimensions ({block.ndim}) must equal "
f"number of axes ({self.ndim})"
)

if do_integrity_check:
@@ -176,8 +176,8 @@ def set_axis(self, axis, new_labels):

if new_len != old_len:
raise ValueError(
"Length mismatch: Expected axis has {old} elements, new "
"values have {new} elements".format(old=old_len, new=new_len)
f"Length mismatch: Expected axis has {old_len} elements, new "
f"values have {new_len} elements"
)

self.axes[axis] = new_labels
@@ -319,12 +319,12 @@ def __repr__(self) -> str:
output = type(self).__name__
for i, ax in enumerate(self.axes):
if i == 0:
output += "\nItems: {ax}".format(ax=ax)
output += f"\nItems: {ax}"
else:
output += "\nAxis {i}: {ax}".format(i=i, ax=ax)
output += f"\nAxis {i}: {ax}"

for block in self.blocks:
output += "\n{block}".format(block=pprint_thing(block))
output += f"\n{pprint_thing(block)}"
return output

def _verify_integrity(self):
@@ -336,8 +336,8 @@ def _verify_integrity(self):
if len(self.items) != tot_items:
raise AssertionError(
"Number of manager items must equal union of "
"block items\n# manager items: {0}, # "
"tot_items: {1}".format(len(self.items), tot_items)
f"block items\n# manager items: {len(self.items)}, # "
f"tot_items: {tot_items}"
)

def apply(self, f: str, filter=None, **kwargs):
@@ -1140,7 +1140,7 @@ def insert(self, loc: int, item, value, allow_duplicates: bool = False):
"""
if not allow_duplicates and item in self.items:
# Should this be a different kind of error??
raise ValueError("cannot insert {}, already exists".format(item))
raise ValueError(f"cannot insert {item}, already exists")

if not isinstance(loc, int):
raise TypeError("loc must be int")
@@ -1661,9 +1661,7 @@ def construction_error(tot_items, block_shape, axes, e=None):
raise e
if block_shape[0] == 0:
raise ValueError("Empty data passed with indices specified.")
raise ValueError(
"Shape of passed values is {0}, indices imply {1}".format(passed, implied)
)
raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")


# -----------------------------------------------------------------------
@@ -1899,10 +1897,10 @@ def _compare_or_regex_search(a, b, regex=False):
type_names = [type(a).__name__, type(b).__name__]

if is_a_array:
type_names[0] = "ndarray(dtype={dtype})".format(dtype=a.dtype)
type_names[0] = f"ndarray(dtype={a.dtype})"

if is_b_array:
type_names[1] = "ndarray(dtype={dtype})".format(dtype=b.dtype)
type_names[1] = f"ndarray(dtype={b.dtype})"

raise TypeError(
f"Cannot compare types {repr(type_names[0])} and {repr(type_names[1])}"