[MRG] f-string updates for issue #29547 #31556

Merged · 21 commits · Feb 4, 2020
Changes from 20 commits
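
The diffs below all apply the same mechanical change: values interpolated through `str.format()` move inline into f-string placeholders. A hypothetical before/after pair (not taken from the PR) sketching the pattern, including the `!s` conversion flag and expressions computed inside the placeholder:

```python
obj = []

# Before: named placeholders filled in by str.format()
msg_old = "cannot concatenate object of type '{typ}'".format(typ=type(obj))

# After: the expression moves directly into the f-string placeholder
msg_new = f"cannot concatenate object of type '{type(obj)}'"
assert msg_old == msg_new

# Conversion flags such as !s carry over unchanged
key, level = "a", [1, 2]
assert "Key {key!s} not in level {level!s}".format(key=key, level=level) == (
    f"Key {key!s} not in level {level!s}"
)
```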
19 changes: 5 additions & 14 deletions pandas/core/reshape/concat.py
@@ -352,8 +352,8 @@ def __init__(
for obj in objs:
if not isinstance(obj, (Series, DataFrame)):
msg = (
"cannot concatenate object of type '{typ}'; "
"only Series and DataFrame objs are valid".format(typ=type(obj))
f"cannot concatenate object of type '{type(obj)}'; "
"only Series and DataFrame objs are valid"
)
raise TypeError(msg)

@@ -403,8 +403,7 @@ def __init__(
self._is_series = isinstance(sample, ABCSeries)
if not 0 <= axis <= sample.ndim:
raise AssertionError(
"axis must be between 0 and {ndim}, input was "
"{axis}".format(ndim=sample.ndim, axis=axis)
f"axis must be between 0 and {sample.ndim}, input was {axis}"
)

# if we have mixed ndims, then convert to highest ndim
@@ -622,11 +621,7 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiInde
try:
i = level.get_loc(key)
except KeyError:
raise ValueError(
"Key {key!s} not in level {level!s}".format(
key=key, level=level
)
)
raise ValueError(f"Key {key!s} not in level {level!s}")

to_concat.append(np.repeat(i, len(index)))
codes_list.append(np.concatenate(to_concat))
@@ -677,11 +672,7 @@ def _make_concat_multiindex(indexes, keys, levels=None, names=None) -> MultiInde

mask = mapped == -1
if mask.any():
raise ValueError(
"Values not found in passed level: {hlevel!s}".format(
hlevel=hlevel[mask]
)
)
raise ValueError(f"Values not found in passed level: {hlevel[mask]!s}")

new_codes.append(np.repeat(mapped, n))

8 changes: 2 additions & 6 deletions pandas/core/reshape/melt.py
@@ -88,9 +88,7 @@ def melt(
if len(frame.columns.names) == len(set(frame.columns.names)):
var_name = frame.columns.names
else:
var_name = [
"variable_{i}".format(i=i) for i in range(len(frame.columns.names))
]
var_name = [f"variable_{i}" for i in range(len(frame.columns.names))]
else:
var_name = [
frame.columns.name if frame.columns.name is not None else "variable"
@@ -417,9 +415,7 @@ def wide_to_long(
"""

def get_var_names(df, stub: str, sep: str, suffix: str) -> List[str]:
regex = r"^{stub}{sep}{suffix}$".format(
stub=re.escape(stub), sep=re.escape(sep), suffix=suffix
)
regex = fr"^{re.escape(stub)}{re.escape(sep)}{suffix}$"
pattern = re.compile(regex)
return [col for col in df.columns if pattern.match(col)]
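
Not part of the diff: a minimal, runnable sketch of the `fr"..."` pattern used in `get_var_names` above, working on a plain list of hypothetical column names instead of a DataFrame. Inside a raw f-string, `{...}` expressions are still interpolated while backslashes in the regex stay literal:

```python
import re
from typing import List

def get_var_names(columns: List[str], stub: str, sep: str, suffix: str) -> List[str]:
    # re.escape protects the stub and separator; the suffix is a regex fragment.
    regex = fr"^{re.escape(stub)}{re.escape(sep)}{suffix}$"
    pattern = re.compile(regex)
    return [col for col in columns if pattern.match(col)]

# Hypothetical wide-format columns with stub "ht", separator "_" and numeric suffixes
print(get_var_names(["famid", "birth", "ht_1", "ht_2"], "ht", "_", r"\d+"))
# ['ht_1', 'ht_2']
```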

68 changes: 32 additions & 36 deletions pandas/core/reshape/merge.py
@@ -611,8 +611,9 @@ def __init__(
if _left.columns.nlevels != _right.columns.nlevels:
msg = (
"merging between different levels can give an unintended "
"result ({left} levels on the left, {right} on the right)"
).format(left=_left.columns.nlevels, right=_right.columns.nlevels)
f"result ({_left.columns.nlevels} levels on the left, "
f"{_right.columns.nlevels} on the right)"
)
warnings.warn(msg, UserWarning)

self._validate_specification()
@@ -679,7 +680,7 @@ def _indicator_pre_merge(
if i in columns:
raise ValueError(
"Cannot use `indicator=True` option when "
"data contains a column named {name}".format(name=i)
f"data contains a column named {i}"
)
if self.indicator_name in columns:
raise ValueError(
@@ -831,7 +832,7 @@ def _maybe_add_join_keys(self, result, left_indexer, right_indexer):
else:
result.index = Index(key_col, name=name)
else:
result.insert(i, name or "key_{i}".format(i=i), key_col)
result.insert(i, name or f"key_{i}", key_col)

def _get_join_indexers(self):
""" return the join indexers """
@@ -1185,13 +1186,10 @@ def _validate_specification(self):
if len(common_cols) == 0:
raise MergeError(
"No common columns to perform merge on. "
"Merge options: left_on={lon}, right_on={ron}, "
"left_index={lidx}, right_index={ridx}".format(
lon=self.left_on,
ron=self.right_on,
lidx=self.left_index,
ridx=self.right_index,
)
f"Merge options: left_on={self.left_on}, "
f"right_on={self.right_on}, "
f"left_index={self.left_index}, "
f"right_index={self.right_index}"
)
if not common_cols.is_unique:
raise MergeError(f"Data columns not unique: {repr(common_cols)}")
@@ -1486,12 +1484,12 @@ def get_result(self):


def _asof_function(direction: str):
name = "asof_join_{dir}".format(dir=direction)
name = f"asof_join_{direction}"
return getattr(libjoin, name, None)


def _asof_by_function(direction: str):
name = "asof_join_{dir}_on_X_by_Y".format(dir=direction)
name = f"asof_join_{direction}_on_X_by_Y"
return getattr(libjoin, name, None)
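
Not part of the diff: a small self-contained sketch of the getattr-based dispatch that `_asof_function` and `_asof_by_function` perform, using a stand-in namespace in place of `pandas._libs.join`:

```python
from types import SimpleNamespace

# Stand-in for the compiled join module; only two directions are defined here.
fake_libjoin = SimpleNamespace(
    asof_join_backward=lambda: "backward join",
    asof_join_forward=lambda: "forward join",
)

def asof_function(direction: str):
    # The f-string builds the attribute name; getattr falls back to None
    # when no routine exists for the requested direction.
    return getattr(fake_libjoin, f"asof_join_{direction}", None)

print(asof_function("backward")())  # backward join
print(asof_function("nearest"))     # None
```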


@@ -1601,9 +1599,7 @@ def _validate_specification(self):

# check 'direction' is valid
if self.direction not in ["backward", "forward", "nearest"]:
raise MergeError(
"direction invalid: {direction}".format(direction=self.direction)
)
raise MergeError(f"direction invalid: {self.direction}")

@property
def _asof_key(self):
@@ -1628,17 +1624,13 @@ def _get_merge_keys(self):
# later with a ValueError, so we don't *need* to check
# for them here.
msg = (
"incompatible merge keys [{i}] {lkdtype} and "
"{rkdtype}, both sides category, but not equal ones".format(
i=i, lkdtype=repr(lk.dtype), rkdtype=repr(rk.dtype)
)
f"incompatible merge keys [{i}] {repr(lk.dtype)} and "
f"{repr(rk.dtype)}, both sides category, but not equal ones"
)
else:
msg = (
"incompatible merge keys [{i}] {lkdtype} and "
"{rkdtype}, must be the same type".format(
i=i, lkdtype=repr(lk.dtype), rkdtype=repr(rk.dtype)
)
f"incompatible merge keys [{i}] {repr(lk.dtype)} and "
f"{repr(rk.dtype)}, must be the same type"
)
raise MergeError(msg)

@@ -1651,10 +1643,8 @@ def _get_merge_keys(self):
lt = left_join_keys[-1]

msg = (
"incompatible tolerance {tolerance}, must be compat "
"with type {lkdtype}".format(
tolerance=type(self.tolerance), lkdtype=repr(lt.dtype)
)
f"incompatible tolerance {type(self.tolerance)}, must be compat "
f"with type {repr(lt.dtype)}"
)

if needs_i8_conversion(lt):
@@ -1680,8 +1670,11 @@ def _get_merge_keys(self):

# validate allow_exact_matches
if not is_bool(self.allow_exact_matches):
msg = "allow_exact_matches must be boolean, passed {passed}"
raise MergeError(msg.format(passed=self.allow_exact_matches))
msg = (
"allow_exact_matches must be boolean, "
f"passed {self.allow_exact_matches}"
)
raise MergeError(msg)

return left_join_keys, right_join_keys, join_names

@@ -1708,20 +1701,23 @@ def flip(xs):
tolerance = self.tolerance

# we require sortedness and non-null values in the join keys
msg_sorted = "{side} keys must be sorted"
msg_missings = "Merge keys contain null values on {side} side"
def _msg_sorted(side):
return f"{side} keys must be sorted"

def _msg_missings(side):
return f"Merge keys contain null values on {side} side"

if not Index(left_values).is_monotonic:
if isna(left_values).any():
raise ValueError(msg_missings.format(side="left"))
raise ValueError(_msg_missings(side="left"))
else:
raise ValueError(msg_sorted.format(side="left"))
raise ValueError(_msg_sorted(side="left"))

if not Index(right_values).is_monotonic:
if isna(right_values).any():
raise ValueError(msg_missings.format(side="right"))
raise ValueError(_msg_missings(side="right"))
else:
raise ValueError(msg_sorted.format(side="right"))
raise ValueError(_msg_sorted(side="right"))

# initial type conversion as needed
if needs_i8_conversion(left_values):
8 changes: 3 additions & 5 deletions pandas/core/reshape/pivot.py
@@ -200,7 +200,7 @@ def _add_margins(
if not isinstance(margins_name, str):
raise ValueError("margins_name argument must be a string")

msg = 'Conflicting name "{name}" in margins'.format(name=margins_name)
msg = f'Conflicting name "{margins_name}" in margins'
for level in table.index.names:
if margins_name in table.index.get_level_values(level):
raise ValueError(msg)
@@ -650,9 +650,7 @@ def _normalize(table, normalize, margins: bool, margins_name="All"):
if (margins_name not in table.iloc[-1, :].name) | (
margins_name != table.iloc[:, -1].name
):
raise ValueError(
"{mname} not in pivoted DataFrame".format(mname=margins_name)
)
raise ValueError(f"{margins_name} not in pivoted DataFrame")
column_margin = table.iloc[:-1, -1]
index_margin = table.iloc[-1, :-1]

@@ -702,7 +700,7 @@ def _get_names(arrs, names, prefix: str = "row"):
if isinstance(arr, ABCSeries) and arr.name is not None:
names.append(arr.name)
else:
names.append("{prefix}_{i}".format(prefix=prefix, i=i))
names.append(f"{prefix}_{i}")
else:
if len(names) != len(arrs):
raise AssertionError("arrays and names must have the same length")
13 changes: 5 additions & 8 deletions pandas/core/reshape/reshape.py
@@ -873,15 +873,13 @@ def get_dummies(

# validate prefixes and separator to avoid silently dropping cols
def check_len(item, name):
len_msg = (
"Length of '{name}' ({len_item}) did not match the "
"length of the columns being encoded ({len_enc})."
)

if is_list_like(item):
if not len(item) == data_to_encode.shape[1]:
len_msg = len_msg.format(
name=name, len_item=len(item), len_enc=data_to_encode.shape[1]
len_msg = (
f"Length of '{name}' ({len(item)}) did not match the "
"length of the columns being encoded "
f"({data_to_encode.shape[1]})."
)
raise ValueError(len_msg)

@@ -990,8 +988,7 @@ def get_empty_frame(data) -> DataFrame:

# PY2 embedded unicode, gh-22084
def _make_col_name(prefix, prefix_sep, level) -> str:
fstr = "{prefix}{prefix_sep}{level}"
return fstr.format(prefix=prefix, prefix_sep=prefix_sep, level=level)
return f"{prefix}{prefix_sep}{level}"

dummy_cols = [_make_col_name(prefix, prefix_sep, level) for level in levels]

8 changes: 3 additions & 5 deletions pandas/tests/reshape/merge/test_merge.py
@@ -370,11 +370,9 @@ def test_no_overlap_more_informative_error(self):
df2 = DataFrame({"y": ["b", "c"]}, index=[dt, dt])

msg = (
"No common columns to perform merge on. "
"Merge options: left_on={lon}, right_on={ron}, "
"left_index={lidx}, right_index={ridx}".format(
lon=None, ron=None, lidx=False, ridx=False
)
"No common columns to perform merge on. "
f"Merge options: left_on={None}, right_on={None}, "
f"left_index={False}, right_index={False}"
)

with pytest.raises(MergeError, match=msg):
Expand Down