Skip to content

Commit f896dc7

Browse files
committed
reformat
1 parent c60a123 commit f896dc7

File tree

3 files changed: +47 additions, −27 deletions

pandas/core/tools/datetimes.py

+22-10
Original file line numberDiff line numberDiff line change
@@ -68,8 +68,9 @@ def _guess_datetime_format_for_array(arr, **kwargs):
6868
return _guess_datetime_format(arr[non_nan_elements[0]], **kwargs)
6969

7070

71-
def should_cache(arg: ArrayConvertible, unique_share: float = 0.7,
72-
check_count: Optional[int] = None) -> bool:
71+
def should_cache(
72+
arg: ArrayConvertible, unique_share: float = 0.7, check_count: Optional[int] = None
73+
) -> bool:
7374
"""
7475
Decides whether to do caching.
7576
@@ -109,12 +110,13 @@ def should_cache(arg: ArrayConvertible, unique_share: float = 0.7,
109110
else:
110111
check_count = 500
111112
else:
112-
assert 0 <= check_count <= len(arg), \
113-
'check_count must be in next bounds: [0; len(arg)]'
113+
assert (
114+
0 <= check_count <= len(arg)
115+
), "check_count must be in next bounds: [0; len(arg)]"
114116
if check_count == 0:
115117
return False
116118

117-
assert 0 < unique_share < 1, 'unique_share must be in next bounds: (0; 1)'
119+
assert 0 < unique_share < 1, "unique_share must be in next bounds: (0; 1)"
118120

119121
unique_elements = unique(arg[:check_count])
120122
if len(unique_elements) > check_count * unique_share:
@@ -554,11 +556,21 @@ def _adjust_to_origin(arg, origin, unit):
554556
return arg
555557

556558

557-
@deprecate_kwarg(old_arg_name='box', new_arg_name=None)
558-
def to_datetime(arg, errors='raise', dayfirst=False, yearfirst=False,
559-
utc=None, box=True, format=None, exact=True,
560-
unit=None, infer_datetime_format=False, origin='unix',
561-
cache=True):
559+
@deprecate_kwarg(old_arg_name="box", new_arg_name=None)
560+
def to_datetime(
561+
arg,
562+
errors="raise",
563+
dayfirst=False,
564+
yearfirst=False,
565+
utc=None,
566+
box=True,
567+
format=None,
568+
exact=True,
569+
unit=None,
570+
infer_datetime_format=False,
571+
origin="unix",
572+
cache=True,
573+
):
562574
"""
563575
Convert argument to datetime.
564576

pandas/tests/indexes/datetimes/test_tools.py

+15-10
Original file line numberDiff line numberDiff line change
@@ -2243,19 +2243,24 @@ def test_arg_tz_ns_unit(self, offset, utc, exp):
22432243
tm.assert_index_equal(result, expected)
22442244

22452245

2246-
@pytest.mark.parametrize('listlike,do_caching', [
2247-
([1, 2, 3, 4, 5, 6, 7, 8, 9, 0], False),
2248-
([1, 1, 1, 1, 4, 5, 6, 7, 8, 9], True)
2249-
])
2246+
@pytest.mark.parametrize(
2247+
"listlike,do_caching",
2248+
[([1, 2, 3, 4, 5, 6, 7, 8, 9, 0], False), ([1, 1, 1, 1, 4, 5, 6, 7, 8, 9], True)],
2249+
)
22502250
def test_should_cache(listlike, do_caching):
2251-
assert tools.should_cache(listlike, check_count=len(listlike),
2252-
unique_share=0.7) == do_caching
2251+
assert (
2252+
tools.should_cache(listlike, check_count=len(listlike), unique_share=0.7)
2253+
== do_caching
2254+
)
22532255

22542256

2255-
@pytest.mark.parametrize('unique_share,check_count, err_message', [
2256-
(0.5, 11, r'check_count must be in next bounds: \[0; len\(arg\)\]'),
2257-
(10, 2, r'unique_share must be in next bounds: \(0; 1\)')
2258-
])
2257+
@pytest.mark.parametrize(
2258+
"unique_share,check_count, err_message",
2259+
[
2260+
(0.5, 11, r"check_count must be in next bounds: \[0; len\(arg\)\]"),
2261+
(10, 2, r"unique_share must be in next bounds: \(0; 1\)"),
2262+
],
2263+
)
22592264
def test_should_cache_errors(unique_share, check_count, err_message):
22602265
arg = [5] * 10
22612266

pandas/tests/io/parser/test_parse_dates.py

+10-7
Original file line numberDiff line numberDiff line change
@@ -1096,18 +1096,21 @@ def test_read_with_parse_dates_invalid_type(all_parsers, parse_dates):
10961096

10971097

10981098
@pytest.mark.parametrize("cache_dates", [True, False])
1099-
@pytest.mark.parametrize("value", [
1100-
'nan', '0', ''])
1099+
@pytest.mark.parametrize("value", ["nan", "0", ""])
11011100
def test_bad_date_parse(all_parsers, cache_dates, value):
11021101
# if we have an invalid date make sure that we handle this with
11031102
# and w/o the cache properly
11041103
parser = all_parsers
1105-
s = StringIO(('%s,\n' % value) * 50000)
1104+
s = StringIO(("%s,\n" % value) * 50000)
11061105

1107-
parser.read_csv(s,
1108-
header=None, names=['foo', 'bar'], parse_dates=['foo'],
1109-
infer_datetime_format=False,
1110-
cache_dates=cache_dates)
1106+
parser.read_csv(
1107+
s,
1108+
header=None,
1109+
names=["foo", "bar"],
1110+
parse_dates=["foo"],
1111+
infer_datetime_format=False,
1112+
cache_dates=cache_dates,
1113+
)
11111114

11121115

11131116
def test_parse_dates_empty_string(all_parsers):

0 commit comments

Comments (0)