Skip to content

Commit 3577b5a

Browse files
jbrockmendel authored and WillAyd committed
CLN: use stdlib Iterator instead of BaseIterator (#30370)
1 parent f0cb545 commit 3577b5a

File tree

6 files changed

+18
-28
lines changed

6 files changed

+18
-28
lines changed

pandas/io/common.py

+4-15
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
import bz2
44
import codecs
5+
from collections.abc import Iterator
56
import gzip
67
from io import BufferedIOBase, BytesIO
78
import mmap
@@ -49,18 +50,6 @@
4950
_VALID_URLS.discard("")
5051

5152

52-
class BaseIterator:
53-
"""Subclass this and provide a "__next__()" method to obtain an iterator.
54-
Useful only when the object being iterated is non-reusable (e.g. OK for a
55-
parser, not for an in-memory table, yes for its iterator)."""
56-
57-
def __iter__(self) -> "BaseIterator":
58-
return self
59-
60-
def __next__(self):
61-
raise AbstractMethodError(self)
62-
63-
6453
def is_url(url) -> bool:
6554
"""
6655
Check to see if a URL has a valid protocol.
@@ -515,7 +504,7 @@ def closed(self):
515504
return self.fp is None
516505

517506

518-
class _MMapWrapper(BaseIterator):
507+
class _MMapWrapper(Iterator):
519508
"""
520509
Wrapper for the Python's mmap class so that it can be properly read in
521510
by Python's csv.reader class.
@@ -552,7 +541,7 @@ def __next__(self) -> str:
552541
return newline
553542

554543

555-
class UTF8Recoder(BaseIterator):
544+
class UTF8Recoder(Iterator):
556545
"""
557546
Iterator that reads an encoded stream and re-encodes the input to UTF-8
558547
"""
@@ -566,7 +555,7 @@ def read(self, bytes: int = -1) -> bytes:
566555
def readline(self) -> bytes:
567556
return self.reader.readline().encode("utf-8")
568557

569-
def next(self) -> bytes:
558+
def __next__(self) -> bytes:
570559
return next(self.reader).encode("utf-8")
571560

572561
def close(self):

pandas/io/json/_json.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
from collections import OrderedDict
2+
from collections.abc import Iterator
23
import functools
34
from io import StringIO
45
from itertools import islice
@@ -19,7 +20,6 @@
1920
from pandas.core.reshape.concat import concat
2021

2122
from pandas.io.common import (
22-
BaseIterator,
2323
get_filepath_or_buffer,
2424
get_handle,
2525
infer_compression,
@@ -616,7 +616,7 @@ def read_json(
616616
return result
617617

618618

619-
class JsonReader(BaseIterator):
619+
class JsonReader(Iterator):
620620
"""
621621
JsonReader provides an interface for reading in a JSON file.
622622

pandas/io/parsers.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
"""
44

55
from collections import defaultdict
6+
from collections.abc import Iterator
67
import csv
78
import datetime
89
from io import StringIO
@@ -62,7 +63,6 @@
6263
from pandas.core.tools import datetimes as tools
6364

6465
from pandas.io.common import (
65-
BaseIterator,
6666
UTF8Recoder,
6767
get_filepath_or_buffer,
6868
get_handle,
@@ -786,7 +786,7 @@ def read_fwf(
786786
return _read(filepath_or_buffer, kwds)
787787

788788

789-
class TextFileReader(BaseIterator):
789+
class TextFileReader(Iterator):
790790
"""
791791
792792
Passed dialect overrides any of the related parser options
@@ -3582,7 +3582,7 @@ def _get_col_names(colspec, columns):
35823582
return colnames
35833583

35843584

3585-
class FixedWidthReader(BaseIterator):
3585+
class FixedWidthReader(Iterator):
35863586
"""
35873587
A reader of fixed-width lines.
35883588
"""

pandas/io/sas/sas7bdat.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
Reference for binary data compression:
1414
http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1992/9210/ross/ross.htm
1515
"""
16+
from collections.abc import Iterator
1617
from datetime import datetime
1718
import struct
1819

@@ -22,7 +23,7 @@
2223

2324
import pandas as pd
2425

25-
from pandas.io.common import BaseIterator, get_filepath_or_buffer
26+
from pandas.io.common import get_filepath_or_buffer
2627
from pandas.io.sas._sas import Parser
2728
import pandas.io.sas.sas_constants as const
2829

@@ -36,7 +37,7 @@ class _column:
3637

3738

3839
# SAS7BDAT represents a SAS data file in SAS7BDAT format.
39-
class SAS7BDATReader(BaseIterator):
40+
class SAS7BDATReader(Iterator):
4041
"""
4142
Read SAS files in SAS7BDAT format.
4243

pandas/io/sas/sas_xport.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
88
https://support.sas.com/techsup/technote/ts140.pdf
99
"""
10-
10+
from collections.abc import Iterator
1111
from datetime import datetime
1212
from io import BytesIO
1313
import struct
@@ -19,7 +19,7 @@
1919

2020
import pandas as pd
2121

22-
from pandas.io.common import BaseIterator, get_filepath_or_buffer
22+
from pandas.io.common import get_filepath_or_buffer
2323

2424
_correct_line1 = (
2525
"HEADER RECORD*******LIBRARY HEADER RECORD!!!!!!!"
@@ -251,7 +251,7 @@ def _parse_float_vec(vec):
251251
return ieee
252252

253253

254-
class XportReader(BaseIterator):
254+
class XportReader(Iterator):
255255
__doc__ = _xport_reader_doc
256256

257257
def __init__(

pandas/io/stata.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
You can find more information on http://presbrey.mit.edu/PyDTA and
1010
http://www.statsmodels.org/devel/
1111
"""
12-
12+
from collections.abc import Iterator
1313
import datetime
1414
from io import BytesIO
1515
import os
@@ -44,7 +44,7 @@
4444
from pandas.core.frame import DataFrame
4545
from pandas.core.series import Series
4646

47-
from pandas.io.common import BaseIterator, get_filepath_or_buffer, stringify_path
47+
from pandas.io.common import get_filepath_or_buffer, stringify_path
4848

4949
_version_error = (
5050
"Version of given Stata file is not 104, 105, 108, "
@@ -1010,7 +1010,7 @@ def __init__(self):
10101010
)
10111011

10121012

1013-
class StataReader(StataParser, BaseIterator):
1013+
class StataReader(StataParser, Iterator):
10141014
__doc__ = _stata_reader_doc
10151015

10161016
def __init__(

0 commit comments

Comments (0)