Skip to content

CLN: Put exit_stack inside _query_iterator. #51125

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Feb 3, 2023
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
132 changes: 62 additions & 70 deletions pandas/io/sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,14 +75,6 @@
# -- Helper functions


def _cleanup_after_generator(generator, exit_stack: ExitStack):
    """Yield every item from *generator*, then close *exit_stack*.

    Wraps a chunked-result generator so that whatever resources were
    registered on ``exit_stack`` (presumably the DB connection/result set
    opened by the caller — confirm at call sites) are released when the
    generator is exhausted, raises, or is closed early by the consumer.
    """
    try:
        # ``yield from`` delegates iteration (including close/throw) to the
        # wrapped generator; any exception or early GeneratorExit falls
        # through to the ``finally`` below.
        yield from generator
    finally:
        # Runs on normal exhaustion, on error, and on early close alike.
        exit_stack.close()


def _convert_params(sql, params):
"""Convert SQL and params args to DBAPI2.0 compliant format."""
args = [sql]
Expand Down Expand Up @@ -1093,6 +1085,7 @@ def insert(
def _query_iterator(
self,
result,
exit_stack: ExitStack,
chunksize: str | None,
columns,
coerce_float: bool = True,
Expand All @@ -1101,28 +1094,29 @@ def _query_iterator(
):
"""Return generator through chunked result set."""
has_read_data = False
while True:
data = result.fetchmany(chunksize)
if not data:
if not has_read_data:
yield DataFrame.from_records(
[], columns=columns, coerce_float=coerce_float
)
break
with exit_stack:
while True:
data = result.fetchmany(chunksize)
if not data:
if not has_read_data:
yield DataFrame.from_records(
[], columns=columns, coerce_float=coerce_float
)
break

has_read_data = True
self.frame = _convert_arrays_to_dataframe(
data, columns, coerce_float, use_nullable_dtypes
)
has_read_data = True
self.frame = _convert_arrays_to_dataframe(
data, columns, coerce_float, use_nullable_dtypes
)

self._harmonize_columns(
parse_dates=parse_dates, use_nullable_dtypes=use_nullable_dtypes
)
self._harmonize_columns(
parse_dates=parse_dates, use_nullable_dtypes=use_nullable_dtypes
)

if self.index is not None:
self.frame.set_index(self.index, inplace=True)
if self.index is not None:
self.frame.set_index(self.index, inplace=True)

yield self.frame
yield self.frame

def read(
self,
Expand All @@ -1147,16 +1141,14 @@ def read(
column_names = result.keys()

if chunksize is not None:
return _cleanup_after_generator(
self._query_iterator(
result,
chunksize,
column_names,
coerce_float=coerce_float,
parse_dates=parse_dates,
use_nullable_dtypes=use_nullable_dtypes,
),
return self._query_iterator(
result,
exit_stack,
chunksize,
column_names,
coerce_float=coerce_float,
parse_dates=parse_dates,
use_nullable_dtypes=use_nullable_dtypes,
)
else:
data = result.fetchall()
Expand Down Expand Up @@ -1693,6 +1685,7 @@ def read_table(
@staticmethod
def _query_iterator(
result,
exit_stack: ExitStack,
chunksize: int,
columns,
index_col=None,
Expand All @@ -1703,31 +1696,32 @@ def _query_iterator(
):
"""Return generator through chunked result set"""
has_read_data = False
while True:
data = result.fetchmany(chunksize)
if not data:
if not has_read_data:
yield _wrap_result(
[],
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
)
break
with exit_stack:
while True:
data = result.fetchmany(chunksize)
if not data:
if not has_read_data:
yield _wrap_result(
[],
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
)
break

has_read_data = True
yield _wrap_result(
data,
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
)
has_read_data = True
yield _wrap_result(
data,
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
)

def read_query(
self,
Expand Down Expand Up @@ -1793,18 +1787,16 @@ def read_query(

if chunksize is not None:
self.returns_generator = True
return _cleanup_after_generator(
self._query_iterator(
result,
chunksize,
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
),
return self._query_iterator(
result,
self.exit_stack,
chunksize,
columns,
index_col=index_col,
coerce_float=coerce_float,
parse_dates=parse_dates,
dtype=dtype,
use_nullable_dtypes=use_nullable_dtypes,
)
else:
data = result.fetchall()
Expand Down