Skip to content

Commit 1539aed

Browse files
cdcadman (Chuck Cadman)
and
Chuck Cadman
authored
CLN: Put exit_stack inside _query_iterator. (#51125)
Co-authored-by: Chuck Cadman <charles.cadman@standard.com>
1 parent 92e2379 commit 1539aed

File tree

1 file changed

+62
-70
lines changed

1 file changed

+62
-70
lines changed

pandas/io/sql.py

Lines changed: 62 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -75,14 +75,6 @@
7575
# -- Helper functions
7676

7777

78-
def _cleanup_after_generator(generator, exit_stack: ExitStack):
79-
"""Does the cleanup after iterating through the generator."""
80-
try:
81-
yield from generator
82-
finally:
83-
exit_stack.close()
84-
85-
8678
def _convert_params(sql, params):
8779
"""Convert SQL and params args to DBAPI2.0 compliant format."""
8880
args = [sql]
@@ -1093,6 +1085,7 @@ def insert(
10931085
def _query_iterator(
10941086
self,
10951087
result,
1088+
exit_stack: ExitStack,
10961089
chunksize: str | None,
10971090
columns,
10981091
coerce_float: bool = True,
@@ -1101,28 +1094,29 @@ def _query_iterator(
11011094
):
11021095
"""Return generator through chunked result set."""
11031096
has_read_data = False
1104-
while True:
1105-
data = result.fetchmany(chunksize)
1106-
if not data:
1107-
if not has_read_data:
1108-
yield DataFrame.from_records(
1109-
[], columns=columns, coerce_float=coerce_float
1110-
)
1111-
break
1097+
with exit_stack:
1098+
while True:
1099+
data = result.fetchmany(chunksize)
1100+
if not data:
1101+
if not has_read_data:
1102+
yield DataFrame.from_records(
1103+
[], columns=columns, coerce_float=coerce_float
1104+
)
1105+
break
11121106

1113-
has_read_data = True
1114-
self.frame = _convert_arrays_to_dataframe(
1115-
data, columns, coerce_float, use_nullable_dtypes
1116-
)
1107+
has_read_data = True
1108+
self.frame = _convert_arrays_to_dataframe(
1109+
data, columns, coerce_float, use_nullable_dtypes
1110+
)
11171111

1118-
self._harmonize_columns(
1119-
parse_dates=parse_dates, use_nullable_dtypes=use_nullable_dtypes
1120-
)
1112+
self._harmonize_columns(
1113+
parse_dates=parse_dates, use_nullable_dtypes=use_nullable_dtypes
1114+
)
11211115

1122-
if self.index is not None:
1123-
self.frame.set_index(self.index, inplace=True)
1116+
if self.index is not None:
1117+
self.frame.set_index(self.index, inplace=True)
11241118

1125-
yield self.frame
1119+
yield self.frame
11261120

11271121
def read(
11281122
self,
@@ -1147,16 +1141,14 @@ def read(
11471141
column_names = result.keys()
11481142

11491143
if chunksize is not None:
1150-
return _cleanup_after_generator(
1151-
self._query_iterator(
1152-
result,
1153-
chunksize,
1154-
column_names,
1155-
coerce_float=coerce_float,
1156-
parse_dates=parse_dates,
1157-
use_nullable_dtypes=use_nullable_dtypes,
1158-
),
1144+
return self._query_iterator(
1145+
result,
11591146
exit_stack,
1147+
chunksize,
1148+
column_names,
1149+
coerce_float=coerce_float,
1150+
parse_dates=parse_dates,
1151+
use_nullable_dtypes=use_nullable_dtypes,
11601152
)
11611153
else:
11621154
data = result.fetchall()
@@ -1693,6 +1685,7 @@ def read_table(
16931685
@staticmethod
16941686
def _query_iterator(
16951687
result,
1688+
exit_stack: ExitStack,
16961689
chunksize: int,
16971690
columns,
16981691
index_col=None,
@@ -1703,31 +1696,32 @@ def _query_iterator(
17031696
):
17041697
"""Return generator through chunked result set"""
17051698
has_read_data = False
1706-
while True:
1707-
data = result.fetchmany(chunksize)
1708-
if not data:
1709-
if not has_read_data:
1710-
yield _wrap_result(
1711-
[],
1712-
columns,
1713-
index_col=index_col,
1714-
coerce_float=coerce_float,
1715-
parse_dates=parse_dates,
1716-
dtype=dtype,
1717-
use_nullable_dtypes=use_nullable_dtypes,
1718-
)
1719-
break
1699+
with exit_stack:
1700+
while True:
1701+
data = result.fetchmany(chunksize)
1702+
if not data:
1703+
if not has_read_data:
1704+
yield _wrap_result(
1705+
[],
1706+
columns,
1707+
index_col=index_col,
1708+
coerce_float=coerce_float,
1709+
parse_dates=parse_dates,
1710+
dtype=dtype,
1711+
use_nullable_dtypes=use_nullable_dtypes,
1712+
)
1713+
break
17201714

1721-
has_read_data = True
1722-
yield _wrap_result(
1723-
data,
1724-
columns,
1725-
index_col=index_col,
1726-
coerce_float=coerce_float,
1727-
parse_dates=parse_dates,
1728-
dtype=dtype,
1729-
use_nullable_dtypes=use_nullable_dtypes,
1730-
)
1715+
has_read_data = True
1716+
yield _wrap_result(
1717+
data,
1718+
columns,
1719+
index_col=index_col,
1720+
coerce_float=coerce_float,
1721+
parse_dates=parse_dates,
1722+
dtype=dtype,
1723+
use_nullable_dtypes=use_nullable_dtypes,
1724+
)
17311725

17321726
def read_query(
17331727
self,
@@ -1793,18 +1787,16 @@ def read_query(
17931787

17941788
if chunksize is not None:
17951789
self.returns_generator = True
1796-
return _cleanup_after_generator(
1797-
self._query_iterator(
1798-
result,
1799-
chunksize,
1800-
columns,
1801-
index_col=index_col,
1802-
coerce_float=coerce_float,
1803-
parse_dates=parse_dates,
1804-
dtype=dtype,
1805-
use_nullable_dtypes=use_nullable_dtypes,
1806-
),
1790+
return self._query_iterator(
1791+
result,
18071792
self.exit_stack,
1793+
chunksize,
1794+
columns,
1795+
index_col=index_col,
1796+
coerce_float=coerce_float,
1797+
parse_dates=parse_dates,
1798+
dtype=dtype,
1799+
use_nullable_dtypes=use_nullable_dtypes,
18081800
)
18091801
else:
18101802
data = result.fetchall()

0 commit comments

Comments (0)