75
75
# -- Helper functions
76
76
77
77
78
- def _cleanup_after_generator (generator , exit_stack : ExitStack ):
79
- """Does the cleanup after iterating through the generator."""
80
- try :
81
- yield from generator
82
- finally :
83
- exit_stack .close ()
84
-
85
-
86
78
def _convert_params (sql , params ):
87
79
"""Convert SQL and params args to DBAPI2.0 compliant format."""
88
80
args = [sql ]
@@ -1093,6 +1085,7 @@ def insert(
1093
1085
def _query_iterator(
    self,
    result,
    exit_stack: ExitStack,
    chunksize: int | None,
    columns,
    coerce_float: bool = True,
    # NOTE(review): the two parameters below are elided by the diff hunk
    # header in the pasted source; defaults reconstructed — confirm against
    # the full file.
    parse_dates=None,
    use_nullable_dtypes: bool = False,
):
    """Return a generator yielding DataFrames over a chunked result set.

    Parameters
    ----------
    result : DBAPI2 cursor / SQLAlchemy result
        Open result whose rows are consumed via ``fetchmany``.
    exit_stack : ExitStack
        Stack of resources (connection, result) to close when iteration
        finishes — entered here so cleanup happens even if the consumer
        abandons the generator.
    chunksize : int or None
        Number of rows fetched per ``fetchmany`` call.  Fixed: was
        annotated ``str | None``, which contradicts both DBAPI
        ``cursor.fetchmany(size)`` and the sibling static
        ``_query_iterator`` that annotates ``int``.
    columns : list of str
        Column labels for the constructed frames.
    coerce_float : bool, default True
        Attempt conversion of decimal-like values to float.
    parse_dates : optional
        Forwarded to ``self._harmonize_columns``.
    use_nullable_dtypes : bool, default False
        Use pandas nullable extension dtypes when building frames.

    Yields
    ------
    DataFrame
        One frame per fetched chunk; a single empty frame (with the
        requested columns) if the result set is empty.
    """
    has_read_data = False
    # Owning the exit stack here ties resource lifetime to the generator:
    # the stack unwinds when the loop breaks, an error escapes, or the
    # consumer closes the generator early.
    with exit_stack:
        while True:
            data = result.fetchmany(chunksize)
            if not data:
                if not has_read_data:
                    # Empty result set: still yield one empty, correctly
                    # labelled frame so callers always receive a frame.
                    yield DataFrame.from_records(
                        [], columns=columns, coerce_float=coerce_float
                    )
                break

            has_read_data = True
            self.frame = _convert_arrays_to_dataframe(
                data, columns, coerce_float, use_nullable_dtypes
            )

            # Apply dtype/date harmonisation against the table schema.
            self._harmonize_columns(
                parse_dates=parse_dates, use_nullable_dtypes=use_nullable_dtypes
            )

            if self.index is not None:
                self.frame.set_index(self.index, inplace=True)

            yield self.frame
1126
1120
1127
1121
def read (
1128
1122
self ,
@@ -1147,16 +1141,14 @@ def read(
1147
1141
column_names = result .keys ()
1148
1142
1149
1143
if chunksize is not None :
1150
- return _cleanup_after_generator (
1151
- self ._query_iterator (
1152
- result ,
1153
- chunksize ,
1154
- column_names ,
1155
- coerce_float = coerce_float ,
1156
- parse_dates = parse_dates ,
1157
- use_nullable_dtypes = use_nullable_dtypes ,
1158
- ),
1144
+ return self ._query_iterator (
1145
+ result ,
1159
1146
exit_stack ,
1147
+ chunksize ,
1148
+ column_names ,
1149
+ coerce_float = coerce_float ,
1150
+ parse_dates = parse_dates ,
1151
+ use_nullable_dtypes = use_nullable_dtypes ,
1160
1152
)
1161
1153
else :
1162
1154
data = result .fetchall ()
@@ -1693,6 +1685,7 @@ def read_table(
1693
1685
@staticmethod
def _query_iterator(
    result,
    exit_stack: ExitStack,
    chunksize: int,
    columns,
    index_col=None,
    # NOTE(review): parameters below are elided by the diff hunk header in
    # the pasted source; reconstructed from the body's keyword usage —
    # confirm defaults against the full file.
    coerce_float: bool = True,
    parse_dates=None,
    dtype=None,
    use_nullable_dtypes: bool = False,
):
    """Return generator through chunked result set.

    Fetches ``chunksize`` rows at a time from ``result`` and wraps each
    batch via ``_wrap_result``; the ``exit_stack`` is closed once
    iteration ends, however it ends.  An empty result set still yields a
    single empty, correctly-labelled result.
    """
    has_read_data = False
    # Entering the stack here means the connection/result are released
    # exactly when the generator finishes (or is abandoned).
    with exit_stack:
        # Walrus form: fetch and test in one step; an empty batch is
        # falsy and terminates the loop, same as the break-based loop.
        while data := result.fetchmany(chunksize):
            has_read_data = True
            yield _wrap_result(
                data,
                columns,
                index_col=index_col,
                coerce_float=coerce_float,
                parse_dates=parse_dates,
                dtype=dtype,
                use_nullable_dtypes=use_nullable_dtypes,
            )
        if not has_read_data:
            # No rows at all: emit one empty result so callers always
            # receive something iterable with the right columns.
            yield _wrap_result(
                [],
                columns,
                index_col=index_col,
                coerce_float=coerce_float,
                parse_dates=parse_dates,
                dtype=dtype,
                use_nullable_dtypes=use_nullable_dtypes,
            )
1731
1725
1732
1726
def read_query (
1733
1727
self ,
@@ -1793,18 +1787,16 @@ def read_query(
1793
1787
1794
1788
if chunksize is not None :
1795
1789
self .returns_generator = True
1796
- return _cleanup_after_generator (
1797
- self ._query_iterator (
1798
- result ,
1799
- chunksize ,
1800
- columns ,
1801
- index_col = index_col ,
1802
- coerce_float = coerce_float ,
1803
- parse_dates = parse_dates ,
1804
- dtype = dtype ,
1805
- use_nullable_dtypes = use_nullable_dtypes ,
1806
- ),
1790
+ return self ._query_iterator (
1791
+ result ,
1807
1792
self .exit_stack ,
1793
+ chunksize ,
1794
+ columns ,
1795
+ index_col = index_col ,
1796
+ coerce_float = coerce_float ,
1797
+ parse_dates = parse_dates ,
1798
+ dtype = dtype ,
1799
+ use_nullable_dtypes = use_nullable_dtypes ,
1808
1800
)
1809
1801
else :
1810
1802
data = result .fetchall ()
0 commit comments