Commit 7be1d10

TST: Replace ensure_clean with temp_file in 3 files (#62556)
1 parent 38a16a7 commit 7be1d10

File tree: 3 files changed, +37 lines, -38 lines

  pandas/tests/io/parser/test_parse_dates.py
  pandas/tests/io/parser/test_read_fwf.py
  pandas/tests/io/parser/test_unsupported.py
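The commit retires pandas' tm.ensure_clean context manager in favour of a temp_file pytest fixture in the three parser test files below. The fixture definition itself is not part of this diff; what follows is a minimal sketch of what such a fixture could look like, assuming it wraps pytest's built-in tmp_path (only the name temp_file comes from the diff, the rest is illustrative):

import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # Illustrative sketch only: hand the test a unique pathlib.Path inside
    # pytest's per-test temporary directory. pytest deletes tmp_path after
    # the test, so the explicit cleanup ensure_clean performed is not needed.
    path = tmp_path / f"{uuid.uuid4()}.txt"
    path.touch()
    return path

With a fixture of this shape, each "with tm.ensure_clean(...) as path:" block below collapses to "path = temp_file" plus a dedent of its body, which is exactly the pattern repeated in every hunk of this commit.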

pandas/tests/io/parser/test_parse_dates.py

Lines changed: 5 additions, 5 deletions

@@ -86,7 +86,7 @@ def test_date_col_as_index_col(all_parsers):


 @xfail_pyarrow
-def test_nat_parse(all_parsers):
+def test_nat_parse(all_parsers, temp_file):
     # see gh-3062
     parser = all_parsers
     df = DataFrame(
@@ -97,11 +97,11 @@ def test_nat_parse(all_parsers):
     )
     df.iloc[3:6, :] = np.nan

-    with tm.ensure_clean("__nat_parse_.csv") as path:
-        df.to_csv(path)
+    path = temp_file
+    df.to_csv(path)

-        result = parser.read_csv(path, index_col=0, parse_dates=["B"])
-        tm.assert_frame_equal(result, df)
+    result = parser.read_csv(path, index_col=0, parse_dates=["B"])
+    tm.assert_frame_equal(result, df)


 @skip_pyarrow
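For contrast, tm.ensure_clean, the helper being retired above, is a context manager that yields a temporary file path and removes the file when the block exits, which is why the migrated code also dedents. A rough, simplified sketch of that shape (not pandas' actual implementation):

import os
import tempfile
import uuid
from contextlib import contextmanager


@contextmanager
def ensure_clean(filename=None):
    # Simplified stand-in: yield a path in the system temp directory and
    # delete whatever the test wrote there once the with-block exits.
    path = os.path.join(tempfile.gettempdir(), filename or str(uuid.uuid4()))
    try:
        yield path
    finally:
        if os.path.exists(path):
            os.remove(path)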

pandas/tests/io/parser/test_read_fwf.py

Lines changed: 24 additions, 25 deletions

@@ -8,7 +8,6 @@
     BytesIO,
     StringIO,
 )
-from pathlib import Path

 import numpy as np
 import pytest
@@ -642,7 +641,7 @@ def test_default_delimiter():


 @pytest.mark.parametrize("infer", [True, False])
-def test_fwf_compression(compression_only, infer, compression_to_extension):
+def test_fwf_compression(compression_only, infer, compression_to_extension, temp_file):
     data = """1111111111
     2222222222
     3333333333""".strip()
@@ -655,17 +654,17 @@ def test_fwf_compression(compression_only, infer, compression_to_extension):

     data = bytes(data, encoding="utf-8")

-    with tm.ensure_clean(filename="tmp." + extension) as path:
-        tm.write_to_compressed(compression, path, data)
+    path = temp_file.parent / f"tmp.{extension}"
+    tm.write_to_compressed(compression, path, data)

-        if infer is not None:
-            kwargs["compression"] = "infer" if infer else compression
+    if infer is not None:
+        kwargs["compression"] = "infer" if infer else compression

-        result = read_fwf(path, **kwargs)
-        tm.assert_frame_equal(result, expected)
+    result = read_fwf(path, **kwargs)
+    tm.assert_frame_equal(result, expected)


-def test_binary_mode():
+def test_binary_mode(temp_file):
     """
     read_fwf supports opening files in binary mode.

@@ -676,31 +675,31 @@ def test_binary_mode():
     df_reference = DataFrame(
         [["bba", "bab", "b a"]], columns=["aaa", "aaa.1", "aaa.2"], index=[0]
     )
-    with tm.ensure_clean() as path:
-        Path(path).write_text(data, encoding="utf-8")
-        with open(path, "rb") as file:
-            df = read_fwf(file)
-            file.seek(0)
-            tm.assert_frame_equal(df, df_reference)
+    path = temp_file
+    path.write_text(data, encoding="utf-8")
+    with open(path, "rb") as file:
+        df = read_fwf(file)
+        file.seek(0)
+        tm.assert_frame_equal(df, df_reference)


 @pytest.mark.parametrize("memory_map", [True, False])
-def test_encoding_mmap(memory_map):
+def test_encoding_mmap(memory_map, temp_file):
     """
     encoding should be working, even when using a memory-mapped file.

     GH 23254.
     """
     encoding = "iso8859_1"
-    with tm.ensure_clean() as path:
-        Path(path).write_bytes(" 1 A Ä 2\n".encode(encoding))
-        df = read_fwf(
-            path,
-            header=None,
-            widths=[2, 2, 2, 2],
-            encoding=encoding,
-            memory_map=memory_map,
-        )
+    path = temp_file
+    path.write_bytes(" 1 A Ä 2\n".encode(encoding))
+    df = read_fwf(
+        path,
+        header=None,
+        widths=[2, 2, 2, 2],
+        encoding=encoding,
+        memory_map=memory_map,
+    )
     df_reference = DataFrame([[1, "A", "Ä", 2]])
     tm.assert_frame_equal(df, df_reference)
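The one replacement above that is not a plain "path = temp_file" is in test_fwf_compression: the file has to carry the compression-specific suffix, so the test writes to a sibling of temp_file named tmp.<extension> in the same directory. That matters because compression="infer" picks the codec from the file extension. A hypothetical, self-contained illustration for gzip only (the function name, directory argument, and data here are not from the test):

import gzip
from pathlib import Path

import pandas as pd


def roundtrip_inferred_gzip(tmp_dir: Path) -> pd.DataFrame:
    # The ".gz" suffix is what compression="infer" keys on when reading.
    path = tmp_dir / "tmp.gz"
    path.write_bytes(gzip.compress(b"1111111111\n2222222222\n3333333333"))
    return pd.read_fwf(path, compression="infer")

Assuming temp_file lives inside pytest's per-test temporary directory, the sibling file is removed together with that directory, so no explicit cleanup is needed.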

pandas/tests/io/parser/test_unsupported.py

Lines changed: 8 additions, 8 deletions

@@ -167,7 +167,7 @@ def test_on_bad_lines_callable_python_or_pyarrow(self, all_parsers):
             parser.read_csv(sio, on_bad_lines=bad_lines_func)


-def test_close_file_handle_on_invalid_usecols(all_parsers):
+def test_close_file_handle_on_invalid_usecols(all_parsers, temp_file):
     # GH 45384
     parser = all_parsers

@@ -176,13 +176,13 @@ def test_close_file_handle_on_invalid_usecols(all_parsers):
         # Raises pyarrow.lib.ArrowKeyError
         pytest.skip(reason="https://github.com/apache/arrow/issues/38676")

-    with tm.ensure_clean("test.csv") as fname:
-        Path(fname).write_text("col1,col2\na,b\n1,2", encoding="utf-8")
-        with tm.assert_produces_warning(False):
-            with pytest.raises(error, match="col3"):
-                parser.read_csv(fname, usecols=["col1", "col2", "col3"])
-        # unlink fails on windows if file handles still point to it
-        os.unlink(fname)
+    fname = temp_file
+    Path(fname).write_text("col1,col2\na,b\n1,2", encoding="utf-8")
+    with tm.assert_produces_warning(False):
+        with pytest.raises(error, match="col3"):
+            parser.read_csv(fname, usecols=["col1", "col2", "col3"])
+    # unlink fails on windows if file handles still point to it
+    os.unlink(fname)


 def test_invalid_file_inputs(request, all_parsers):
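For reference, here is a sketch of the behaviour test_close_file_handle_on_invalid_usecols guards (GH 45384), written against plain pandas rather than the all_parsers fixture; the helper name is made up and the default C engine is assumed:

import os
from pathlib import Path

import pandas as pd
import pytest


def check_handle_closed_on_bad_usecols(path: Path) -> None:
    path.write_text("col1,col2\na,b\n1,2", encoding="utf-8")
    # read_csv must close its file handle even though usecols validation fails.
    with pytest.raises(ValueError, match="col3"):
        pd.read_csv(path, usecols=["col1", "col2", "col3"])
    # On Windows, unlink raises if a handle is still open, which is why the
    # test keeps the explicit os.unlink even after switching to temp_file.
    os.unlink(path)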
