@@ -748,10 +748,9 @@ def postgresql_psycopg2_conn_types(postgresql_psycopg2_engine_types):
 
 
 @pytest.fixture
-def sqlite_str():
+def sqlite_str(temp_file):
     pytest.importorskip("sqlalchemy")
-    with tm.ensure_clean() as name:
-        yield f"sqlite:///{name}"
+    return f"sqlite:///{temp_file}"
 
 
 @pytest.fixture
@@ -817,20 +816,19 @@ def sqlite_conn_types(sqlite_engine_types):
 
 
 @pytest.fixture
-def sqlite_adbc_conn():
+def sqlite_adbc_conn(temp_file):
     pytest.importorskip("pyarrow")
     pytest.importorskip("adbc_driver_sqlite")
     from adbc_driver_sqlite import dbapi
 
-    with tm.ensure_clean() as name:
-        uri = f"file:{name}"
-        with dbapi.connect(uri) as conn:
-            yield conn
-            for view in get_all_views(conn):
-                drop_view(view, conn)
-            for tbl in get_all_tables(conn):
-                drop_table(tbl, conn)
-            conn.commit()
+    uri = f"file:{temp_file}"
+    with dbapi.connect(uri) as conn:
+        yield conn
+        for view in get_all_views(conn):
+            drop_view(view, conn)
+        for tbl in get_all_tables(conn):
+            drop_table(tbl, conn)
+        conn.commit()
 
 
 @pytest.fixture
@@ -2504,20 +2502,20 @@ def test_sqlalchemy_integer_overload_mapping(conn, request, integer):
         sql.SQLTable("test_type", db, frame=df)
 
 
-def test_database_uri_string(request, test_frame1):
+def test_database_uri_string(temp_file, request, test_frame1):
     pytest.importorskip("sqlalchemy")
     # Test read_sql and .to_sql method with a database URI (GH10654)
     # db_uri = 'sqlite:///:memory:' # raises
     # sqlalchemy.exc.OperationalError: (sqlite3.OperationalError) near
     # "iris": syntax error [SQL: 'iris']
-    with tm.ensure_clean() as name:
-        db_uri = "sqlite:///" + name
-        table = "iris"
-        test_frame1.to_sql(name=table, con=db_uri, if_exists="replace", index=False)
-        test_frame2 = sql.read_sql(table, db_uri)
-        test_frame3 = sql.read_sql_table(table, db_uri)
-        query = "SELECT * FROM iris"
-        test_frame4 = sql.read_sql_query(query, db_uri)
+    name = str(temp_file)
+    db_uri = "sqlite:///" + name
+    table = "iris"
+    test_frame1.to_sql(name=table, con=db_uri, if_exists="replace", index=False)
+    test_frame2 = sql.read_sql(table, db_uri)
+    test_frame3 = sql.read_sql_table(table, db_uri)
+    query = "SELECT * FROM iris"
+    test_frame4 = sql.read_sql_query(query, db_uri)
     tm.assert_frame_equal(test_frame1, test_frame2)
     tm.assert_frame_equal(test_frame1, test_frame3)
     tm.assert_frame_equal(test_frame1, test_frame4)
@@ -2581,16 +2579,15 @@ def test_column_with_percentage(conn, request):
     tm.assert_frame_equal(res, df)
 
 
-def test_sql_open_close(test_frame3):
+def test_sql_open_close(temp_file, test_frame3):
     # Test if the IO in the database still work if the connection closed
     # between the writing and reading (as in many real situations).
 
-    with tm.ensure_clean() as name:
-        with contextlib.closing(sqlite3.connect(name)) as conn:
-            assert sql.to_sql(test_frame3, "test_frame3_legacy", conn, index=False) == 4
+    with contextlib.closing(sqlite3.connect(temp_file)) as conn:
+        assert sql.to_sql(test_frame3, "test_frame3_legacy", conn, index=False) == 4
 
-        with contextlib.closing(sqlite3.connect(name)) as conn:
-            result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn)
+    with contextlib.closing(sqlite3.connect(temp_file)) as conn:
+        result = sql.read_sql_query("SELECT * FROM test_frame3_legacy;", conn)
 
     tm.assert_frame_equal(test_frame3, result)
 
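
The `temp_file` fixture these hunks rely on is not shown in this diff. As a minimal sketch of what such a fixture could look like, assuming it builds on pytest's built-in `tmp_path` fixture (the uuid-based filename and the exact definition here are assumptions, not necessarily the project's actual one):

```python
import uuid

import pytest


@pytest.fixture
def temp_file(tmp_path):
    # Assumed helper: a unique, pre-created file inside pytest's per-test
    # temporary directory; pytest removes the directory after the test run.
    path = tmp_path / str(uuid.uuid4())
    path.touch()
    return path
```

Unlike `tm.ensure_clean()`, which handled cleanup via a context manager, a `tmp_path`-based fixture leaves removal to pytest's temporary-directory lifecycle, which is why the outer `with` blocks in the tests above could be flattened.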