1313
1414
@pytest.fixture
def some_joblib(tmp_path: Path):
    """Write a small joblib pickle into ``tmp_path`` and return its path.

    Used by pickle/joblib read tests to provide an on-disk artifact
    that load_data must be explicitly allowed to unpickle.
    """
    # Imported lazily so collecting the test module does not require joblib.
    import joblib

    p_obj = tmp_path / "some.joblib"
    joblib.dump({"a": 1}, p_obj)

    return p_obj
@@ -54,7 +54,7 @@ def test_default_title(obj, dst_title):
5454 "joblib" ,
5555 ],
5656)
57- def test_driver_roundtrip (tmp_dir2 , type_ ):
57+ def test_driver_roundtrip (tmp_path : Path , type_ ):
5858 # TODO: I think this test highlights the challenge of getting the flow
5959 # between metadata, drivers, and the metafactory right.
6060 # There is the name of the data (relative to the pin directory), and the full
@@ -66,13 +66,13 @@ def test_driver_roundtrip(tmp_dir2, type_):
6666 fname = "some_df"
6767 full_file = f"{ fname } .{ type_ } "
6868
69- p_obj = tmp_dir2 / fname
69+ p_obj = tmp_path / fname
7070 res_fname = save_data (df , p_obj , type_ )
7171
7272 assert Path (res_fname ).name == full_file
7373
7474 meta = MetaRaw (full_file , type_ , "my_pin" )
75- obj = load_data (meta , fsspec .filesystem ("file" ), tmp_dir2 , allow_pickle_read = True )
75+ obj = load_data (meta , fsspec .filesystem ("file" ), tmp_path , allow_pickle_read = True )
7676
7777 assert df .equals (obj )
7878
@@ -83,50 +83,50 @@ def test_driver_roundtrip(tmp_dir2, type_):
8383 "json" ,
8484 ],
8585)
def test_driver_roundtrip_json(tmp_path: Path, type_):
    """Round-trip a plain dict through save_data / load_data.

    ``type_`` is supplied by the surrounding parametrize decorator
    (e.g. "json"). Verifies that save_data appends the type suffix to
    the file name and that load_data restores an equal object.
    """
    df = {"x": [1, 2, 3]}

    fname = "some_df"
    full_file = f"{fname}.{type_}"

    p_obj = tmp_path / fname
    res_fname = save_data(df, p_obj, type_)

    # save_data is expected to add the ".<type_>" suffix itself.
    assert Path(res_fname).name == full_file

    meta = MetaRaw(full_file, type_, "my_pin")
    obj = load_data(meta, fsspec.filesystem("file"), tmp_path, allow_pickle_read=True)

    assert df == obj
101101
102102
def test_driver_feather_write_error(tmp_path: Path):
    """Saving with the retired "feather" type must raise NotImplementedError."""
    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3]})

    fname = "some_df"

    p_obj = tmp_path / fname

    with pytest.raises(NotImplementedError) as exc_info:
        save_data(df, p_obj, "feather")

    # The error message should point users away from the dropped format.
    assert '"feather" no longer supported.' in exc_info.value.args[0]
116116
117117
def test_driver_feather_read_backwards_compat(tmp_path: Path):
    """Old pins written as feather can still be *read* even though writing is gone.

    Writes the feather file directly with pandas (bypassing save_data,
    which now rejects the type) and checks load_data round-trips it.
    """
    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3]})

    fname = "some_df"
    full_file = f"{fname}.feather"

    df.to_feather(tmp_path / full_file)

    obj = load_data(
        MetaRaw(full_file, "feather", "my_pin"), fsspec.filesystem("file"), tmp_path
    )

    assert df.equals(obj)
@@ -148,15 +148,15 @@ def test_driver_pickle_read_fail_default(some_joblib):
148148 )
149149
150150
def test_driver_apply_suffix_false(tmp_path: Path):
    """With apply_suffix=False, save_data keeps the file name exactly as given."""
    import pandas as pd

    df = pd.DataFrame({"x": [1, 2, 3]})

    fname = "some_df"
    type_ = "csv"

    p_obj = tmp_path / fname
    res_fname = save_data(df, p_obj, type_, apply_suffix=False)

    # No ".csv" suffix should have been appended.
    assert Path(res_fname).name == "some_df"
0 commit comments