@@ -248,6 +248,42 @@ def check_partition_names(path, expected):
     assert dataset.partitioning.schema.names == expected


+def test_read_parquet_invalid_path_types(tmp_path, engine):
+    # GH #62922
+    df = pd.DataFrame({"a": [1]})
+    path = tmp_path / "test_read_parquet.parquet"
+    df.to_parquet(path, engine=engine)
+
+    bad_path_types = [
+        [str(path)],  # list
+        (str(path),),  # tuple
+        b"raw-bytes",  # bytes
+    ]
+    for bad in bad_path_types:
+        match = (
+            f"read_parquet expected str/os.PathLike or file-like object, "
+            f"got {type(bad).__name__} type"
+        )
+        with pytest.raises(TypeError, match=match):
+            read_parquet(bad, engine=engine)
+
+
+def test_read_parquet_valid_path_types(tmp_path, engine):
+    # GH #62922
+    df = pd.DataFrame({"a": [1]})
+    path = tmp_path / "test_read_parquet.parquet"
+    df.to_parquet(path, engine=engine)
+    # str
+    read_parquet(str(path), engine=engine)
+    # os.PathLike
+    read_parquet(pathlib.Path(path), engine=engine)
+    # file-like object
+    buf = BytesIO()
+    df.to_parquet(buf, engine=engine)
+    buf.seek(0)
+    read_parquet(buf, engine=engine)
+
+
 def test_invalid_engine(df_compat, temp_file):
     msg = "engine must be one of 'pyarrow', 'fastparquet'"
     with pytest.raises(ValueError, match=msg):