@@ -499,6 +499,19 @@ def test_partition_cols_supported(self, pa, df_full):
             assert len(dataset.partitions.partition_names) == 2
             assert dataset.partitions.partition_names == set(partition_cols)
 
+    def test_partition_cols_string(self, pa, df_full):
+        # GH #27117
+        partition_cols = "bool"
+        partition_cols_list = [partition_cols]
+        df = df_full
+        with tm.ensure_clean_dir() as path:
+            df.to_parquet(path, partition_cols=partition_cols, compression=None)
+            import pyarrow.parquet as pq
+
+            dataset = pq.ParquetDataset(path, validate_schema=False)
+            assert len(dataset.partitions.partition_names) == 1
+            assert dataset.partitions.partition_names == set(partition_cols_list)
+
     def test_empty_dataframe(self, pa):
         # GH #27339
         df = pd.DataFrame()
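For context on the pyarrow half of the change: once `partition_cols` accepts a bare string, pyarrow writes one hive-style `col=value` directory per distinct value of that column. A minimal sketch of that behavior follows; the DataFrame and temporary directory are illustrative rather than part of the patch, and it assumes the patch is applied and pyarrow is installed.

import os
import tempfile

import pandas as pd

# Illustrative frame; the test above uses the df_full fixture instead.
df = pd.DataFrame({"bool": [True, False], "int": [1, 2]})

with tempfile.TemporaryDirectory() as path:
    # With the patch, a bare string is normalized to a one-element list.
    df.to_parquet(path, partition_cols="bool", compression=None)
    # pyarrow lays out one hive-style directory per partition value:
    print(sorted(os.listdir(path)))  # e.g. ['bool=False', 'bool=True']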
@@ -595,6 +608,23 @@ def test_partition_cols_supported(self, fp, df_full):
             actual_partition_cols = fastparquet.ParquetFile(path, False).cats
             assert len(actual_partition_cols) == 2
 
+    def test_partition_cols_string(self, fp, df_full):
+        # GH #27117
+        partition_cols = "bool"
+        df = df_full
+        with tm.ensure_clean_dir() as path:
+            df.to_parquet(
+                path,
+                engine="fastparquet",
+                partition_cols=partition_cols,
+                compression=None,
+            )
+            assert os.path.exists(path)
+            import fastparquet  # noqa: F811
+
+            actual_partition_cols = fastparquet.ParquetFile(path, False).cats
+            assert len(actual_partition_cols) == 1
+
     def test_partition_on_supported(self, fp, df_full):
         # GH #23283
         partition_cols = ["bool", "int"]
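Similarly, for the fastparquet half: `fastparquet.ParquetFile(...).cats` maps each partition column to its observed values, so a single string column should yield exactly one entry. A hedged sketch under the same assumptions (patch applied, fastparquet installed; the data is illustrative):

import tempfile

import fastparquet
import pandas as pd

# Illustrative frame; the test above uses the df_full fixture instead.
df = pd.DataFrame({"bool": [True, False], "int": [1, 2]})

with tempfile.TemporaryDirectory() as path:
    df.to_parquet(
        path, engine="fastparquet", partition_cols="bool", compression=None
    )
    # .cats maps partition-column names to their values; one key here.
    cats = fastparquet.ParquetFile(path, False).cats
    print(list(cats))  # e.g. ['bool']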