@@ -2450,7 +2450,6 @@ def load_table_from_dataframe(
         project: str = None,
         job_config: LoadJobConfig = None,
         parquet_compression: str = "snappy",
-        parquet_use_compliant_nested_type: bool = True,
         timeout: float = DEFAULT_TIMEOUT,
     ) -> job.LoadJob:
         """Upload the contents of a table from a pandas DataFrame.
@@ -2526,24 +2525,6 @@ def load_table_from_dataframe(
                 passed as the ``compression`` argument to the underlying
                 ``DataFrame.to_parquet()`` method.
                 https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_parquet.html#pandas.DataFrame.to_parquet
-            parquet_use_compliant_nested_type (bool):
-                Whether the ``pyarrow.parquet.write_table`` serializing method should write
-                compliant Parquet nested type (lists). Defaults to ``True``.
-
-                The argument is directly passed as the ``use_compliant_nested_type``
-                argument to the underlying ``pyarrow.parquet.write_table()``
-                method.
-                https://arrow.apache.org/docs/python/generated/pyarrow.parquet.write_table.html#pyarrow-parquet-write-table
-
-                If the job config schema is missing, the argument is directly
-                passed as an additonal ``kwarg`` argument to the underlying
-                ``DataFrame.to_parquet()`` method.
-                https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_parquet.html#pandas.DataFrame.to_parquet
-
-                This argument is ignored for ``pyarrow`` versions before ``4.0.0``.
-
-                This argument is only present to allow for backwards compatibility with
-                tables created using an old version of this method.
             timeout (Optional[float]):
                 The number of seconds to wait for the underlying HTTP transport
                 before using ``retry``.
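
For context on what the removed docstring paragraphs describe, here is a minimal sketch of the pass-through to ``pyarrow.parquet.write_table()``, assuming pyarrow >= 4.0.0 (where ``use_compliant_nested_type`` first appeared); the example table and output path are illustrative only:

    # Sketch: what write_table's use_compliant_nested_type flag controls.
    # Assumes pyarrow >= 4.0.0; the table and output path are made up.
    import pyarrow as pa
    import pyarrow.parquet as pq

    table = pa.table(
        {"tags": pa.array([["a", "b"], ["c"]], type=pa.list_(pa.string()))}
    )

    # True writes the spec-compliant three-level list layout (the element
    # field is named "element"); False keeps pyarrow's legacy "item" naming.
    # After this change the client always passes True.
    pq.write_table(table, "example.parquet", use_compliant_nested_type=True)
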
@@ -2667,16 +2648,14 @@ def load_table_from_dataframe(
                         job_config.schema,
                         tmppath,
                         parquet_compression=parquet_compression,
-                        parquet_use_compliant_nested_type=parquet_use_compliant_nested_type,
+                        parquet_use_compliant_nested_type=True,
                     )
                 else:
                     dataframe.to_parquet(
                         tmppath,
                         engine="pyarrow",
                         compression=parquet_compression,
-                        **{
-                            "use_compliant_nested_type": parquet_use_compliant_nested_type
-                        }
+                        **{"use_compliant_nested_type": True}
                         if _helpers.PYARROW_VERSIONS.use_compliant_nested_type
                         else {},
                     )
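
The conditional splat in the ``else`` branch is what keeps the call valid on older pyarrow: the keyword is only forwarded through ``DataFrame.to_parquet()`` to the pyarrow engine when the installed pyarrow supports it. A minimal sketch of the same pattern, with a hypothetical version check standing in for ``_helpers.PYARROW_VERSIONS.use_compliant_nested_type``:

    # Sketch of the version-guarded kwargs idiom; _supports_flag is a
    # hypothetical stand-in for
    # _helpers.PYARROW_VERSIONS.use_compliant_nested_type.
    import pandas as pd
    import pyarrow


    def _supports_flag() -> bool:
        # use_compliant_nested_type was added to write_table in pyarrow 4.0.0.
        return int(pyarrow.__version__.split(".")[0]) >= 4


    df = pd.DataFrame({"tags": [["a", "b"], ["c"]]})
    extra = {"use_compliant_nested_type": True} if _supports_flag() else {}
    # pandas forwards extra keyword arguments to the pyarrow engine, so an
    # older pyarrow installation never sees an unknown keyword.
    df.to_parquet("example.parquet", engine="pyarrow", compression="snappy", **extra)
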