Skip to content

Commit 54e50c1

Browse files
committed
Remove user-facing argument
1 parent 62137d8 commit 54e50c1

File tree

1 file changed

+2
-23
lines changed

1 file changed

+2
-23
lines changed

google/cloud/bigquery/client.py

+2-23
Original file line numberDiff line numberDiff line change
@@ -2450,7 +2450,6 @@ def load_table_from_dataframe(
24502450
project: str = None,
24512451
job_config: LoadJobConfig = None,
24522452
parquet_compression: str = "snappy",
2453-
parquet_use_compliant_nested_type: bool = True,
24542453
timeout: float = DEFAULT_TIMEOUT,
24552454
) -> job.LoadJob:
24562455
"""Upload the contents of a table from a pandas DataFrame.
@@ -2526,24 +2525,6 @@ def load_table_from_dataframe(
25262525
passed as the ``compression`` argument to the underlying
25272526
``DataFrame.to_parquet()`` method.
25282527
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_parquet.html#pandas.DataFrame.to_parquet
2529-
parquet_use_compliant_nested_type (bool):
2530-
Whether the ``pyarrow.parquet.write_table`` serializing method should write
2531-
compliant Parquet nested type (lists). Defaults to ``True``.
2532-
2533-
The argument is directly passed as the ``use_compliant_nested_type``
2534-
argument to the underlying ``pyarrow.parquet.write_table()``
2535-
method.
2536-
https://arrow.apache.org/docs/python/generated/pyarrow.parquet.write_table.html#pyarrow-parquet-write-table
2537-
2538-
If the job config schema is missing, the argument is directly
2539-
passed as an additonal ``kwarg`` argument to the underlying
2540-
``DataFrame.to_parquet()`` method.
2541-
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_parquet.html#pandas.DataFrame.to_parquet
2542-
2543-
This argument is ignored for ``pyarrow`` versions before ``4.0.0``.
2544-
2545-
This argument is only present to allow for backwards compatibility with
2546-
tables created using an old version of this method.
25472528
timeout (Optional[float]):
25482529
The number of seconds to wait for the underlying HTTP transport
25492530
before using ``retry``.
@@ -2667,16 +2648,14 @@ def load_table_from_dataframe(
26672648
job_config.schema,
26682649
tmppath,
26692650
parquet_compression=parquet_compression,
2670-
parquet_use_compliant_nested_type=parquet_use_compliant_nested_type,
2651+
parquet_use_compliant_nested_type=True,
26712652
)
26722653
else:
26732654
dataframe.to_parquet(
26742655
tmppath,
26752656
engine="pyarrow",
26762657
compression=parquet_compression,
2677-
**{
2678-
"use_compliant_nested_type": parquet_use_compliant_nested_type
2679-
}
2658+
**{"use_compliant_nested_type": True}
26802659
if _helpers.PYARROW_VERSIONS.use_compliant_nested_type
26812660
else {},
26822661
)

0 commit comments

Comments
 (0)