30
30
Union ,
31
31
)
32
32
33
+ import bigframes_vendored .constants as constants
33
34
import bigframes_vendored .pandas .io .gbq as vendored_pandas_gbq
34
35
from google .cloud import bigquery
35
36
import numpy
@@ -103,6 +104,7 @@ def read_csv(
103
104
Literal ["c" , "python" , "pyarrow" , "python-fwf" , "bigquery" ]
104
105
] = None ,
105
106
encoding : Optional [str ] = None ,
107
+ write_engine : constants .WriteEngineType = "default" ,
106
108
** kwargs ,
107
109
) -> bigframes .dataframe .DataFrame :
108
110
return global_session .with_default_session (
@@ -116,6 +118,7 @@ def read_csv(
116
118
dtype = dtype ,
117
119
engine = engine ,
118
120
encoding = encoding ,
121
+ write_engine = write_engine ,
119
122
** kwargs ,
120
123
)
121
124
@@ -133,6 +136,7 @@ def read_json(
133
136
encoding : Optional [str ] = None ,
134
137
lines : bool = False ,
135
138
engine : Literal ["ujson" , "pyarrow" , "bigquery" ] = "ujson" ,
139
+ write_engine : constants .WriteEngineType = "default" ,
136
140
** kwargs ,
137
141
) -> bigframes .dataframe .DataFrame :
138
142
return global_session .with_default_session (
@@ -143,6 +147,7 @@ def read_json(
143
147
encoding = encoding ,
144
148
lines = lines ,
145
149
engine = engine ,
150
+ write_engine = write_engine ,
146
151
** kwargs ,
147
152
)
148
153
@@ -243,24 +248,41 @@ def read_gbq_table(
243
248
244
249
245
250
@typing.overload
def read_pandas(
    pandas_dataframe: pandas.DataFrame,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.dataframe.DataFrame:
    ...


@typing.overload
def read_pandas(
    pandas_dataframe: pandas.Series,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.series.Series:
    ...


@typing.overload
def read_pandas(
    pandas_dataframe: pandas.Index,
    *,
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.core.indexes.Index:
    ...


# Implementation: the return type mirrors the input type (DataFrame -> DataFrame,
# Series -> Series, Index -> Index), as declared by the overloads above.
def read_pandas(
    pandas_dataframe: Union[pandas.DataFrame, pandas.Series, pandas.Index],
    *,
    write_engine: constants.WriteEngineType = "default",
):
    # Thin wrapper: delegates to Session.read_pandas on the default global
    # session. `write_engine` is keyword-only and forwarded unchanged; its
    # default "default" preserves pre-existing behavior for callers that
    # do not pass it.
    return global_session.with_default_session(
        bigframes.session.Session.read_pandas,
        pandas_dataframe,
        write_engine=write_engine,
    )
265
287
266
288
# NOTE(review): the `def read_pickle(` line is reconstructed from the diff hunk
# header; confirm against the full file that no decorator precedes it.
def read_pickle(
    filepath_or_buffer: FilePath | ReadPickleBuffer,
    compression: CompressionOptions = "infer",
    storage_options: StorageOptions = None,
    *,
    write_engine: constants.WriteEngineType = "default",
):
    # Thin wrapper: forwards all arguments by keyword to Session.read_pickle
    # on the default global session. The new `write_engine` parameter is
    # keyword-only with default "default", keeping the change backward-
    # compatible for existing callers.
    return global_session.with_default_session(
        bigframes.session.Session.read_pickle,
        filepath_or_buffer=filepath_or_buffer,
        compression=compression,
        storage_options=storage_options,
        write_engine=write_engine,
    )


# Reuse the session method's docstring so wrapper and method stay in sync.
read_pickle.__doc__ = inspect.getdoc(bigframes.session.Session.read_pickle)
284
309
285
310
286
311
def read_parquet(
    path: str | IO["bytes"],
    *,
    engine: str = "auto",
    write_engine: constants.WriteEngineType = "default",
) -> bigframes.dataframe.DataFrame:
    # Thin wrapper: delegates to Session.read_parquet on the default global
    # session. `engine` and the new keyword-only `write_engine` (default
    # "default", so existing callers are unaffected) are forwarded unchanged.
    return global_session.with_default_session(
        bigframes.session.Session.read_parquet,
        path,
        engine=engine,
        write_engine=write_engine,
    )
294
323
295
324
0 commit comments