@@ -132,7 +132,8 @@ def test_create_table_nested_repeated_schema(client, to_delete):
     # [START bigquery_nested_repeated_schema]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
 
     schema = [
         bigquery.SchemaField("id", "STRING", mode="NULLABLE"),
@@ -202,7 +203,8 @@ def test_create_partitioned_table(client, to_delete):
     # [START bigquery_create_table_partitioned]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
 
     table_ref = dataset_ref.table("my_partitioned_table")
     schema = [
@@ -240,7 +242,9 @@ def test_create_partitioned_table(client, to_delete):
 def test_manage_table_labels(client, to_delete):
     dataset_id = "label_table_dataset_{}".format(_millis())
     table_id = "label_table_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_ref)
     client.create_dataset(dataset)
     to_delete.append(dataset)
 
@@ -250,7 +254,9 @@ def test_manage_table_labels(client, to_delete):
     # [START bigquery_label_table]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # table_ref = client.dataset('my_dataset').table('my_table')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    # table_ref = dataset_ref.table('my_table')
     # table = client.get_table(table_ref)  # API request
 
     assert table.labels == {}
@@ -268,7 +274,8 @@ def test_manage_table_labels(client, to_delete):
     # dataset_id = 'my_dataset'
     # table_id = 'my_table'
 
-    dataset_ref = client.dataset(dataset_id)
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
     table_ref = dataset_ref.table(table_id)
     table = client.get_table(table_ref)  # API Request
 
@@ -286,7 +293,9 @@ def test_manage_table_labels(client, to_delete):
     # [START bigquery_delete_label_table]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # table_ref = client.dataset('my_dataset').table('my_table')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    # table_ref = dataset_ref.table('my_table')
     # table = client.get_table(table_ref)  # API request
 
     # This example table starts with one label
@@ -310,7 +319,9 @@ def test_update_table_description(client, to_delete):
    """Update a table's description."""
     dataset_id = "update_table_description_dataset_{}".format(_millis())
     table_id = "update_table_description_table_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_ref)
     client.create_dataset(dataset)
     to_delete.append(dataset)
 
@@ -321,7 +332,9 @@ def test_update_table_description(client, to_delete):
     # [START bigquery_update_table_description]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # table_ref = client.dataset('my_dataset').table('my_table')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    # table_ref = dataset_ref.table('my_table')
     # table = client.get_table(table_ref)  # API request
 
     assert table.description == "Original description."
@@ -343,7 +356,9 @@ def test_update_table_expiration(client, to_delete):
    """Update a table's expiration time."""
     dataset_id = "update_table_expiration_dataset_{}".format(_millis())
     table_id = "update_table_expiration_table_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_ref)
     client.create_dataset(dataset)
     to_delete.append(dataset)
 
@@ -356,7 +371,9 @@ def test_update_table_expiration(client, to_delete):
 
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # table_ref = client.dataset('my_dataset').table('my_table')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    # table_ref = dataset_ref.table('my_table')
     # table = client.get_table(table_ref)  # API request
 
     assert table.expires is None
@@ -382,7 +399,9 @@ def test_relax_column(client, to_delete):
    """Updates a schema field from required to nullable."""
     dataset_id = "relax_column_dataset_{}".format(_millis())
     table_id = "relax_column_table_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_ref)
     dataset = client.create_dataset(dataset)
     to_delete.append(dataset)
 
@@ -396,7 +415,9 @@ def test_relax_column(client, to_delete):
         bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"),
         bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"),
     ]
-    table_ref = client.dataset(dataset_id).table(table_id)
+
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    table_ref = dataset_ref.table(table_id)
     table = bigquery.Table(table_ref, schema=original_schema)
     table = client.create_table(table)
     assert all(field.mode == "REQUIRED" for field in table.schema)
@@ -424,7 +445,9 @@ def test_update_table_cmek(client, to_delete):
    """Patch a table's metadata."""
     dataset_id = "update_table_cmek_{}".format(_millis())
     table_id = "update_table_cmek_{}".format(_millis())
-    dataset = bigquery.Dataset(client.dataset(dataset_id))
+    project = client.project
+    dataset_ref = bigquery.DatasetReference(project, dataset_id)
+    dataset = bigquery.Dataset(dataset_ref)
     client.create_dataset(dataset)
     to_delete.append(dataset)
 
@@ -468,7 +491,7 @@ def test_update_table_cmek(client, to_delete):
 def test_manage_views(client, to_delete):
     project = client.project
     source_dataset_id = "source_dataset_{}".format(_millis())
-    source_dataset_ref = client.dataset(source_dataset_id)
+    source_dataset_ref = bigquery.DatasetReference(project, source_dataset_id)
     source_dataset = bigquery.Dataset(source_dataset_ref)
     source_dataset = client.create_dataset(source_dataset)
     to_delete.append(source_dataset)
@@ -487,7 +510,7 @@ def test_manage_views(client, to_delete):
     load_job.result()
 
     shared_dataset_id = "shared_dataset_{}".format(_millis())
-    shared_dataset_ref = client.dataset(shared_dataset_id)
+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
     shared_dataset = bigquery.Dataset(shared_dataset_ref)
     shared_dataset = client.create_dataset(shared_dataset)
     to_delete.append(shared_dataset)
@@ -498,7 +521,7 @@ def test_manage_views(client, to_delete):
     # project = 'my-project'
     # source_dataset_id = 'my_source_dataset'
     # source_table_id = 'us_states'
-    # shared_dataset_ref = client.dataset('my_shared_dataset')
+    # shared_dataset_ref = bigquery.DatasetReference(project, 'my_shared_dataset')
 
     # This example shows how to create a shared view of a source table of
     # US States. The source table contains all 50 states, while the view will
@@ -518,7 +541,7 @@ def test_manage_views(client, to_delete):
     # project = 'my-project'
     # source_dataset_id = 'my_source_dataset'
     # source_table_id = 'us_states'
-    # shared_dataset_ref = client.dataset('my_shared_dataset')
+    # shared_dataset_ref = bigquery.DatasetReference(project, 'my_shared_dataset')
 
     # This example shows how to update a shared view of a source table of
     # US States. The view's query will be updated to contain only states with
@@ -534,8 +557,9 @@ def test_manage_views(client, to_delete):
     # from google.cloud import bigquery
     # client = bigquery.Client()
     # shared_dataset_id = 'my_shared_dataset'
-
-    view_ref = client.dataset(shared_dataset_id).table("my_shared_view")
+    project = client.project
+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
+    view_ref = shared_dataset_ref.table("my_shared_view")
     view = client.get_table(view_ref)  # API Request
 
     # Display view properties
@@ -552,9 +576,9 @@ def test_manage_views(client, to_delete):
     # Assign access controls to the dataset containing the view
     # shared_dataset_id = 'my_shared_dataset'
     # analyst_group_email = '[email protected]'
-    shared_dataset = client.get_dataset(
-        client.dataset(shared_dataset_id)
-    )  # API request
+    project = client.project
+    shared_dataset_ref = bigquery.DatasetReference(project, shared_dataset_id)
+    shared_dataset = client.get_dataset(shared_dataset_ref)  # API request
     access_entries = shared_dataset.access_entries
     access_entries.append(
         bigquery.AccessEntry("READER", "groupByEmail", analyst_group_email)
@@ -567,9 +591,9 @@ def test_manage_views(client, to_delete):
     # Authorize the view to access the source dataset
     # project = 'my-project'
     # source_dataset_id = 'my_source_dataset'
-    source_dataset = client.get_dataset(
-        client.dataset(source_dataset_id)
-    )  # API request
+    project = client.project
+    source_dataset_ref = bigquery.DatasetReference(project, source_dataset_id)
+    source_dataset = client.get_dataset(source_dataset_ref)  # API request
     view_reference = {
         "projectId": project,
         "datasetId": shared_dataset_id,
@@ -602,7 +626,8 @@ def test_load_table_add_column(client, to_delete):
     # [START bigquery_add_column_load_append]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
     # filepath = 'path/to/your_file.csv'
 
     # Retrieves the destination table and checks the length of the schema
@@ -673,7 +698,8 @@ def test_load_table_relax_column(client, to_delete):
     # [START bigquery_relax_column_load_append]
     # from google.cloud import bigquery
     # client = bigquery.Client()
-    # dataset_ref = client.dataset('my_dataset')
+    # project = client.project
+    # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
     # filepath = 'path/to/your_file.csv'
 
     # Retrieves the destination table and checks the number of required fields
0 commit comments