@@ -2316,6 +2316,81 @@ def test_parameterized_types_round_trip(self):
 
         self.assertEqual(tuple(s._key()[:2] for s in table2.schema), fields)
 
+    def test_table_snapshots(self):
+        from google.cloud.bigquery import CopyJobConfig
+        from google.cloud.bigquery import OperationType
+
+        client = Config.CLIENT
+
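+        # Build fully-qualified table IDs; the snapshot lives alongside its source table.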
+        source_table_path = f"{client.project}.{Config.DATASET}.test_table"
+        snapshot_table_path = f"{source_table_path}_snapshot"
+
+        # Create the table before loading so that the column order is predictable.
+        schema = [
+            bigquery.SchemaField("foo", "INTEGER"),
+            bigquery.SchemaField("bar", "STRING"),
+        ]
+        source_table = helpers.retry_403(Config.CLIENT.create_table)(
+            Table(source_table_path, schema=schema)
+        )
+        self.to_delete.insert(0, source_table)
+
+        # Populate the table with initial data.
+        rows = [{"foo": 1, "bar": "one"}, {"foo": 2, "bar": "two"}]
+        load_job = Config.CLIENT.load_table_from_json(rows, source_table)
+        load_job.result()
+
+        # Now create a snapshot before modifying the original table data.
+        copy_config = CopyJobConfig()
+        copy_config.operation_type = OperationType.SNAPSHOT
+
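+        # A snapshot is an ordinary copy job whose operation type is SNAPSHOT.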
+        copy_job = client.copy_table(
+            sources=source_table_path,
+            destination=snapshot_table_path,
+            job_config=copy_config,
+        )
+        copy_job.result()
+
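+        # Fetch the snapshot's metadata and register it for cleanup as well.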
+        snapshot_table = client.get_table(snapshot_table_path)
+        self.to_delete.insert(0, snapshot_table)
+
+        # Modify data in the original table.
+        sql = f'INSERT INTO `{source_table_path}`(foo, bar) VALUES (3, "three")'
+        query_job = client.query(sql)
+        query_job.result()
+
+        # List rows from the source table and compare them to rows from the snapshot.
+        rows_iter = client.list_rows(source_table_path)
+        rows = sorted(row.values() for row in rows_iter)
+        assert rows == [(1, "one"), (2, "two"), (3, "three")]
+
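+        # The snapshot must still contain only the rows from before the INSERT.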
+        rows_iter = client.list_rows(snapshot_table_path)
+        rows = sorted(row.values() for row in rows_iter)
+        assert rows == [(1, "one"), (2, "two")]
+
+        # Now restore the table from the snapshot; it should then again contain the
+        # old set of rows.
+        copy_config = CopyJobConfig()
+        copy_config.operation_type = OperationType.RESTORE
+        copy_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE
+
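+        # WRITE_TRUNCATE lets the restore job overwrite the existing source table.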
+        copy_job = client.copy_table(
+            sources=snapshot_table_path,
+            destination=source_table_path,
+            job_config=copy_config,
+        )
+        copy_job.result()
+
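+        # After the restore, the row inserted after the snapshot is gone again.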
+        rows_iter = client.list_rows(source_table_path)
+        rows = sorted(row.values() for row in rows_iter)
+        assert rows == [(1, "one"), (2, "two")]
+
     def temp_dataset(self, dataset_id, location=None):
         project = Config.CLIENT.project
         dataset_ref = bigquery.DatasetReference(project, dataset_id)