@@ -118,80 +118,6 @@ def test_create_client_default_credentials():
     assert client is not None
 
 
-# TODO(Mattix23): After code sample from https://github.com/googleapis/python-bigquery/pull/1446
-# is updated from cloud.google.com delete this.
-def test_create_table_nested_repeated_schema(client, to_delete):
-    dataset_id = "create_table_nested_repeated_{}".format(_millis())
-    project = client.project
-    dataset_ref = bigquery.DatasetReference(project, dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    client.create_dataset(dataset)
-    to_delete.append(dataset)
-
-    # [START bigquery_nested_repeated_schema]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # project = client.project
-    # dataset_ref = bigquery.DatasetReference(project, 'my_dataset')
-
-    schema = [
-        bigquery.SchemaField("id", "STRING", mode="NULLABLE"),
-        bigquery.SchemaField("first_name", "STRING", mode="NULLABLE"),
-        bigquery.SchemaField("last_name", "STRING", mode="NULLABLE"),
-        bigquery.SchemaField("dob", "DATE", mode="NULLABLE"),
-        bigquery.SchemaField(
-            "addresses",
-            "RECORD",
-            mode="REPEATED",
-            fields=[
-                bigquery.SchemaField("status", "STRING", mode="NULLABLE"),
-                bigquery.SchemaField("address", "STRING", mode="NULLABLE"),
-                bigquery.SchemaField("city", "STRING", mode="NULLABLE"),
-                bigquery.SchemaField("state", "STRING", mode="NULLABLE"),
-                bigquery.SchemaField("zip", "STRING", mode="NULLABLE"),
-                bigquery.SchemaField("numberOfYears", "STRING", mode="NULLABLE"),
-            ],
-        ),
-    ]
-    table_ref = dataset_ref.table("my_table")
-    table = bigquery.Table(table_ref, schema=schema)
-    table = client.create_table(table)  # API request
-
-    print("Created table {}".format(table.full_table_id))
-    # [END bigquery_nested_repeated_schema]
-
-
-def test_create_table_cmek(client, to_delete):
-    dataset_id = "create_table_cmek_{}".format(_millis())
-    project = client.project
-    dataset_ref = bigquery.DatasetReference(project, dataset_id)
-    dataset = bigquery.Dataset(dataset_ref)
-    client.create_dataset(dataset)
-    to_delete.append(dataset)
-    # TODO(Mattix23): When sample is updated in cloud.google.com, delete this one.
-    # [START bigquery_create_table_cmek]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # dataset_id = 'my_dataset'
-
-    table_ref = dataset.table("my_table")
-    table = bigquery.Table(table_ref)
-
-    # Set the encryption key to use for the table.
-    # TODO: Replace this key with a key you have created in Cloud KMS.
-    kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
-        "cloud-samples-tests", "us", "test", "test"
-    )
-    table.encryption_configuration = bigquery.EncryptionConfiguration(
-        kms_key_name=kms_key_name
-    )
-
-    table = client.create_table(table)  # API request
-
-    assert table.encryption_configuration.kms_key_name == kms_key_name
-    # [END bigquery_create_table_cmek]
-
-
 def test_create_partitioned_table(client, to_delete):
     dataset_id = "create_table_partitioned_{}".format(_millis())
     project = client.project
@@ -248,27 +174,10 @@ def test_manage_table_labels(client, to_delete):
     to_delete.append(dataset)
 
     table = bigquery.Table(dataset.table(table_id), schema=SCHEMA)
-    table = client.create_table(table)
-
-    # TODO(Mattix23): After code sample from https://github.com/googleapis/python-bigquery/pull/1451
-    # is updated from cloud.google.com delete this.
 
-    # [START bigquery_label_table]
-    # from google.cloud import bigquery
-    # client = bigquery.Client()
-    # project = client.project
-    # dataset_ref = bigquery.DatasetReference(project, dataset_id)
-    # table_ref = dataset_ref.table('my_table')
-    # table = client.get_table(table_ref)  # API request
-
-    assert table.labels == {}
     labels = {"color": "green"}
     table.labels = labels
-
-    table = client.update_table(table, ["labels"])  # API request
-
-    assert table.labels == labels
-    # [END bigquery_label_table]
+    table = client.create_table(table)
 
     # [START bigquery_get_table_labels]
     # from google.cloud import bigquery