Skip to content

Commit c03aacc

Browse files
tswast authored and dizcology committed
docs: add sample for dataset copy (#76)
* docs: add sample for dataset copy * add google-cloud-bigquery to test requirements * use relative imports to hopefully fix lint
1 parent 51195a0 commit c03aacc

File tree

5 files changed

+166
-3
lines changed

5 files changed

+166
-3
lines changed
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
# Copyright 2020 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
# Copyright 2020 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
16+
def copy_dataset(override_values=None):
    """Create a BigQuery Data Transfer Service config that copies a dataset.

    Args:
        override_values: Optional mapping used by the test harness to replace
            the hard-coded sample IDs. Recognized keys:
            "destination_project_id", "destination_dataset_id",
            "source_project_id", "source_dataset_id".

    Returns:
        The created ``TransferConfig`` resource.
    """
    # Avoid a mutable default argument ({}): a shared dict default is created
    # once at definition time and would leak state between calls.
    if override_values is None:
        override_values = {}

    # [START bigquerydatatransfer_copy_dataset]
    from google.cloud import bigquery_datatransfer

    transfer_client = bigquery_datatransfer.DataTransferServiceClient()

    destination_project_id = "my-destination-project"
    destination_dataset_id = "my_destination_dataset"
    source_project_id = "my-source-project"
    source_dataset_id = "my_source_dataset"
    # [END bigquerydatatransfer_copy_dataset]
    # To facilitate testing, we replace values with alternatives
    # provided by the testing harness.
    destination_project_id = override_values.get(
        "destination_project_id", destination_project_id
    )
    destination_dataset_id = override_values.get(
        "destination_dataset_id", destination_dataset_id
    )
    source_project_id = override_values.get("source_project_id", source_project_id)
    source_dataset_id = override_values.get("source_dataset_id", source_dataset_id)
    # [START bigquerydatatransfer_copy_dataset]
    # "cross_region_copy" is the data source ID for dataset-copy transfers.
    transfer_config = bigquery_datatransfer.TransferConfig(
        destination_dataset_id=destination_dataset_id,
        display_name="Your Dataset Copy Name",
        data_source_id="cross_region_copy",
        params={
            "source_project_id": source_project_id,
            "source_dataset_id": source_dataset_id,
        },
        schedule="every 24 hours",
    )
    transfer_config = transfer_client.create_transfer_config(
        # The transfer config lives in the destination project.
        parent=transfer_client.common_project_path(destination_project_id),
        transfer_config=transfer_config,
    )
    print(f"Created transfer config: {transfer_config.name}")
    # [END bigquerydatatransfer_copy_dataset]
    return transfer_config
Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
# Copyright 2020 Google LLC
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
import datetime
16+
import uuid
17+
18+
import google.api_core.exceptions
19+
import google.auth
20+
from google.cloud import bigquery
21+
from google.cloud import bigquery_datatransfer
22+
import pytest
23+
24+
from . import copy_dataset
25+
26+
27+
def temp_suffix():
    """Return a unique suffix: a second-resolution timestamp plus 8 random hex chars."""
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    random_tag = uuid.uuid4().hex[:8]
    return "{}_{}".format(timestamp, random_tag)
30+
31+
32+
@pytest.fixture(scope="session")
def default_credentials():
    """Session-wide (credentials, project_id) pair from Application Default Credentials."""
    scopes = ["https://www.googleapis.com/auth/cloud-platform"]
    return google.auth.default(scopes)
35+
36+
37+
@pytest.fixture(scope="session")
def project_id(default_credentials):
    """Project ID resolved from the default credentials pair."""
    # default_credentials is a (credentials, project_id) tuple.
    return default_credentials[1]
41+
42+
43+
@pytest.fixture(scope="session")
def bigquery_client(default_credentials):
    """Session-wide BigQuery client built from the default credentials."""
    creds, project = default_credentials
    client = bigquery.Client(credentials=creds, project=project)
    return client
47+
48+
49+
@pytest.fixture(scope="session")
def transfer_client(default_credentials):
    """Session-wide Data Transfer Service client."""
    creds = default_credentials[0]
    return bigquery_datatransfer.DataTransferServiceClient(credentials=creds)
53+
54+
55+
@pytest.fixture
def to_delete_configs(transfer_client):
    """Collect transfer-config names during a test; best-effort delete them at teardown."""
    config_names = []
    yield config_names
    # Teardown: remove every registered config, ignoring API failures so one
    # missing config doesn't break cleanup of the rest.
    for name in config_names:
        try:
            transfer_client.delete_transfer_config(name=name)
        except google.api_core.exceptions.GoogleAPICallError:
            pass
64+
65+
66+
@pytest.fixture(scope="module")
def destination_dataset_id(bigquery_client, project_id):
    """Create a temporary destination dataset; drop it and its contents afterwards."""
    dataset_id = f"bqdts_dest_{temp_suffix()}"
    full_dataset_id = f"{project_id}.{dataset_id}"
    bigquery_client.create_dataset(full_dataset_id)
    yield dataset_id
    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
72+
73+
74+
@pytest.fixture(scope="module")
def source_dataset_id(bigquery_client, project_id):
    """Create a temporary source dataset; drop it and its contents afterwards."""
    dataset_id = f"bqdts_src_{temp_suffix()}"
    full_dataset_id = f"{project_id}.{dataset_id}"
    bigquery_client.create_dataset(full_dataset_id)
    yield dataset_id
    bigquery_client.delete_dataset(dataset_id, delete_contents=True)
80+
81+
82+
def test_copy_dataset(
    capsys, project_id, destination_dataset_id, source_dataset_id, to_delete_configs
):
    """End-to-end check: the sample creates a transfer config and prints its name."""
    overrides = {
        "destination_project_id": project_id,
        "destination_dataset_id": destination_dataset_id,
        "source_project_id": project_id,
        "source_dataset_id": source_dataset_id,
    }
    transfer_config = copy_dataset.copy_dataset(overrides)
    # Register for cleanup before asserting, so a failure still deletes it.
    to_delete_configs.append(transfer_config.name)
    captured = capsys.readouterr()
    assert transfer_config.name in captured.out

bigquery-datatransfer/snippets/quickstart_test.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,10 @@
1616

1717
import pytest
1818

19-
import quickstart
19+
from . import quickstart
2020

2121

22-
PROJECT = os.environ['GOOGLE_CLOUD_PROJECT']
22+
PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
2323

2424

2525
@pytest.fixture
@@ -32,4 +32,4 @@ def mock_project_id():
3232
def test_quickstart(capsys, mock_project_id):
3333
quickstart.run_quickstart(mock_project_id)
3434
out, _ = capsys.readouterr()
35-
assert 'Supported Data Sources:' in out
35+
assert "Supported Data Sources:" in out
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
1+
google-cloud-bigquery==2.6.0
12
pytest==6.0.1
23
mock==4.0.2

0 commit comments

Comments
 (0)