Commit f3f569e

Jon Wayne Parrott authored and committed
Standardizing docstrings across bigquery samples.
1 parent c0da62e

9 files changed: 135 additions & 62 deletions

bigquery/README.md
Lines changed: 22 additions & 0 deletions

@@ -2,6 +2,28 @@
 
 This section contains samples for [Google BigQuery](https://cloud.google.com/bigquery).
 
+## Running the samples
+
+In order to run these samples, your environment must be set up with [authentication
+information](https://developers.google.com/identity/protocols/application-default-credentials#howtheywork). If you're running them in your local development environment and you have the [Google Cloud SDK](https://cloud.google.com/sdk/) installed, you can do this easily by running:
+
+    $ gcloud auth login
+
+## Additional resources
+
+For more information on BigQuery you can visit:
+
+> https://developers.google.com/bigquery
+
+For more information on the BigQuery API Python library surface you
+can visit:
+
+> https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/
+
+For information on the Python Client Library visit:
+
+> https://developers.google.com/api-client-library/python
 
 ## Other Samples
 
 * [Using BigQuery from Google App Engine](../appengine/bigquery).
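
The `$ gcloud auth login` step above works because these samples rely on application default credentials rather than hard-coded keys. As a minimal sketch of what that looks like on the client side (assuming `google-api-python-client` and `oauth2client` are installed, the same libraries the samples in this commit import), a script picks the credentials up like this:

```python
# Minimal sketch, not part of this commit: how a sample obtains
# application default credentials and builds a BigQuery API client.
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

# Locally this picks up the credentials established by `gcloud auth login`;
# on Google Cloud it uses the environment's built-in service account.
credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)
print(bigquery)  # client is ready for datasets(), jobs(), tabledata(), ...
```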

bigquery/api/async_query.py
Mode changed: 100644 → 100755
Lines changed: 14 additions & 2 deletions

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -10,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application to perform an asynchronous query in BigQuery.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/querying-data#asyncqueries
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import json
 import time
@@ -103,7 +114,8 @@ def main(project_id, query_string, batch, num_retries, interval):
 # [START main]
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Loads data into BigQuery.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
     parser.add_argument('query', help='BigQuery SQL Query.')
     parser.add_argument(
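
The change repeated in this file and the ones below is the same argparse pattern: drop the hand-written one-line description and reuse the module docstring as the `--help` text. A small, self-contained sketch of the pattern (a hypothetical script, not one of the files in this commit):

```python
#!/usr/bin/env python

"""Hypothetical sample docstring.

Everything in this docstring becomes the --help description verbatim.
"""

import argparse

if __name__ == '__main__':
    # RawDescriptionHelpFormatter preserves the docstring's own line breaks
    # instead of re-wrapping them into a single paragraph.
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('project_id', help='Your Google Cloud project ID.')
    args = parser.parse_args()
    print(args.project_id)
```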

bigquery/api/export_data_to_cloud_storage.py
Mode changed: 100644 → 100755
Lines changed: 16 additions & 3 deletions

@@ -1,4 +1,6 @@
-# Copyright 2015, Google, Inc.
+#!/usr/bin/env python
+
+# Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,7 +12,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application to export a table from BigQuery to Google Cloud
+Storage.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/exporting-data-from-bigquery
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import time
 import uuid
@@ -113,7 +125,8 @@ def main(cloud_storage_path, project_id, dataset_id, table_id,
 # [START main]
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Exports data from BigQuery to Google Cloud Storage.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
     parser.add_argument('dataset_id', help='BigQuery dataset to export.')
     parser.add_argument('table_id', help='BigQuery table to export.')
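
The new docstring describes an export (extract) job, and the `uuid` import visible in the hunk above is what the sample uses to build a unique job ID. A rough, hypothetical sketch of the kind of job submission involved (placeholder project, dataset, table, and bucket names; the real file's structure may differ):

```python
# Hypothetical sketch of submitting a BigQuery extract job; not the
# literal contents of export_data_to_cloud_storage.py.
import uuid

from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

job_data = {
    'jobReference': {
        'projectId': 'your-project-id',          # placeholder
        'jobId': str(uuid.uuid4()),              # unique job ID
    },
    'configuration': {
        'extract': {
            'sourceTable': {
                'projectId': 'your-project-id',  # placeholder
                'datasetId': 'your_dataset',     # placeholder
                'tableId': 'your_table',         # placeholder
            },
            'destinationUris': ['gs://your-bucket/export-*.csv'],
        },
    },
}
job = bigquery.jobs().insert(
    projectId='your-project-id', body=job_data).execute()
print(job['jobReference']['jobId'])
```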

bigquery/api/getting_started.py
Mode changed: 100644 → 100755
Lines changed: 11 additions & 5 deletions

@@ -13,11 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Sample for making BigQuery queries using the python sdk.
+"""Command-line application that demonstrates basic BigQuery API usage.
 
-This is a command-line script that queries a public shakespeare dataset, and
-displays the 10 of Shakespeare's works with the greatest number of distinct
-words.
+This sample queries a public shakespeare dataset and displays 10 of
+Shakespeare's works with the greatest number of distinct words.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/bigquery-api-quickstart
+
+For more information, see the README.md under /bigquery.
 """
 # [START all]
 import argparse
@@ -63,7 +68,8 @@ def main(project_id):
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Queries the public BigQuery Shakespeare dataset.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud Project ID.')
 
     args = parser.parse_args()
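
The quickstart page referenced by the new docstring runs a single synchronous query against the public shakespeare table. A hypothetical sketch of that kind of call (the exact query and output handling in getting_started.py may differ):

```python
# Hypothetical sketch of a quickstart-style query; not the literal
# contents of getting_started.py.
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

query_body = {
    'query': (
        'SELECT TOP(corpus, 10) AS title, COUNT(*) AS unique_words '
        'FROM [publicdata:samples.shakespeare];')
}
response = bigquery.jobs().query(
    projectId='your-project-id', body=query_body).execute()  # placeholder ID

for row in response.get('rows', []):
    print(row['f'][0]['v'], row['f'][1]['v'])
```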

bigquery/api/list_datasets_projects.py
Mode changed: 100644 → 100755
Lines changed: 15 additions & 43 deletions

@@ -1,4 +1,5 @@
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,44 +12,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
-"""Command-line skeleton application for BigQuery API.
-
-This is the sample for this page:
-
-https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects
-
-In order to run it, your environment must be setup with authentication
-information [1]. If you're running it in your local development environment and
-you have the Google Cloud SDK [2] installed, you can do this easily by running:
-
-$ gcloud auth login
 
-Usage:
+"""Command-line application to list all projects and datasets in BigQuery.
 
-$ python list_datasets_projects.py <project-id>
+This sample is used on this page:
 
-where <project-id> is the id of the developers console [3] project you'd like
-to list the bigquery datasets and projects for.
-
-[1] https://developers.google.com/identity/protocols/\
-application-default-credentials#howtheywork
-[2] https://cloud.google.com/sdk/
-[3] https://console.developers.google.com
-
-For more information on the BigQuery API you can visit:
-
-https://developers.google.com/bigquery/docs/overview
-
-For more information on the BigQuery API Python library surface you
-can visit:
-
-https://developers.google.com/resources/api-libraries/documentation/
-bigquery/v2/python/latest/
-
-For information on the Python Client Library visit:
+https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects
 
-https://developers.google.com/api-client-library/python/start/get_started
+For more information, see the README.md under /bigquery.
 """
 
 import argparse
@@ -60,9 +31,9 @@
 
 
 # [START list_datasets]
-def list_datasets(service, project):
+def list_datasets(bigquery, project):
     try:
-        datasets = service.datasets()
+        datasets = bigquery.datasets()
         list_reply = datasets.list(projectId=project).execute()
         print('Dataset list:')
         pprint(list_reply)
@@ -74,10 +45,10 @@ def list_datasets(service, project):
 
 
 # [START list_projects]
-def list_projects(service):
+def list_projects(bigquery):
     try:
         # Start training on a data set
-        projects = service.projects()
+        projects = bigquery.projects()
        list_reply = projects.list().execute()
 
         print('Project list:')
@@ -92,14 +63,15 @@ def list_projects(service):
 def main(project_id):
     credentials = GoogleCredentials.get_application_default()
     # Construct the service object for interacting with the BigQuery API.
-    service = discovery.build('bigquery', 'v2', credentials=credentials)
+    bigquery = discovery.build('bigquery', 'v2', credentials=credentials)
 
-    list_datasets(service, project_id)
-    list_projects(service)
+    list_datasets(bigquery, project_id)
+    list_projects(bigquery)
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Lists BigQuery datasets and projects.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='the project id to list.')
 
     args = parser.parse_args()
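
Besides standardizing the docstring, this file renames the `service` handle to `bigquery`. A condensed sketch of the two listing calls with the renamed handle (the real file wraps each call in try/except and prints with pprint):

```python
# Condensed sketch of the listing calls; error handling from the real
# file is omitted here.
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

# List datasets in one project, then list all projects visible to the
# authenticated account.
datasets = bigquery.datasets().list(projectId='your-project-id').execute()
projects = bigquery.projects().list().execute()
print(datasets)
print(projects)
```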

bigquery/api/load_data_by_post.py
Mode changed: 100644 → 100755
Lines changed: 14 additions & 2 deletions

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -10,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application that loads data into BigQuery via HTTP POST.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/loading-data-post-request
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import json
 import time
@@ -124,7 +135,8 @@ def main(project_id, dataset_id, table_name, schema_path, data_path):
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Loads data into BigQuery.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
     parser.add_argument('dataset_id', help='A BigQuery dataset ID.')
     parser.add_argument(

bigquery/api/load_data_from_csv.py
Mode changed: 100644 → 100755
Lines changed: 15 additions & 3 deletions

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -10,7 +12,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application that loads data into BigQuery from a CSV file in
+Google Cloud Storage.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/loading-data-into-bigquery#loaddatagcs
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import json
 import time
@@ -119,8 +131,8 @@ def main(project_id, dataset_id, table_name, schema_file, data_path,
 # [START main]
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Loads data into BigQuery from a CSV file in Google '
-                    'Cloud Storage.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
     parser.add_argument('dataset_id', help='A BigQuery dataset ID.')
     parser.add_argument(
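
Like the export sample, the load-from-GCS sample boils down to one job insertion; the `schema_file` parameter in the hunk header shows the real script reads its schema from a file. A hedged sketch of the load configuration involved (placeholder names and an inline schema for brevity):

```python
# Hypothetical sketch of a BigQuery load job from a CSV file in Google
# Cloud Storage; not the literal contents of load_data_from_csv.py.
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

job_data = {
    'configuration': {
        'load': {
            'sourceUris': ['gs://your-bucket/data.csv'],   # placeholder
            'schema': {'fields': [
                {'name': 'name', 'type': 'STRING'},
                {'name': 'count', 'type': 'INTEGER'},
            ]},
            'destinationTable': {
                'projectId': 'your-project-id',            # placeholder
                'datasetId': 'your_dataset',               # placeholder
                'tableId': 'your_table',                   # placeholder
            },
        },
    },
}
job = bigquery.jobs().insert(
    projectId='your-project-id', body=job_data).execute()
print(job['status'])
```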

bigquery/api/streaming.py
Mode changed: 100644 → 100755
Lines changed: 14 additions & 2 deletions

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -10,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application that streams data into BigQuery.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/streaming-data-into-bigquery
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import ast
 import json
@@ -66,7 +77,8 @@ def get_rows():
 # [START main]
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Streams data into BigQuery from the command line.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
    parser.add_argument('dataset_id', help='A BigQuery dataset ID.')
     parser.add_argument(
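
Streaming inserts use `tabledata().insertAll()` rather than a load job. A minimal hedged sketch of that call (placeholder identifiers; the real file builds its rows through the `get_rows()` helper shown in the hunk header above):

```python
# Minimal sketch of a streaming insert; not the literal contents of
# streaming.py.
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials

credentials = GoogleCredentials.get_application_default()
bigquery = discovery.build('bigquery', 'v2', credentials=credentials)

insert_all_data = {
    'rows': [{
        'json': {'name': 'example', 'count': 1},  # one row of table data
        'insertId': 'row-0',                      # optional de-duplication key
    }],
}
response = bigquery.tabledata().insertAll(
    projectId='your-project-id',   # placeholder
    datasetId='your_dataset',      # placeholder
    tableId='your_table',          # placeholder
    body=insert_all_data).execute()
print(response)
```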

bigquery/api/sync_query.py
Mode changed: 100644 → 100755
Lines changed: 14 additions & 2 deletions

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
+
 # Copyright 2015, Google, Inc.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -10,7 +12,16 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+"""Command-line application to perform a synchronous query in BigQuery.
+
+This sample is used on this page:
+
+https://cloud.google.com/bigquery/querying-data#syncqueries
+
+For more information, see the README.md under /bigquery.
+"""
+
 import argparse
 import json
 
@@ -67,7 +78,8 @@ def main(project_id, query, timeout, num_retries):
 # [START main]
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(
-        description='Loads data into BigQuery.')
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
     parser.add_argument('project_id', help='Your Google Cloud project ID.')
     parser.add_argument('query', help='BigQuery SQL Query.')
     parser.add_argument(
