Skip to content

Commit 2d363d5

Browse files
author
Jonathan Wayne Parrott
committed
Merge pull request #74 from GoogleCloudPlatform/py3-tests
Enabling tests for python 3, fixing all python 3 incompatibilities.
2 parents 25d0540 + 79ed631 commit 2d363d5

17 files changed

+257
-184
lines changed

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,4 @@ coverage-gae.json
66
coverage.xml
77
nosetests.xml
88
python-docs-samples.json
9+
__pycache__

README.md

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@ For more detailed introduction to a product, check the README in the correspondi
1313

1414
## Testing
1515

16+
The tests in this repository run against live services; therefore, it takes a bit
17+
of configuration to run all of the tests locally.
18+
1619
### Local setup
1720

1821
Before you can run tests locally you must have:
@@ -26,7 +29,9 @@ Before you can run tests locally you must have:
2629
$ curl https://sdk.cloud.google.com | bash
2730

2831
* Most tests require you to have an active, billing-enabled project on the [Google Developers Console](https://console.developers.google.com).
32+
2933
* You will need a set of [Service Account Credentials](https://console.developers.google.com/project/_/apiui/credential) for your project in ``json`` form.
34+
3035
* Set the environment variables appropriately for your project.
3136

3237
$ export GOOGLE_APPLICATION_CREDENTIALS=your-service-account-json-file
@@ -43,6 +48,17 @@ If you want to run the Google App Engine tests, you will need:
4348

4449
$ export GAE_PYTHONPATH=~/google-cloud-sdk/platform/google_appengine
4550

51+
To run the BigQuery tests, you'll need to create a BigQuery dataset:
52+
53+
* Create a dataset in your project named `test_dataset`.
54+
* Create a table named `test_table2`, upload ``tests/resources/data.csv`` and give it the following schema:
55+
56+
Name STRING
57+
Age INTEGER
58+
Weight FLOAT
59+
IsMagic BOOLEAN
60+
61+
4662
### Test environments
4763

4864
We use [tox](https://tox.readthedocs.org/en/latest/) to configure multiple python environments:

bigquery/samples/async_query.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -11,14 +11,13 @@
1111
# See the License for the specific language governing permissions and
1212
# limitations under the License.
1313
#
14-
from __future__ import print_function # For python 2/3 interoperability
15-
1614
import json
1715
import uuid
1816

1917
from bigquery.samples.utils import get_service
2018
from bigquery.samples.utils import paging
2119
from bigquery.samples.utils import poll_job
20+
from six.moves import input
2221

2322

2423
# [START async_query]
@@ -70,13 +69,13 @@ def run(project_id, query_string, batch, num_retries, interval):
7069

7170
# [START main]
7271
def main():
73-
project_id = raw_input("Enter the project ID: ")
74-
query_string = raw_input("Enter the Bigquery SQL Query: ")
75-
batch = raw_input("Run query as batch (y/n)?: ") in (
72+
project_id = input("Enter the project ID: ")
73+
query_string = input("Enter the Bigquery SQL Query: ")
74+
batch = input("Run query as batch (y/n)?: ") in (
7675
'True', 'true', 'y', 'Y', 'yes', 'Yes')
77-
num_retries = int(raw_input(
76+
num_retries = int(input(
7877
"Enter number of times to retry in case of 500 error: "))
79-
interval = raw_input(
78+
interval = input(
8079
"Enter how often to poll the query for completion (seconds): ")
8180

8281
for result in run(project_id, query_string, batch, num_retries, interval):

bigquery/samples/export_data_to_cloud_storage.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515

1616
from bigquery.samples.utils import get_service
1717
from bigquery.samples.utils import poll_job
18+
from six.moves import input
1819

1920

2021
# [START export_table]
@@ -82,19 +83,19 @@ def run(cloud_storage_path,
8283

8384
# [START main]
8485
def main():
85-
projectId = raw_input("Enter the project ID: ")
86-
datasetId = raw_input("Enter a dataset ID: ")
87-
tableId = raw_input("Enter a table name to copy: ")
88-
cloud_storage_path = raw_input(
86+
projectId = input("Enter the project ID: ")
87+
datasetId = input("Enter a dataset ID: ")
88+
tableId = input("Enter a table name to copy: ")
89+
cloud_storage_path = input(
8990
"Enter a Google Cloud Storage URI: ")
90-
interval = raw_input(
91+
interval = input(
9192
"Enter how often to poll the job (in seconds): ")
92-
num_retries = raw_input(
93+
num_retries = input(
9394
"Enter the number of retries in case of 500 error: ")
9495

9596
run(cloud_storage_path,
9697
projectId, datasetId, tableId,
9798
num_retries, interval)
9899

99-
print 'Done exporting!'
100+
print('Done exporting!')
100101
# [END main]

bigquery/samples/list_datasets_projects.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -31,19 +31,18 @@
3131
where <project-id> is the id of the developers console [3] project you'd like
3232
to list the bigquery datasets and projects for.
3333
34-
[1] https://developers.google.com/identity/protocols/application-default-credentials#howtheywork
34+
[1] https://developers.google.com/identity/protocols/\
35+
application-default-credentials#howtheywork
3536
[2] https://cloud.google.com/sdk/
3637
[3] https://console.developers.google.com
3738
""" # NOQA
3839

3940
import argparse
4041
from pprint import pprint
4142

42-
from urllib2 import HTTPError
43-
4443
from apiclient import discovery
45-
4644
from oauth2client.client import GoogleCredentials
45+
from six.moves.urllib.error import HTTPError
4746

4847

4948
# [START list_datasets]

bigquery/samples/load_data_by_post.py

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -14,10 +14,9 @@
1414
import json
1515

1616
from bigquery.samples.utils import get_service, poll_job
17-
1817
import httplib2
19-
2018
from oauth2client.client import GoogleCredentials
19+
from six.moves import input
2120

2221

2322
# [START make_post]
@@ -75,16 +74,16 @@ def make_post(http, schema, data, projectId, datasetId, tableId):
7574
def main():
7675
credentials = GoogleCredentials.get_application_default()
7776
http = credentials.authorize(httplib2.Http())
78-
projectId = raw_input('Enter the project ID: ')
79-
datasetId = raw_input('Enter a dataset ID: ')
80-
tableId = raw_input('Enter a table name to load the data to: ')
81-
schema_path = raw_input(
77+
projectId = input('Enter the project ID: ')
78+
datasetId = input('Enter a dataset ID: ')
79+
tableId = input('Enter a table name to load the data to: ')
80+
schema_path = input(
8281
'Enter the path to the schema file for the table: ')
8382

8483
with open(schema_path, 'r') as schema_file:
8584
schema = schema_file.read()
8685

87-
data_path = raw_input('Enter the path to the data file: ')
86+
data_path = input('Enter the path to the data file: ')
8887

8988
with open(data_path, 'r') as data_file:
9089
data = data_file.read()

bigquery/samples/load_data_from_csv.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
import uuid
1616

1717
from bigquery.samples.utils import get_service, poll_job
18+
from six.moves import input
1819

1920

2021
# [START load_table]
@@ -81,20 +82,20 @@ def run(source_schema, source_csv,
8182

8283
# [START main]
8384
def main():
84-
projectId = raw_input("Enter the project ID: ")
85-
datasetId = raw_input("Enter a dataset ID: ")
86-
tableId = raw_input("Enter a destination table name: ")
85+
projectId = input("Enter the project ID: ")
86+
datasetId = input("Enter a dataset ID: ")
87+
tableId = input("Enter a destination table name: ")
8788

88-
schema_file_path = raw_input(
89+
schema_file_path = input(
8990
"Enter the path to the table schema: ")
9091
with open(schema_file_path, 'r') as schema_file:
9192
schema = json.load(schema_file)
9293

93-
data_file_path = raw_input(
94+
data_file_path = input(
9495
"Enter the Cloud Storage path for the CSV file: ")
95-
num_retries = raw_input(
96+
num_retries = input(
9697
"Enter number of times to retry in case of 500 error: ")
97-
interval = raw_input(
98+
interval = input(
9899
"Enter how often to poll the query for completion (seconds): ")
99100
run(schema,
100101
data_file_path,

bigquery/samples/streaming.py

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -11,13 +11,12 @@
1111
# See the License for the specific language governing permissions and
1212
# limitations under the License.
1313
#
14-
from __future__ import print_function
15-
1614
import ast
1715
import json
1816
import uuid
1917

2018
from bigquery.samples.utils import get_service
19+
from six.moves import input
2120

2221

2322
# [START stream_row_to_bigquery]
@@ -57,18 +56,18 @@ def run(project_id, dataset_id, table_id, rows, num_retries):
5756

5857
# [START main]
5958
def get_rows():
60-
line = raw_input("Enter a row (python dict) into the table: ")
59+
line = input("Enter a row (python dict) into the table: ")
6160
while line:
6261
yield ast.literal_eval(line)
63-
line = raw_input("Enter another row into the table \n" +
64-
"[hit enter to stop]: ")
62+
line = input("Enter another row into the table \n" +
63+
"[hit enter to stop]: ")
6564

6665

6766
def main():
68-
project_id = raw_input("Enter the project ID: ")
69-
dataset_id = raw_input("Enter a dataset ID: ")
70-
table_id = raw_input("Enter a table ID : ")
71-
num_retries = int(raw_input(
67+
project_id = input("Enter the project ID: ")
68+
dataset_id = input("Enter a dataset ID: ")
69+
table_id = input("Enter a table ID : ")
70+
num_retries = int(input(
7271
"Enter number of times to retry in case of 500 error: "))
7372

7473
for result in run(project_id, dataset_id, table_id,

bigquery/samples/sync_query.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,10 @@
1111
# See the License for the specific language governing permissions and
1212
# limitations under the License.
1313
#
14-
from __future__ import print_function # For python 2/3 interoperability
15-
1614
import json
1715

1816
from bigquery.samples.utils import get_service, paging
17+
from six.moves import input
1918

2019

2120
# [START sync_query]
@@ -49,12 +48,12 @@ def run(project_id, query, timeout, num_retries):
4948

5049
# [START main]
5150
def main():
52-
project_id = raw_input("Enter the project ID: ")
53-
query_string = raw_input("Enter the Bigquery SQL Query: ")
54-
timeout = raw_input(
51+
project_id = input("Enter the project ID: ")
52+
query_string = input("Enter the Bigquery SQL Query: ")
53+
timeout = input(
5554
"Enter how long to wait for the query to complete in milliseconds"
5655
"\n (if longer than 10 seconds, use an asynchronous query): ")
57-
num_retries = int(raw_input(
56+
num_retries = int(input(
5857
"Enter how many times to retry in case of server error"))
5958

6059
for result in run(project_id, query_string, timeout, num_retries):

bigquery/tests/test_async_query.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,9 @@ def test_async_query_runner(self):
3737
test_project_id = os.environ.get(tests.PROJECT_ID_ENV)
3838
answers = [test_project_id, self.constants['query'], 'n',
3939
'1', '1']
40-
with tests.mock_raw_input(answers):
40+
41+
with tests.mock_input_answers(
42+
answers, target='bigquery.samples.async_query.input'):
4143
main()
4244

4345

compute/autoscaler/demo/frontend.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,15 @@
2222
autoscaler/demo/tests/test_frontend.py
2323
"""
2424

25-
import BaseHTTPServer
25+
try:
26+
import BaseHTTPServer
27+
import SocketServer
28+
except:
29+
import http.server as BaseHTTPServer
30+
import socketserver as SocketServer
31+
2632
from multiprocessing import Process
2733
import os
28-
import SocketServer
2934
import sys
3035
import time
3136

monitoring/samples/auth.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,8 @@
3535
3636
$ export GOOGLE_APPLICATION_CREDENTIALS=/path/to/json-key.json
3737
38-
[1] https://developers.google.com/identity/protocols/application-default-credentials
38+
[1] https://developers.google.com/identity/protocols/\
39+
application-default-credentials
3940
[2] https://console.developers.google.com/project/_/apiui/credential
4041
""" # NOQA
4142

@@ -62,19 +63,19 @@ def ListTimeseries(project_name, service):
6263

6364
timeseries = service.timeseries()
6465

65-
print 'Timeseries.list raw response:'
66+
print('Timeseries.list raw response:')
6667
try:
6768
response = timeseries.list(
6869
project=project_name, metric=METRIC, youngest=YOUNGEST).execute()
6970

70-
print json.dumps(response,
71+
print(json.dumps(response,
7172
sort_keys=True,
7273
indent=4,
73-
separators=(',', ': '))
74+
separators=(',', ': ')))
7475
except:
75-
print 'Error:'
76+
print('Error:')
7677
for error in sys.exc_info():
77-
print error
78+
print(error)
7879

7980

8081
def main(project_name):
@@ -87,7 +88,7 @@ def main(project_name):
8788

8889
if __name__ == '__main__':
8990
if len(sys.argv) != 2:
90-
print "Usage: %s <project-name>" % sys.argv[0]
91+
print("Usage: {} <project-name>".format(sys.argv[0]))
9192
sys.exit(1)
9293
main(sys.argv[1])
9394
# [END all]

storage/compose_objects.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -73,8 +73,8 @@ def main(argv):
7373
name=filename,
7474
bucket=args.bucket)
7575
resp = req.execute()
76-
print '> Uploaded source file %s' % filename
77-
print json.dumps(resp, indent=2)
76+
print('> Uploaded source file {}'.format(filename))
77+
print(json.dumps(resp, indent=2))
7878

7979
# Construct a request to compose the source files into the destination.
8080
compose_req_body = {
@@ -88,17 +88,17 @@ def main(argv):
8888
destinationObject=args.destination,
8989
body=compose_req_body)
9090
resp = req.execute()
91-
print '> Composed files into %s' % args.destination
92-
print json.dumps(resp, indent=2)
91+
print('> Composed files into {}'.format(args.destination))
92+
print(json.dumps(resp, indent=2))
9393

9494
# Download and print the composed object.
9595
req = service.objects().get_media(
9696
bucket=args.bucket,
9797
object=args.destination)
9898

9999
res = req.execute()
100-
print '> Composed file contents:'
101-
print res
100+
print('> Composed file contents:')
101+
print(res)
102102

103103

104104
if __name__ == '__main__':

0 commit comments

Comments
 (0)