diff --git a/dataproc/create_cluster.py b/dataproc/create_cluster.py
index a396ddc6c85..b4d63d2e13f 100644
--- a/dataproc/create_cluster.py
+++ b/dataproc/create_cluster.py
@@ -16,6 +16,12 @@
 # This sample walks a user through creating a Cloud Dataproc cluster using
 # the Python client library.
+#
+# This script can be run on its own:
+#   python create_cluster.py ${PROJECT_ID} ${REGION} ${CLUSTER_NAME}
+
+
+import sys
 
 # [START dataproc_create_cluster]
 from google.cloud import dataproc_v1 as dataproc
 
@@ -33,7 +39,7 @@ def create_cluster(project_id, region, cluster_name):
 
     # Create a client with the endpoint set to the desired cluster region.
     cluster_client = dataproc.ClusterControllerClient(client_options={
-        'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region)
+        'api_endpoint': f'{region}-dataproc.googleapis.com:443',
     })
 
     # Create the cluster config.
@@ -57,5 +63,15 @@ def create_cluster(project_id, region, cluster_name):
     result = operation.result()
 
     # Output a success message.
-    print('Cluster created successfully: {}'.format(result.cluster_name))
+    print(f'Cluster created successfully: {result.cluster_name}')
 # [END dataproc_create_cluster]
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 4:
+        sys.exit('python create_cluster.py project_id region cluster_name')
+
+    project_id = sys.argv[1]
+    region = sys.argv[2]
+    cluster_name = sys.argv[3]
+    create_cluster(project_id, region, cluster_name)
diff --git a/dataproc/create_cluster_test.py b/dataproc/create_cluster_test.py
index 04274579273..72ffce2bf06 100644
--- a/dataproc/create_cluster_test.py
+++ b/dataproc/create_cluster_test.py
@@ -31,7 +31,7 @@ def teardown():
     yield
 
     cluster_client = dataproc.ClusterControllerClient(client_options={
-        'api_endpoint': '{}-dataproc.googleapis.com:443'.format(REGION)
+        'api_endpoint': f'{REGION}-dataproc.googleapis.com:443'
     })
     # Client library function
     operation = cluster_client.delete_cluster(PROJECT_ID, REGION, CLUSTER_NAME)
diff --git a/dataproc/instantiate_inline_workflow_template.py b/dataproc/instantiate_inline_workflow_template.py
index d492506bc72..f9358376f9f 100644
--- a/dataproc/instantiate_inline_workflow_template.py
+++ b/dataproc/instantiate_inline_workflow_template.py
@@ -16,9 +16,11 @@
 # workflow for Cloud Dataproc using the Python client library.
 #
 # This script can be run on its own:
-#   python workflows.py ${PROJECT_ID} ${REGION}
+#   python instantiate_inline_workflow_template.py ${PROJECT_ID} ${REGION}
+
 
 import sys
+
 # [START dataproc_instantiate_inline_workflow_template]
 from google.cloud import dataproc_v1 as dataproc
 
@@ -35,7 +37,8 @@ def instantiate_inline_workflow_template(project_id, region):
     # Create a client with the endpoint set to the desired region.
     workflow_template_client = dataproc.WorkflowTemplateServiceClient(
         client_options={
-            'api_endpoint': '{}-dataproc.googleapis.com:443'.format(region)}
+            'api_endpoint': f'{region}-dataproc.googleapis.com:443'
+        }
     )
 
     parent = workflow_template_client.region_path(project_id, region)
@@ -91,8 +94,14 @@ def instantiate_inline_workflow_template(project_id, region):
 
     # Output a success message.
     print('Workflow ran successfully.')
-# [END dataproc_instantiate_inline_workflow_template]
+    # [END dataproc_instantiate_inline_workflow_template]
 
 
 if __name__ == "__main__":
-    instantiate_inline_workflow_template(sys.argv[1], sys.argv[2])
+    if len(sys.argv) < 3:
+        sys.exit('python instantiate_inline_workflow_template.py '
+                 + 'project_id region')
+
+    project_id = sys.argv[1]
+    region = sys.argv[2]
+    instantiate_inline_workflow_template(project_id, region)
diff --git a/dataproc/quickstart/quickstart_test.py b/dataproc/quickstart/quickstart_test.py
index 5361449ff35..a38019d9b18 100644
--- a/dataproc/quickstart/quickstart_test.py
+++ b/dataproc/quickstart/quickstart_test.py
@@ -15,11 +15,12 @@
 import os
 import uuid
 import pytest
-import subprocess
 
 from google.cloud import dataproc_v1 as dataproc
 from google.cloud import storage
 
+import quickstart
+
 
 PROJECT_ID = os.environ['GCLOUD_PROJECT']
 REGION = 'us-central1'
@@ -60,15 +61,9 @@ def setup_teardown():
         bucket.delete()
 
 
-def test_quickstart():
-    command = [
-        'python', 'quickstart/quickstart.py',
-        '--project_id', PROJECT_ID,
-        '--region', REGION,
-        '--cluster_name', CLUSTER_NAME,
-        '--job_file_path', JOB_FILE_PATH
-    ]
-    out = subprocess.check_output(command).decode("utf-8")
+def test_quickstart(capsys):
+    quickstart.quickstart(PROJECT_ID, REGION, CLUSTER_NAME, JOB_FILE_PATH)
+    out, _ = capsys.readouterr()
 
     assert 'Cluster created successfully' in out
     assert 'Submitted job' in out