
Commit e537c6f

Jon Wayne Parrott authored and dpebot committed

Remove cloud config fixture (#887)

* Remove cloud config fixture
* Fix client secrets
* Fix bigtable instance

1 parent 59a8ac5 commit e537c6f

File tree: 61 files changed (+478, -369 lines changed)

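Every file shown below gets the same mechanical change: the shared cloud_config pytest fixture is dropped, and each test module now reads its configuration from environment variables at import time (GCLOUD_PROJECT everywhere, plus CLOUD_STORAGE_BUCKET or GOOGLE_CLIENT_SECRETS where a sample needs them). A minimal sketch of the new convention follows; the FakeSample stand-in and the test body are illustrative only, not code from this commit.

import os

import pytest

# Read shared test configuration once, at import time, instead of accepting
# the removed cloud_config fixture as a test argument.
PROJECT = os.environ['GCLOUD_PROJECT']


class FakeSample(object):
    """Stand-in for a sample module such as main.py (illustrative only)."""
    PROJECTID = None


@pytest.fixture
def app():
    # Old form: def app(cloud_config, testbed): main.PROJECTID = cloud_config.project
    # New form: the module-level PROJECT constant is assigned directly.
    FakeSample.PROJECTID = PROJECT
    return FakeSample


def test_projectid_is_set(app):
    assert app.PROJECTID == os.environ['GCLOUD_PROJECT']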

appengine/standard/background/main_test.py
Lines changed: 6 additions & 2 deletions

@@ -12,16 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
+
 from mock import patch
 import pytest
 import webtest

 import main

+PROJECT = os.environ['GCLOUD_PROJECT']
+

 @pytest.fixture
-def app(cloud_config, testbed):
-    main.PROJECTID = cloud_config.project
+def app(testbed):
+    main.PROJECTID = PROJECT
     return webtest.TestApp(main.app)

appengine/standard/bigquery/main_test.py
Lines changed: 5 additions & 2 deletions

@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 from googleapiclient.http import HttpMock
@@ -21,10 +22,12 @@

 import main

+PROJECT = os.environ['GCLOUD_PROJECT']
+

 @pytest.fixture
-def app(cloud_config, testbed):
-    main.PROJECTID = cloud_config.project
+def app(testbed):
+    main.PROJECTID = PROJECT
     return webtest.TestApp(main.app)

appengine/standard/storage/api-client/main_test.py
Lines changed: 5 additions & 2 deletions

@@ -12,15 +12,18 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 import webtest

 import main

+PROJECT = os.environ['GCLOUD_PROJECT']

-def test_get(cloud_config):
-    main.BUCKET_NAME = cloud_config.project
+
+def test_get():
+    main.BUCKET_NAME = PROJECT
     app = webtest.TestApp(main.app)

     response = app.get('/')

appengine/standard/storage/appengine-client/main_test.py
Lines changed: 6 additions & 2 deletions

@@ -12,13 +12,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
+
 import webtest

 import main

+PROJECT = os.environ['GCLOUD_PROJECT']
+

-def test_get(testbed, cloud_config):
-    main.BUCKET_NAME = cloud_config.project
+def test_get(testbed):
+    main.BUCKET_NAME = PROJECT
     app = webtest.TestApp(main.app)

     response = app.get('/')

bigquery/api/async_query_test.py
Lines changed: 7 additions & 4 deletions

@@ -12,17 +12,20 @@
 # limitations under the License.

 import json
+import os

 from async_query import main

+PROJECT = os.environ['GCLOUD_PROJECT']

-def test_async_query(cloud_config, capsys):
+
+def test_async_query(capsys):
     query = (
         'SELECT corpus FROM publicdata:samples.shakespeare '
         'GROUP BY corpus;')

     main(
-        project_id=cloud_config.project,
+        project_id=PROJECT,
         query_string=query,
         batch=False,
         num_retries=5,
@@ -35,11 +38,11 @@ def test_async_query(cloud_config, capsys):
     assert json.loads(value) is not None


-def test_async_query_standard_sql(cloud_config, capsys):
+def test_async_query_standard_sql(capsys):
     query = 'SELECT [1, 2, 3] AS arr;'  # Only valid in standard SQL

     main(
-        project_id=cloud_config.project,
+        project_id=PROJECT,
         query_string=query,
         batch=False,
         num_retries=5,

bigquery/api/export_data_to_cloud_storage_test.py
Lines changed: 13 additions & 12 deletions

@@ -11,21 +11,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
+
 from gcp.testing.flaky import flaky

 from export_data_to_cloud_storage import main

+PROJECT = os.environ['GCLOUD_PROJECT']
+BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
 DATASET_ID = 'test_dataset'
 TABLE_ID = 'test_table'


 @flaky
-def test_export_table_csv(cloud_config):
-    cloud_storage_output_uri = \
-        'gs://{}/output.csv'.format(cloud_config.storage_bucket)
+def test_export_table_csv():
+    cloud_storage_output_uri = 'gs://{}/output.csv'.format(BUCKET)
     main(
         cloud_storage_output_uri,
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID,
         num_retries=5,
@@ -34,12 +37,11 @@ def test_export_table_csv(cloud_config):


 @flaky
-def test_export_table_json(cloud_config):
-    cloud_storage_output_uri = \
-        'gs://{}/output.json'.format(cloud_config.storage_bucket)
+def test_export_table_json():
+    cloud_storage_output_uri = 'gs://{}/output.json'.format(BUCKET)
     main(
         cloud_storage_output_uri,
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID,
         num_retries=5,
@@ -48,12 +50,11 @@ def test_export_table_json(cloud_config):


 @flaky
-def test_export_table_avro(cloud_config):
-    cloud_storage_output_uri = \
-        'gs://{}/output.avro'.format(cloud_config.storage_bucket)
+def test_export_table_avro():
+    cloud_storage_output_uri = 'gs://{}/output.avro'.format(BUCKET)
     main(
         cloud_storage_output_uri,
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID,
         num_retries=5,

bigquery/api/getting_started_test.py
Lines changed: 5 additions & 2 deletions

@@ -11,13 +11,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 from getting_started import main

+PROJECT = os.environ['GCLOUD_PROJECT']

-def test_main(cloud_config, capsys):
-    main(cloud_config.project)
+
+def test_main(capsys):
+    main(PROJECT)

     out, _ = capsys.readouterr()

bigquery/api/installed_app_test.py
Lines changed: 7 additions & 3 deletions

@@ -11,20 +11,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 from oauth2client.client import GoogleCredentials

 import installed_app

+PROJECT = os.environ['GCLOUD_PROJECT']
+CLIENT_SECRETS = os.environ['GOOGLE_CLIENT_SECRETS']
+

 class Namespace(object):
     def __init__(self, **kwargs):
         self.__dict__.update(kwargs)


-def test_main(cloud_config, monkeypatch, capsys):
-    installed_app.CLIENT_SECRETS = cloud_config.client_secrets
+def test_main(monkeypatch, capsys):
+    installed_app.CLIENT_SECRETS = CLIENT_SECRETS

     # Replace the user credentials flow with Application Default Credentials.
     # Unfortunately, there's no easy way to fully test the user flow.
@@ -34,7 +38,7 @@ def mock_run_flow(flow, storage, args):
     monkeypatch.setattr(installed_app.tools, 'run_flow', mock_run_flow)

     args = Namespace(
-        project_id=cloud_config.project,
+        project_id=PROJECT,
         logging_level='INFO',
         noauth_local_webserver=True)

bigquery/api/list_datasets_projects_test.py
Lines changed: 5 additions & 2 deletions

@@ -11,13 +11,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 from list_datasets_projects import main

+PROJECT = os.environ['GCLOUD_PROJECT']

-def test_main(cloud_config, capsys):
-    main(cloud_config.project)
+
+def test_main(capsys):
+    main(PROJECT)

     out, _ = capsys.readouterr()

bigquery/api/load_data_by_post_test.py
Lines changed: 6 additions & 4 deletions

@@ -11,25 +11,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
 import re

 from gcp.testing.flaky import flaky

 from load_data_by_post import load_data

+PROJECT = os.environ['GCLOUD_PROJECT']
 DATASET_ID = 'ephemeral_test_dataset'
 TABLE_ID = 'load_data_by_post'


 @flaky
-def test_load_csv_data(cloud_config, resource, capsys):
+def test_load_csv_data(resource, capsys):
     schema_path = resource('schema.json')
     data_path = resource('data.csv')

     load_data(
         schema_path,
         data_path,
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID
     )
@@ -41,14 +43,14 @@ def test_load_csv_data(cloud_config, resource, capsys):


 @flaky
-def test_load_json_data(cloud_config, resource, capsys):
+def test_load_json_data(resource, capsys):
     schema_path = resource('schema.json')
     data_path = resource('data.json')

     load_data(
         schema_path,
         data_path,
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID
     )

bigquery/api/load_data_from_csv_test.py
Lines changed: 7 additions & 4 deletions

@@ -11,22 +11,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import os
+
 from gcp.testing.flaky import flaky

 from load_data_from_csv import main

+PROJECT = os.environ['GCLOUD_PROJECT']
+BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
 DATASET_ID = 'test_dataset'
 TABLE_ID = 'test_import_table'


 @flaky
-def test_load_table(cloud_config, resource):
-    cloud_storage_input_uri = 'gs://{}/data.csv'.format(
-        cloud_config.storage_bucket)
+def test_load_table(resource):
+    cloud_storage_input_uri = 'gs://{}/data.csv'.format(BUCKET)
     schema_file = resource('schema.json')

     main(
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID,
         schema_file=schema_file,

bigquery/api/streaming_test.py
Lines changed: 4 additions & 3 deletions

@@ -12,22 +12,23 @@
 # limitations under the License.

 import json
+import os

 import streaming

-
+PROJECT = os.environ['GCLOUD_PROJECT']
 DATASET_ID = 'test_dataset'
 TABLE_ID = 'test_table'


-def test_stream_row_to_bigquery(cloud_config, resource, capsys):
+def test_stream_row_to_bigquery(resource, capsys):
     with open(resource('streamrows.json'), 'r') as rows_file:
         rows = json.load(rows_file)

     streaming.get_rows = lambda: rows

     streaming.main(
-        cloud_config.project,
+        PROJECT,
         DATASET_ID,
         TABLE_ID,
         num_retries=5)

bigquery/api/sync_query_test.py
Lines changed: 7 additions & 4 deletions

@@ -12,17 +12,20 @@
 # limitations under the License.

 import json
+import os

 from sync_query import main

+PROJECT = os.environ['GCLOUD_PROJECT']

-def test_sync_query(cloud_config, capsys):
+
+def test_sync_query(capsys):
     query = (
         'SELECT corpus FROM publicdata:samples.shakespeare '
         'GROUP BY corpus;')

     main(
-        project_id=cloud_config.project,
+        project_id=PROJECT,
         query=query,
         timeout=30,
         num_retries=5,
@@ -34,11 +37,11 @@ def test_sync_query(cloud_config, capsys):
     assert json.loads(result) is not None


-def test_sync_query_standard_sql(cloud_config, capsys):
+def test_sync_query_standard_sql(capsys):
     query = 'SELECT [1, 2, 3] AS arr;'  # Only valid in standard SQL

     main(
-        project_id=cloud_config.project,
+        project_id=PROJECT,
         query=query,
         timeout=30,
         num_retries=5,
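Taken together, the converted tests above expect their configuration in the environment rather than from a fixture: GCLOUD_PROJECT everywhere, CLOUD_STORAGE_BUCKET for the export and load samples, and GOOGLE_CLIENT_SECRETS for the installed-app sample. A fail-fast check like the following could be added to a local conftest.py before running these tests; it is only an illustration under those assumptions, not part of this commit.

import os

# Environment variables read at import time by the converted sample tests.
# Which of them are actually required depends on the samples being run.
REQUIRED_VARS = ('GCLOUD_PROJECT', 'CLOUD_STORAGE_BUCKET', 'GOOGLE_CLIENT_SECRETS')

missing = [name for name in REQUIRED_VARS if name not in os.environ]
if missing:
    raise RuntimeError(
        'Set these environment variables before running the sample tests: '
        + ', '.join(missing))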
