Skip to content

Commit de307f3

Browse files
tetiana-karasova, parthea, and gcf-owl-bot[bot]
authored and committed
test: fix for product import tests (#156)
* feat: product_number variable is replaced with product_id Import product code samples are added * use project_number in search_simple_query.py * use project_number in search_with_boost_spec.py * use project_number in search_with_filtering.py * use project number in search_with_ordering.py * use project_number in search_with_pagination.py * use project_number in search_with_query_expansion_spec.py * use project_number in search_with_facet_spec.py * use correct path to resources * revert change to paths * resolve error where bq table doesn't exist * use google.cloud.bigquery client * fix for failed tests * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * move samples/interactive-tutorials/product/setup to move samples/interactive-tutorials/product/setup_product * allow import_products_bq_test to run concurrently * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add dependency for test * remove tests for setup script Co-authored-by: Anthonios Partheniou <[email protected]> Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 19a3fe3 commit de307f3

14 files changed

+150
-177
lines changed

generated_samples/interactive-tutorials/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -124,7 +124,7 @@ The bucket name must be unique. For convenience, you can name it `<YOUR_PROJECT_
124124
1. To create the bucket and upload the JSON file, run the following command in the Terminal:
125125

126126
```bash
127-
python product/setup/create_gcs_bucket.py
127+
python product/setup_product/create_gcs_bucket.py
128128
```
129129

130130
Now you can see the bucket is created in the [Cloud Storage](https://console.cloud.google.com/storage/browser), and the files are uploaded.
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# Copyright 2021 Google Inc. All Rights Reserved.
1+
# Copyright 2022 Google Inc. All Rights Reserved.
22
#
33
# Licensed under the Apache License, Version 2.0 (the "License");
44
# you may not use this file except in compliance with the License.
@@ -12,14 +12,14 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
import os
15+
import pytest
16+
import test_utils.prefixer
1617

17-
from setup_cleanup import delete_bucket
18+
prefixer = test_utils.prefixer.Prefixer(
19+
"python-retail", "samples/interactive-tutorials/product"
20+
)
1821

1922

20-
def delete_bucket_by_name(name: str):
21-
if name is None:
22-
bucket_name = os.environ["BUCKET_NAME"]
23-
delete_bucket(bucket_name)
24-
else:
25-
delete_bucket(name)
23+
@pytest.fixture(scope="session")
24+
def table_id_prefix() -> str:
25+
return prefixer.create_prefix()

generated_samples/interactive-tutorials/product/import_products_big_query_table.py

Lines changed: 78 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -12,90 +12,104 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
# [START retail_import_products_from_big_query]
16-
# Import products into a catalog from big query table using Retail API
17-
#
15+
import argparse
1816
import os
19-
import time
20-
21-
from google.cloud.retail import (
22-
BigQuerySource,
23-
ImportProductsRequest,
24-
ProductInputConfig,
25-
ProductServiceClient,
26-
)
2717

28-
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
2918
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
3019

31-
default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
32-
dataset_id = "products"
33-
table_id = "products"
3420

21+
def main(project_id, dataset_id, table_id):
22+
# [START retail_import_products_from_big_query]
23+
# TODO: Set project_id to your Google Cloud Platform project ID.
24+
# project_id = "my-project"
3525

36-
# TO CHECK ERROR HANDLING USE THE TABLE WITH INVALID PRODUCTS:
37-
# table_id = "products_some_invalid"
26+
# TODO: Set dataset_id
27+
# dataset_id = "products"
3828

29+
# TODO: Set dataset_id
30+
# table_id = "products"
3931

40-
# get import products from big query request
41-
def get_import_products_big_query_request(reconciliation_mode):
42-
# TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE:
43-
# default_catalog = "invalid_catalog_name"
44-
big_query_source = BigQuerySource()
45-
big_query_source.project_id = project_id
46-
big_query_source.dataset_id = dataset_id
47-
big_query_source.table_id = table_id
48-
big_query_source.data_schema = "product"
32+
# Import products into a catalog from big query table using Retail API
33+
import time
4934

50-
input_config = ProductInputConfig()
51-
input_config.big_query_source = big_query_source
35+
from google.cloud.retail import (
36+
BigQuerySource,
37+
ImportProductsRequest,
38+
ProductInputConfig,
39+
ProductServiceClient,
40+
)
5241

53-
import_request = ImportProductsRequest()
54-
import_request.parent = default_catalog
55-
import_request.reconciliation_mode = reconciliation_mode
56-
import_request.input_config = input_config
42+
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"
5743

58-
print("---import products from big query table request---")
59-
print(import_request)
44+
# TO CHECK ERROR HANDLING USE THE TABLE WITH INVALID PRODUCTS:
45+
# table_id = "products_some_invalid"
6046

61-
return import_request
47+
# get import products from big query request
48+
def get_import_products_big_query_request(reconciliation_mode):
49+
# TO CHECK ERROR HANDLING PASTE THE INVALID CATALOG NAME HERE:
50+
# default_catalog = "invalid_catalog_name"
51+
big_query_source = BigQuerySource()
52+
big_query_source.project_id = project_id
53+
big_query_source.dataset_id = dataset_id
54+
big_query_source.table_id = table_id
55+
big_query_source.data_schema = "product"
6256

57+
input_config = ProductInputConfig()
58+
input_config.big_query_source = big_query_source
6359

64-
# call the Retail API to import products
65-
def import_products_from_big_query():
66-
# TRY THE FULL RECONCILIATION MODE HERE:
67-
reconciliation_mode = ImportProductsRequest.ReconciliationMode.INCREMENTAL
60+
import_request = ImportProductsRequest()
61+
import_request.parent = default_catalog
62+
import_request.reconciliation_mode = reconciliation_mode
63+
import_request.input_config = input_config
6864

69-
import_big_query_request = get_import_products_big_query_request(
70-
reconciliation_mode
71-
)
72-
big_query_operation = ProductServiceClient().import_products(
73-
import_big_query_request
74-
)
65+
print("---import products from big query table request---")
66+
print(import_request)
7567

76-
print("---the operation was started:----")
77-
print(big_query_operation.operation.name)
68+
return import_request
7869

79-
while not big_query_operation.done():
80-
print("---please wait till operation is done---")
81-
time.sleep(30)
82-
print("---import products operation is done---")
70+
# call the Retail API to import products
71+
def import_products_from_big_query():
72+
# TRY THE FULL RECONCILIATION MODE HERE:
73+
reconciliation_mode = ImportProductsRequest.ReconciliationMode.INCREMENTAL
8374

84-
if big_query_operation.metadata is not None:
85-
print("---number of successfully imported products---")
86-
print(big_query_operation.metadata.success_count)
87-
print("---number of failures during the importing---")
88-
print(big_query_operation.metadata.failure_count)
89-
else:
90-
print("---operation.metadata is empty---")
75+
import_big_query_request = get_import_products_big_query_request(
76+
reconciliation_mode
77+
)
78+
big_query_operation = ProductServiceClient().import_products(
79+
import_big_query_request
80+
)
9181

92-
if big_query_operation.result is not None:
93-
print("---operation result:---")
94-
print(big_query_operation.result())
95-
else:
96-
print("---operation.result is empty---")
82+
print("---the operation was started:----")
83+
print(big_query_operation.operation.name)
9784

85+
while not big_query_operation.done():
86+
print("---please wait till operation is done---")
87+
time.sleep(30)
88+
print("---import products operation is done---")
89+
90+
if big_query_operation.metadata is not None:
91+
print("---number of successfully imported products---")
92+
print(big_query_operation.metadata.success_count)
93+
print("---number of failures during the importing---")
94+
print(big_query_operation.metadata.failure_count)
95+
else:
96+
print("---operation.metadata is empty---")
97+
98+
if big_query_operation.result is not None:
99+
print("---operation result:---")
100+
print(big_query_operation.result())
101+
else:
102+
print("---operation.result is empty---")
103+
104+
import_products_from_big_query()
98105

99-
import_products_from_big_query()
100106

101107
# [END retail_import_products_from_big_query]
108+
109+
110+
if __name__ == "__main__":
111+
parser = argparse.ArgumentParser()
112+
parser.add_argument("dataset_id")
113+
parser.add_argument("table_id")
114+
args = parser.parse_args()
115+
main(project_id, args.dataset_id, args.table_id)

generated_samples/interactive-tutorials/product/import_products_bq_test.py

Lines changed: 27 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,17 +15,41 @@
1515
import re
1616
import subprocess
1717

18+
from setup_product.setup_cleanup import (
19+
create_bq_dataset,
20+
create_bq_table,
21+
delete_bq_table,
22+
upload_data_to_bq_table,
23+
)
24+
25+
26+
def test_import_products_bq(table_id_prefix):
27+
dataset = "products"
28+
valid_products_table = f"{table_id_prefix}products"
29+
product_schema = "../resources/product_schema.json"
30+
valid_products_source_file = "../resources/products.json"
31+
32+
create_bq_dataset(dataset)
33+
create_bq_table(dataset, valid_products_table, product_schema)
34+
upload_data_to_bq_table(
35+
dataset, valid_products_table, valid_products_source_file, product_schema
36+
)
1837

19-
def test_import_products_bq():
2038
output = str(
21-
subprocess.check_output("python import_products_big_query_table.py", shell=True)
39+
subprocess.check_output(
40+
f"python import_products_big_query_table.py {dataset} {valid_products_table}",
41+
shell=True,
42+
)
2243
)
2344

45+
delete_bq_table(dataset, valid_products_table)
46+
2447
assert re.match(".*import products from big query table request.*", output)
2548
assert re.match(".*the operation was started.*", output)
2649
assert re.match(
2750
".*projects/.*/locations/global/catalogs/default_catalog/branches/0/operations/import-products.*",
2851
output,
2952
)
3053

31-
assert re.match(".*number of successfully imported products.*316.*", output)
54+
assert re.match(".*number of successfully imported products.*?316.*", output)
55+
assert re.match(".*number of failures during the importing.*?0.*", output)

generated_samples/interactive-tutorials/product/import_products_gcs.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,12 +28,11 @@
2828

2929

3030
# Read the project number from the environment variable
31-
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
3231
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
3332
bucket_name = os.environ["BUCKET_NAME"]
3433

3534
# You can change the branch here. The "default_branch" is set to point to the branch "0"
36-
default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
35+
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"
3736

3837
gcs_bucket = f"gs://{bucket_name}"
3938
gcs_errors_bucket = f"{gcs_bucket}/error"

generated_samples/interactive-tutorials/product/import_products_gcs_test.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,13 +12,22 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15+
import os
1516
import re
1617
import subprocess
1718

19+
from setup_product.setup_cleanup import create_bucket, delete_bucket, upload_blob
20+
1821

1922
def test_import_products_gcs():
23+
bucket_name = os.environ["BUCKET_NAME"]
24+
create_bucket(bucket_name)
25+
upload_blob(bucket_name, "../resources/products.json")
26+
2027
output = str(subprocess.check_output("python import_products_gcs.py", shell=True))
2128

29+
delete_bucket(bucket_name)
30+
2231
assert re.match(".*import products from google cloud source request.*", output)
2332
assert re.match('.*input_uris: "gs://.*/products.json".*', output)
2433
assert re.match(".*the operation was started.*", output)
@@ -27,4 +36,5 @@ def test_import_products_gcs():
2736
output,
2837
)
2938

30-
assert re.match(".*number of successfully imported products.*316.*", output)
39+
assert re.match(".*number of successfully imported products.*?316.*", output)
40+
assert re.match(".*number of failures during the importing.*?0.*", output)

generated_samples/interactive-tutorials/product/import_products_inline_source.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,9 @@
3333
from google.protobuf.field_mask_pb2 import FieldMask
3434

3535
# Read the project number from the environment variable
36-
project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"]
36+
project_id = os.environ["GOOGLE_CLOUD_PROJECT"]
3737

38-
default_catalog = f"projects/{project_number}/locations/global/catalogs/default_catalog/branches/default_branch"
38+
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog/branches/default_branch"
3939

4040

4141
# prepare product to import as inline source

generated_samples/interactive-tutorials/product/import_products_inline_test.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,4 +28,5 @@ def test_import_products_gcs():
2828
output,
2929
)
3030

31-
assert re.match(".*number of successfully imported products.*2.*", output)
31+
assert re.match(".*number of successfully imported products.*?2.*", output)
32+
assert re.match(".*number of failures during the importing.*?0.*", output)
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
11
pytest==6.2.5
22
pytest-xdist==2.5.0
3+
google-cloud-testutils==1.3.1

generated_samples/interactive-tutorials/product/setup/products_create_bigquery_table_test.py

Lines changed: 0 additions & 45 deletions
This file was deleted.

0 commit comments

Comments (0)