
Commit ec71b23

munkhuushmgl authored and dandhlee committed
chore: removed unused and duplicate samples (#127)
* chore: removed unused and duplicate samples
* updated req.txt with new version
* restored table set_endpoint
* formatted all
* removed unused imports
1 parent 873c323 · commit ec71b23


41 files changed (+106 −832 lines)

automl/beta/automl_vision_create_model_test.py

Lines changed: 0 additions & 43 deletions
This file was deleted.

automl/beta/automl_vision_model.py

Lines changed: 0 additions & 93 deletions
This file was deleted.

automl/beta/requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-google-cloud-automl==2.0.0
+google-cloud-automl==2.1.0
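The pin moves google-cloud-automl from 2.0.0 to 2.1.0. A minimal sketch, assuming a Python 3.8+ environment where the requirements have been installed, to confirm the installed distribution matches the new pin (importlib.metadata is standard library):

    # Check the installed google-cloud-automl version against the pin above.
    from importlib.metadata import version

    installed = version("google-cloud-automl")
    assert installed == "2.1.0", f"expected 2.1.0, found {installed}"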

automl/snippets/automl_translation_dataset.py

Lines changed: 3 additions & 170 deletions
@@ -25,138 +25,6 @@
 import os
 
 
-def create_dataset(project_id, compute_region, dataset_name, source, target):
-    """Create a dataset."""
-    # [START automl_translate_create_dataset]
-    # TODO(developer): Uncomment and set the following variables
-    # project_id = 'PROJECT_ID_HERE'
-    # compute_region = 'COMPUTE_REGION_HERE'
-    # dataset_name = 'DATASET_NAME_HERE'
-    # source = 'LANGUAGE_CODE_OF_SOURCE_LANGUAGE'
-    # target = 'LANGUAGE_CODE_OF_TARGET_LANGUAGE'
-
-    from google.cloud import automl_v1beta1 as automl
-
-    client = automl.AutoMlClient()
-
-    # A resource that represents Google Cloud Platform location.
-    project_location = f"projects/{project_id}/locations/{compute_region}"
-
-    # Specify the source and target language.
-    dataset_metadata = {
-        "source_language_code": source,
-        "target_language_code": target,
-    }
-    # Set dataset name and dataset metadata
-    my_dataset = {
-        "display_name": dataset_name,
-        "translation_dataset_metadata": dataset_metadata,
-    }
-
-    # Create a dataset with the dataset metadata in the region.
-    dataset = client.create_dataset(parent=project_location, dataset=my_dataset)
-
-    # Display the dataset information
-    print("Dataset name: {}".format(dataset.name))
-    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
-    print("Dataset display name: {}".format(dataset.display_name))
-    print("Translation dataset Metadata:")
-    print(
-        "\tsource_language_code: {}".format(
-            dataset.translation_dataset_metadata.source_language_code
-        )
-    )
-    print(
-        "\ttarget_language_code: {}".format(
-            dataset.translation_dataset_metadata.target_language_code
-        )
-    )
-    print("Dataset create time: {}".format(dataset.create_time))
-
-    # [END automl_translate_create_dataset]
-
-
-def list_datasets(project_id, compute_region, filter_):
-    """List Datasets."""
-    # [START automl_translate_list_datasets]
-    # TODO(developer): Uncomment and set the following variables
-    # project_id = 'PROJECT_ID_HERE'
-    # compute_region = 'COMPUTE_REGION_HERE'
-    # filter_ = 'filter expression here'
-
-    from google.cloud import automl_v1beta1 as automl
-
-    client = automl.AutoMlClient()
-
-    # A resource that represents Google Cloud Platform location.
-    project_location = f"projects/{project_id}/locations/{compute_region}"
-
-    # List all the datasets available in the region by applying filter.
-    request = automl.ListDatasetsRequest(parent=project_location, filter=filter_)
-    response = client.list_datasets(request=request)
-
-    print("List of datasets:")
-    for dataset in response:
-        # Display the dataset information
-        print("Dataset name: {}".format(dataset.name))
-        print("Dataset id: {}".format(dataset.name.split("/")[-1]))
-        print("Dataset display name: {}".format(dataset.display_name))
-        print("Translation dataset metadata:")
-        print(
-            "\tsource_language_code: {}".format(
-                dataset.translation_dataset_metadata.source_language_code
-            )
-        )
-        print(
-            "\ttarget_language_code: {}".format(
-                dataset.translation_dataset_metadata.target_language_code
-            )
-        )
-        print("Dataset create time: {}".format(dataset.create_time))
-
-    # [END automl_translate_list_datasets]
-
-
-def get_dataset(project_id, compute_region, dataset_id):
-    """Get the dataset."""
-    # [START automl_translate_get_dataset]
-    # TODO(developer): Uncomment and set the following variables
-    # project_id = 'PROJECT_ID_HERE'
-    # compute_region = 'COMPUTE_REGION_HERE'
-    # dataset_id = 'DATASET_ID_HERE'
-
-    from google.cloud import automl_v1beta1 as automl
-
-    client = automl.AutoMlClient()
-
-    # Get the full path of the dataset
-    dataset_full_id = client.dataset_path(
-        project_id, compute_region, dataset_id
-    )
-
-    # Get complete detail of the dataset.
-    dataset = client.get_dataset(name=dataset_full_id)
-
-    # Display the dataset information
-    print("Dataset name: {}".format(dataset.name))
-    print("Dataset id: {}".format(dataset.name.split("/")[-1]))
-    print("Dataset display name: {}".format(dataset.display_name))
-    print("Translation dataset metadata:")
-    print(
-        "\tsource_language_code: {}".format(
-            dataset.translation_dataset_metadata.source_language_code
-        )
-    )
-    print(
-        "\ttarget_language_code: {}".format(
-            dataset.translation_dataset_metadata.target_language_code
-        )
-    )
-    print("Dataset create time: {}".format(dataset.create_time))
-
-    # [END automl_translate_get_dataset]
-
-
 def import_data(project_id, compute_region, dataset_id, path):
     """Import sentence pairs to the dataset."""
     # [START automl_translate_import_data]
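This hunk deletes the create_dataset, list_datasets, and get_dataset samples as duplicates, leaving only import_data and delete_dataset in this file. For anyone who still needs the removed behavior, a minimal sketch reconstructed from the deleted get_dataset sample (same automl_v1beta1 surface; the ID values are placeholders and Application Default Credentials are assumed):

    from google.cloud import automl_v1beta1 as automl

    project_id = "your-project-id"    # placeholder
    compute_region = "us-central1"    # placeholder region
    dataset_id = "your-dataset-id"    # placeholder

    client = automl.AutoMlClient()
    # Build the fully qualified dataset resource name, then fetch its details.
    dataset_full_id = client.dataset_path(project_id, compute_region, dataset_id)
    dataset = client.get_dataset(name=dataset_full_id)
    print(dataset.display_name, dataset.create_time)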
@@ -171,9 +39,7 @@ def import_data(project_id, compute_region, dataset_id, path):
     client = automl.AutoMlClient()
 
     # Get the full path of the dataset.
-    dataset_full_id = client.dataset_path(
-        project_id, compute_region, dataset_id
-    )
+    dataset_full_id = client.dataset_path(project_id, compute_region, dataset_id)
 
     # Get the multiple Google Cloud Storage URIs
     input_uris = path.split(",")
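The change above is formatting only (per the commit message's "formatted all"): the dataset_path call is collapsed onto a single line. For reference, dataset_path simply assembles the fully qualified resource name; a minimal sketch with placeholder values, assuming the 2.x microgenerated client where path helpers are static methods:

    from google.cloud import automl_v1beta1 as automl

    # No API call is made here; dataset_path only formats a string.
    path = automl.AutoMlClient.dataset_path("my-project", "us-central1", "TRL123")
    # path == "projects/my-project/locations/us-central1/datasets/TRL123"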
@@ -202,9 +68,7 @@ def delete_dataset(project_id, compute_region, dataset_id):
     client = automl.AutoMlClient()
 
     # Get the full path of the dataset.
-    dataset_full_id = client.dataset_path(
-        project_id, compute_region, dataset_id
-    )
+    dataset_full_id = client.dataset_path(project_id, compute_region, dataset_id)
 
     # Delete a dataset.
     response = client.delete_dataset(name=dataset_full_id)
@@ -222,21 +86,7 @@ def delete_dataset(project_id, compute_region, dataset_id):
     )
     subparsers = parser.add_subparsers(dest="command")
 
-    create_dataset_parser = subparsers.add_parser(
-        "create_dataset", help=create_dataset.__doc__
-    )
-    create_dataset_parser.add_argument("dataset_name")
-    create_dataset_parser.add_argument("source")
-    create_dataset_parser.add_argument("target")
-
-    list_datasets_parser = subparsers.add_parser(
-        "list_datasets", help=list_datasets.__doc__
-    )
-    list_datasets_parser.add_argument("filter", nargs="?", default="")
-
-    import_data_parser = subparsers.add_parser(
-        "import_data", help=import_data.__doc__
-    )
+    import_data_parser = subparsers.add_parser("import_data", help=import_data.__doc__)
     import_data_parser.add_argument("dataset_id")
     import_data_parser.add_argument("path")

@@ -245,28 +95,11 @@ def delete_dataset(project_id, compute_region, dataset_id):
     )
     delete_dataset_parser.add_argument("dataset_id")
 
-    get_dataset_parser = subparsers.add_parser(
-        "get_dataset", help=get_dataset.__doc__
-    )
-    get_dataset_parser.add_argument("dataset_id")
-
     project_id = os.environ["PROJECT_ID"]
     compute_region = os.environ["REGION_NAME"]
 
     args = parser.parse_args()
 
-    if args.command == "create_dataset":
-        create_dataset(
-            project_id,
-            compute_region,
-            args.dataset_name,
-            args.source,
-            args.target,
-        )
-    if args.command == "list_datasets":
-        list_datasets(project_id, compute_region, args.filter)
-    if args.command == "get_dataset":
-        get_dataset(project_id, compute_region, args.dataset_id)
     if args.command == "import_data":
         import_data(project_id, compute_region, args.dataset_id, args.path)
     if args.command == "delete_dataset":
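After this change the script's CLI keeps only the import_data and delete_dataset commands. A hypothetical invocation, assuming PROJECT_ID and REGION_NAME are exported as the code above requires, with placeholder dataset ID and Cloud Storage path:

    export PROJECT_ID=your-project-id
    export REGION_NAME=us-central1
    python automl_translation_dataset.py import_data YOUR_DATASET_ID gs://your-bucket/data.csv
    python automl_translation_dataset.py delete_dataset YOUR_DATASET_ID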
