
Commit 2d77d1f

happyhuman authored and busunkim96 committed
Moved the imports and region tags inside the functions (#1891)

* Moved the imports and region tags inside the functions
* Removed the unnecessary imports
* Added the missing import (six) to the functions
* Removed the extra whitespaces
* Changes based on Alix's comments:
  - Sample files no longer have input arguments
  - Input texts and URIs are hard coded
  - Unit tests are modified accordingly
* Remove extra whitespace
* Removed extra whitespaces
* Removed unused import
* Removed the extra + signs
1 parent 9a80642 commit 2d77d1f
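
For context, the layout this commit applies to every snippet in snippets.py looks roughly like the sketch below, shown for sentiment_text. The hard-coded input, the region tag placement, and the imports are taken from the diff that follows; the analysis body in the middle is the pre-existing sample code, lightly paraphrased, with the nested language_python_migration_* tags omitted for brevity.

def sentiment_text():
    # [START language_sentiment_text]
    # Imports now live inside the function so the region tag captures them.
    import six
    from google.cloud import language
    from google.cloud.language import enums
    from google.cloud.language import types

    # The input is hard coded instead of being passed in as an argument.
    text = 'Hello, world!'

    client = language.LanguageServiceClient()

    if isinstance(text, six.binary_type):
        text = text.decode('utf-8')

    # Instantiates a plain text document.
    document = types.Document(
        content=text,
        type=enums.Document.Type.PLAIN_TEXT)

    # Detects sentiment in the document.
    sentiment = client.analyze_sentiment(document).document_sentiment

    print('Score: {}'.format(sentiment.score))
    print('Magnitude: {}'.format(sentiment.magnitude))
    # [END language_sentiment_text]

The same layout is repeated for the GCS variants, which hard-code a gs://cloud-samples-data/language/... URI instead of a text string.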

File tree: 2 files changed, +120 / -97 lines changed


language/snippets/cloud-client/v1/snippets.py

Lines changed: 107 additions & 67 deletions
@@ -1,6 +1,6 @@
 #!/usr/bin/env python

-# Copyright 2016 Google, Inc.
+# Copyright 2018 Google, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,15 +24,16 @@
 import argparse
 import sys

-from google.cloud import language
-from google.cloud.language import enums
-from google.cloud.language import types
-import six

+def sentiment_text():
+    # [START language_sentiment_text]
+    import six
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types
+
+    text = 'Hello, world!'

-# [START language_sentiment_text]
-def sentiment_text(text):
-    """Detects sentiment in the text."""
     client = language.LanguageServiceClient()

     if isinstance(text, six.binary_type):
@@ -51,12 +52,17 @@ def sentiment_text(text):
     print('Score: {}'.format(sentiment.score))
     print('Magnitude: {}'.format(sentiment.magnitude))
     # [END language_python_migration_sentiment_text]
-# [END language_sentiment_text]
+    # [END language_sentiment_text]
+
+
+def sentiment_file():
+    # [START language_sentiment_gcs]
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types

+    gcs_uri = 'gs://cloud-samples-data/language/hello.txt'

-# [START language_sentiment_gcs]
-def sentiment_file(gcs_uri):
-    """Detects sentiment in the file located in Google Cloud Storage."""
     client = language.LanguageServiceClient()

     # Instantiates a plain text document.
@@ -72,12 +78,18 @@ def sentiment_file(gcs_uri):

     print('Score: {}'.format(sentiment.score))
     print('Magnitude: {}'.format(sentiment.magnitude))
-# [END language_sentiment_gcs]
+    # [END language_sentiment_gcs]
+
+
+def entities_text():
+    # [START language_entities_text]
+    import six
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types

+    text = 'President Kennedy spoke at the White House.'

-# [START language_entities_text]
-def entities_text(text):
-    """Detects entities in the text."""
     client = language.LanguageServiceClient()

     if isinstance(text, six.binary_type):
@@ -105,12 +117,17 @@ def entities_text(text):
         print(u'{:<16}: {}'.format('wikipedia_url',
               entity.metadata.get('wikipedia_url', '-')))
     # [END language_python_migration_entities_text]
-# [END language_entities_text]
+    # [END language_entities_text]


-# [START language_entities_gcs]
-def entities_file(gcs_uri):
-    """Detects entities in the file located in Google Cloud Storage."""
+def entities_file():
+    # [START language_entities_gcs]
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types
+
+    gcs_uri = 'gs://cloud-samples-data/language/president.txt'
+
     client = language.LanguageServiceClient()

     # Instantiates a plain text document.
@@ -131,12 +148,18 @@ def entities_file(gcs_uri):
         print(u'{:<16}: {}'.format('salience', entity.salience))
         print(u'{:<16}: {}'.format('wikipedia_url',
               entity.metadata.get('wikipedia_url', '-')))
-# [END language_entities_gcs]
+    # [END language_entities_gcs]
+

+def syntax_text():
+    # [START language_syntax_text]
+    import six
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types
+
+    text = 'President Kennedy spoke at the White House.'

-# [START language_syntax_text]
-def syntax_text(text):
-    """Detects syntax in the text."""
     client = language.LanguageServiceClient()

     if isinstance(text, six.binary_type):
@@ -157,12 +180,17 @@ def syntax_text(text):
         print(u'{}: {}'.format(part_of_speech_tag.name,
                                token.text.content))
     # [END language_python_migration_syntax_text]
-# [END language_syntax_text]
+    # [END language_syntax_text]
+

+def syntax_file():
+    # [START language_syntax_gcs]
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types
+
+    gcs_uri = 'gs://cloud-samples-data/language/president.txt'

-# [START language_syntax_gcs]
-def syntax_file(gcs_uri):
-    """Detects syntax in the file located in Google Cloud Storage."""
     client = language.LanguageServiceClient()

     # Instantiates a plain text document.
@@ -178,12 +206,18 @@ def syntax_file(gcs_uri):
         part_of_speech_tag = enums.PartOfSpeech.Tag(token.part_of_speech.tag)
         print(u'{}: {}'.format(part_of_speech_tag.name,
                                token.text.content))
-# [END language_syntax_gcs]
+    # [END language_syntax_gcs]
+
+
+def entity_sentiment_text():
+    # [START language_entity_sentiment_text]
+    import six
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types

+    text = 'President Kennedy spoke at the White House.'

-# [START language_entity_sentiment_text]
-def entity_sentiment_text(text):
-    """Detects entity sentiment in the provided text."""
     client = language.LanguageServiceClient()

     if isinstance(text, six.binary_type):
@@ -211,12 +245,17 @@ def entity_sentiment_text(text):
             print(u' Type : {}'.format(mention.type))
         print(u'Salience: {}'.format(entity.salience))
         print(u'Sentiment: {}\n'.format(entity.sentiment))
-# [END language_entity_sentiment_text]
+    # [END language_entity_sentiment_text]
+
+
+def entity_sentiment_file():
+    # [START language_entity_sentiment_gcs]
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types

+    gcs_uri = 'gs://cloud-samples-data/language/president.txt'

-# [START language_entity_sentiment_gcs]
-def entity_sentiment_file(gcs_uri):
-    """Detects entity sentiment in a Google Cloud Storage file."""
     client = language.LanguageServiceClient()

     document = types.Document(
@@ -240,12 +279,20 @@ def entity_sentiment_file(gcs_uri):
             print(u' Type : {}'.format(mention.type))
         print(u'Salience: {}'.format(entity.salience))
         print(u'Sentiment: {}\n'.format(entity.sentiment))
-# [END language_entity_sentiment_gcs]
+    # [END language_entity_sentiment_gcs]


-# [START language_classify_text]
-def classify_text(text):
-    """Classifies content categories of the provided text."""
+def classify_text():
+    # [START language_classify_text]
+    import six
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types
+
+    text = 'Android is a mobile operating system developed by Google, ' \
+           'based on the Linux kernel and designed primarily for ' \
+           'touchscreen mobile devices such as smartphones and tablets.'
+
     client = language.LanguageServiceClient()

     if isinstance(text, six.binary_type):
@@ -261,14 +308,17 @@ def classify_text(text):
         print(u'=' * 20)
         print(u'{:<16}: {}'.format('name', category.name))
         print(u'{:<16}: {}'.format('confidence', category.confidence))
-# [END language_classify_text]
+    # [END language_classify_text]
+
+
+def classify_file():
+    # [START language_classify_gcs]
+    from google.cloud import language
+    from google.cloud.language import enums
+    from google.cloud.language import types

+    gcs_uri = 'gs://cloud-samples-data/language/android.txt'

-# [START language_classify_gcs]
-def classify_file(gcs_uri):
-    """Classifies content categories of the text in a Google Cloud Storage
-    file.
-    """
     client = language.LanguageServiceClient()

     document = types.Document(
@@ -281,7 +331,7 @@ def classify_file(gcs_uri):
         print(u'=' * 20)
         print(u'{:<16}: {}'.format('name', category.name))
         print(u'{:<16}: {}'.format('confidence', category.confidence))
-# [END language_classify_gcs]
+    # [END language_classify_gcs]


 if __name__ == '__main__':
@@ -292,63 +342,53 @@ def classify_file(gcs_uri):

     classify_text_parser = subparsers.add_parser(
         'classify-text', help=classify_text.__doc__)
-    classify_text_parser.add_argument('text')

     classify_text_parser = subparsers.add_parser(
         'classify-file', help=classify_file.__doc__)
-    classify_text_parser.add_argument('gcs_uri')

     sentiment_entities_text_parser = subparsers.add_parser(
         'sentiment-entities-text', help=entity_sentiment_text.__doc__)
-    sentiment_entities_text_parser.add_argument('text')

     sentiment_entities_file_parser = subparsers.add_parser(
         'sentiment-entities-file', help=entity_sentiment_file.__doc__)
-    sentiment_entities_file_parser.add_argument('gcs_uri')

     sentiment_text_parser = subparsers.add_parser(
         'sentiment-text', help=sentiment_text.__doc__)
-    sentiment_text_parser.add_argument('text')

     sentiment_file_parser = subparsers.add_parser(
         'sentiment-file', help=sentiment_file.__doc__)
-    sentiment_file_parser.add_argument('gcs_uri')

     entities_text_parser = subparsers.add_parser(
         'entities-text', help=entities_text.__doc__)
-    entities_text_parser.add_argument('text')

     entities_file_parser = subparsers.add_parser(
         'entities-file', help=entities_file.__doc__)
-    entities_file_parser.add_argument('gcs_uri')

     syntax_text_parser = subparsers.add_parser(
         'syntax-text', help=syntax_text.__doc__)
-    syntax_text_parser.add_argument('text')

     syntax_file_parser = subparsers.add_parser(
         'syntax-file', help=syntax_file.__doc__)
-    syntax_file_parser.add_argument('gcs_uri')

     args = parser.parse_args()

     if args.command == 'sentiment-text':
-        sentiment_text(args.text)
+        sentiment_text()
     elif args.command == 'sentiment-file':
-        sentiment_file(args.gcs_uri)
+        sentiment_file()
     elif args.command == 'entities-text':
-        entities_text(args.text)
+        entities_text()
     elif args.command == 'entities-file':
-        entities_file(args.gcs_uri)
+        entities_file()
     elif args.command == 'syntax-text':
-        syntax_text(args.text)
+        syntax_text()
     elif args.command == 'syntax-file':
-        syntax_file(args.gcs_uri)
+        syntax_file()
     elif args.command == 'sentiment-entities-text':
-        entity_sentiment_text(args.text)
+        entity_sentiment_text()
     elif args.command == 'sentiment-entities-file':
-        entity_sentiment_file(args.gcs_uri)
+        entity_sentiment_file()
     elif args.command == 'classify-text':
-        classify_text(args.text)
+        classify_text()
     elif args.command == 'classify-file':
-        classify_file(args.gcs_uri)
+        classify_file()
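
Because the inputs are now hard coded, the subcommands defined in the __main__ block take no positional arguments. A quick usage check (assuming the google-cloud-language client library is installed and application default credentials for a project with the Natural Language API enabled are set up):

    python snippets.py sentiment-text
    python snippets.py classify-file

Each command simply runs the corresponding function against the sample text or gs://cloud-samples-data/language/... file baked into it.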
