Skip to content

Commit ec80cae

Browse files
authored
Language region tag update [(#1643)](GoogleCloudPlatform/python-docs-samples#1643)
1 parent 1afe461 commit ec80cae

File tree

2 files changed: +34 additions, -32 deletions

samples/snippets/quickstart.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,16 +18,16 @@
1818
def run_quickstart():
1919
# [START language_quickstart]
2020
# Imports the Google Cloud client library
21-
# [START migration_import]
21+
# [START language_python_migration_imports]
2222
from google.cloud import language
2323
from google.cloud.language import enums
2424
from google.cloud.language import types
25-
# [END migration_import]
25+
# [END language_python_migration_imports]
2626

2727
# Instantiates a client
28-
# [START migration_client]
28+
# [START language_python_migration_client]
2929
client = language.LanguageServiceClient()
30-
# [END migration_client]
30+
# [END language_python_migration_client]
3131

3232
# The text to analyze
3333
text = u'Hello, world!'

samples/snippets/snippets.py

Lines changed: 30 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
import six
3131

3232

33-
# [START def_sentiment_text]
33+
# [START language_sentiment_text]
3434
def sentiment_text(text):
3535
"""Detects sentiment in the text."""
3636
client = language.LanguageServiceClient()
@@ -39,45 +39,45 @@ def sentiment_text(text):
3939
text = text.decode('utf-8')
4040

4141
# Instantiates a plain text document.
42-
# [START migration_document_text]
43-
# [START migration_analyze_sentiment]
42+
# [START language_python_migration_document_text]
43+
# [START language_python_migration_sentiment_text]
4444
document = types.Document(
4545
content=text,
4646
type=enums.Document.Type.PLAIN_TEXT)
47-
# [END migration_document_text]
47+
# [END language_python_migration_document_text]
4848

4949
# Detects sentiment in the document. You can also analyze HTML with:
5050
# document.type == enums.Document.Type.HTML
5151
sentiment = client.analyze_sentiment(document).document_sentiment
5252

5353
print('Score: {}'.format(sentiment.score))
5454
print('Magnitude: {}'.format(sentiment.magnitude))
55-
# [END migration_analyze_sentiment]
56-
# [END def_sentiment_text]
55+
# [END language_python_migration_sentiment_text]
56+
# [END language_sentiment_text]
5757

5858

59-
# [START def_sentiment_file]
59+
# [START language_sentiment_gcs]
6060
def sentiment_file(gcs_uri):
6161
"""Detects sentiment in the file located in Google Cloud Storage."""
6262
client = language.LanguageServiceClient()
6363

6464
# Instantiates a plain text document.
65-
# [START migration_document_gcs_uri]
65+
# [START language_python_migration_document_gcs]
6666
document = types.Document(
6767
gcs_content_uri=gcs_uri,
6868
type=enums.Document.Type.PLAIN_TEXT)
69-
# [END migration_document_gcs_uri]
69+
# [END language_python_migration_document_gcs]
7070

7171
# Detects sentiment in the document. You can also analyze HTML with:
7272
# document.type == enums.Document.Type.HTML
7373
sentiment = client.analyze_sentiment(document).document_sentiment
7474

7575
print('Score: {}'.format(sentiment.score))
7676
print('Magnitude: {}'.format(sentiment.magnitude))
77-
# [END def_sentiment_file]
77+
# [END language_sentiment_gcs]
7878

7979

80-
# [START def_entities_text]
80+
# [START language_entities_text]
8181
def entities_text(text):
8282
"""Detects entities in the text."""
8383
client = language.LanguageServiceClient()
@@ -86,7 +86,7 @@ def entities_text(text):
8686
text = text.decode('utf-8')
8787

8888
# Instantiates a plain text document.
89-
# [START migration_analyze_entities]
89+
# [START language_python_migration_entities_text]
9090
document = types.Document(
9191
content=text,
9292
type=enums.Document.Type.PLAIN_TEXT)
@@ -107,11 +107,11 @@ def entities_text(text):
107107
print(u'{:<16}: {}'.format('salience', entity.salience))
108108
print(u'{:<16}: {}'.format('wikipedia_url',
109109
entity.metadata.get('wikipedia_url', '-')))
110-
# [END migration_analyze_entities]
111-
# [END def_entities_text]
110+
# [END language_python_migration_entities_text]
111+
# [END language_entities_text]
112112

113113

114-
# [START def_entities_file]
114+
# [START language_entities_gcs]
115115
def entities_file(gcs_uri):
116116
"""Detects entities in the file located in Google Cloud Storage."""
117117
client = language.LanguageServiceClient()
@@ -137,10 +137,10 @@ def entities_file(gcs_uri):
137137
print(u'{:<16}: {}'.format('salience', entity.salience))
138138
print(u'{:<16}: {}'.format('wikipedia_url',
139139
entity.metadata.get('wikipedia_url', '-')))
140-
# [END def_entities_file]
140+
# [END language_entities_gcs]
141141

142142

143-
# [START def_syntax_text]
143+
# [START language_syntax_text]
144144
def syntax_text(text):
145145
"""Detects syntax in the text."""
146146
client = language.LanguageServiceClient()
@@ -149,7 +149,7 @@ def syntax_text(text):
149149
text = text.decode('utf-8')
150150

151151
# Instantiates a plain text document.
152-
# [START migration_analyze_syntax]
152+
# [START language_python_migration_syntax_text]
153153
document = types.Document(
154154
content=text,
155155
type=enums.Document.Type.PLAIN_TEXT)
@@ -165,11 +165,11 @@ def syntax_text(text):
165165
for token in tokens:
166166
print(u'{}: {}'.format(pos_tag[token.part_of_speech.tag],
167167
token.text.content))
168-
# [END migration_analyze_syntax]
169-
# [END def_syntax_text]
168+
# [END language_python_migration_syntax_text]
169+
# [END language_syntax_text]
170170

171171

172-
# [START def_syntax_file]
172+
# [START language_syntax_gcs]
173173
def syntax_file(gcs_uri):
174174
"""Detects syntax in the file located in Google Cloud Storage."""
175175
client = language.LanguageServiceClient()
@@ -190,10 +190,10 @@ def syntax_file(gcs_uri):
190190
for token in tokens:
191191
print(u'{}: {}'.format(pos_tag[token.part_of_speech.tag],
192192
token.text.content))
193-
# [END def_syntax_file]
193+
# [END language_syntax_gcs]
194194

195195

196-
# [START def_entity_sentiment_text]
196+
# [START language_entity_sentiment_text]
197197
def entity_sentiment_text(text):
198198
"""Detects entity sentiment in the provided text."""
199199
client = language.LanguageServiceClient()
@@ -223,9 +223,10 @@ def entity_sentiment_text(text):
223223
print(u' Type : {}'.format(mention.type))
224224
print(u'Salience: {}'.format(entity.salience))
225225
print(u'Sentiment: {}\n'.format(entity.sentiment))
226-
# [END def_entity_sentiment_text]
226+
# [END language_entity_sentiment_text]
227227

228228

229+
# [START language_entity_sentiment_gcs]
229230
def entity_sentiment_file(gcs_uri):
230231
"""Detects entity sentiment in a Google Cloud Storage file."""
231232
client = language.LanguageServiceClient()
@@ -251,9 +252,10 @@ def entity_sentiment_file(gcs_uri):
251252
print(u' Type : {}'.format(mention.type))
252253
print(u'Salience: {}'.format(entity.salience))
253254
print(u'Sentiment: {}\n'.format(entity.sentiment))
255+
# [END language_entity_sentiment_gcs]
254256

255257

256-
# [START def_classify_text]
258+
# [START language_classify_text]
257259
def classify_text(text):
258260
"""Classifies content categories of the provided text."""
259261
client = language.LanguageServiceClient()
@@ -271,10 +273,10 @@ def classify_text(text):
271273
print(u'=' * 20)
272274
print(u'{:<16}: {}'.format('name', category.name))
273275
print(u'{:<16}: {}'.format('confidence', category.confidence))
274-
# [END def_classify_text]
276+
# [END language_classify_text]
275277

276278

277-
# [START def_classify_file]
279+
# [START language_classify_gcs]
278280
def classify_file(gcs_uri):
279281
"""Classifies content categories of the text in a Google Cloud Storage
280282
file.
@@ -291,7 +293,7 @@ def classify_file(gcs_uri):
291293
print(u'=' * 20)
292294
print(u'{:<16}: {}'.format('name', category.name))
293295
print(u'{:<16}: {}'.format('confidence', category.confidence))
294-
# [END def_classify_file]
296+
# [END language_classify_gcs]
295297

296298

297299
if __name__ == '__main__':

Comments (0)