@@ -22,6 +22,7 @@
 """
 
 import argparse
+import sys
 
 # [START beta_import]
 from google.cloud import language_v1beta2
@@ -125,6 +126,66 @@ def entities_file(gcs_uri):
               entity.metadata.get('wikipedia_url', '-')))
 
 
+# [START def_entity_sentiment_text]
+def entity_sentiment_text(text):
+    """Detects entity sentiment in the provided text."""
+    client = language_v1beta2.LanguageServiceClient()
+
+    if isinstance(text, six.binary_type):
+        text = text.decode('utf-8')
+
+    document = types.Document(
+        content=text.encode('utf-8'),
+        type=enums.Document.Type.PLAIN_TEXT)
+
+    # Detect and send native Python encoding to receive correct word offsets.
+    encoding = enums.EncodingType.UTF32
+    if sys.maxunicode == 65535:
+        encoding = enums.EncodingType.UTF16
+
+    result = client.analyze_entity_sentiment(document, encoding)
+
+    for entity in result.entities:
+        print('Mentions: ')
+        print(u'Name: "{}"'.format(entity.name))
+        for mention in entity.mentions:
+            print(u'  Begin Offset : {}'.format(mention.text.begin_offset))
+            print(u'  Content : {}'.format(mention.text.content))
+            print(u'  Magnitude : {}'.format(mention.sentiment.magnitude))
+            print(u'  Sentiment : {}'.format(mention.sentiment.score))
+            print(u'  Type : {}'.format(mention.type))
+        print(u'Salience: {}'.format(entity.salience))
+        print(u'Sentiment: {}\n'.format(entity.sentiment))
+# [END def_entity_sentiment_text]
+
+
+def entity_sentiment_file(gcs_uri):
+    """Detects entity sentiment in a Google Cloud Storage file."""
+    client = language_v1beta2.LanguageServiceClient()
+
+    document = types.Document(
+        gcs_content_uri=gcs_uri,
+        type=enums.Document.Type.PLAIN_TEXT)
+
+    # Detect and send native Python encoding to receive correct word offsets.
+    encoding = enums.EncodingType.UTF32
+    if sys.maxunicode == 65535:
+        encoding = enums.EncodingType.UTF16
+
+    result = client.analyze_entity_sentiment(document, encoding)
+
+    for entity in result.entities:
+        print(u'Name: "{}"'.format(entity.name))
+        for mention in entity.mentions:
+            print(u'  Begin Offset : {}'.format(mention.text.begin_offset))
+            print(u'  Content : {}'.format(mention.text.content))
+            print(u'  Magnitude : {}'.format(mention.sentiment.magnitude))
+            print(u'  Sentiment : {}'.format(mention.sentiment.score))
+            print(u'  Type : {}'.format(mention.type))
+        print(u'Salience: {}'.format(entity.salience))
+        print(u'Sentiment: {}\n'.format(entity.sentiment))
+
+
 def syntax_text(text):
     """Detects syntax in the text."""
     client = language_v1beta2.LanguageServiceClient()
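
Both new functions follow the same shape: build a types.Document, pick the EncodingType that matches how the running interpreter indexes strings (UTF16 on narrow builds where sys.maxunicode == 65535, UTF32 otherwise) so the returned begin_offset values line up with Python string slicing, call analyze_entity_sentiment, and print per-entity and per-mention sentiment. A minimal usage sketch, assuming the module above is importable as snippets (the filename is not shown in this diff) and with placeholder inputs:

# Hedged usage sketch: the 'snippets' module name, the sample sentence and the
# gs:// URI are illustrative assumptions, not values taken from this commit.
# Requires credentials for the Cloud Natural Language API to actually run.
from snippets import entity_sentiment_file, entity_sentiment_text

entity_sentiment_text('The food was great, but the service was slow.')
entity_sentiment_file('gs://your-bucket/your-text-file.txt')
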
@@ -174,7 +235,7 @@ def syntax_file(gcs_uri):
 
 # [START def_classify_text]
 def classify_text(text):
-    """Classifies the provided text."""
+    """Classifies content categories of the provided text."""
     # [START beta_client]
     client = language_v1beta2.LanguageServiceClient()
     # [END beta_client]
@@ -197,7 +258,9 @@ def classify_text(text):
 
 # [START def_classify_file]
 def classify_file(gcs_uri):
-    """Classifies the text in a Google Cloud Storage file."""
+    """Classifies content categories of the text in a Google Cloud Storage
+    file.
+    """
     client = language_v1beta2.LanguageServiceClient()
 
     document = types.Document(
@@ -227,6 +290,14 @@ def classify_file(gcs_uri):
         'classify-file', help=classify_file.__doc__)
     classify_text_parser.add_argument('gcs_uri')
 
+    sentiment_entities_text_parser = subparsers.add_parser(
+        'sentiment-entities-text', help=entity_sentiment_text.__doc__)
+    sentiment_entities_text_parser.add_argument('text')
+
+    sentiment_entities_file_parser = subparsers.add_parser(
+        'sentiment-entities-file', help=entity_sentiment_file.__doc__)
+    sentiment_entities_file_parser.add_argument('gcs_uri')
+
     sentiment_text_parser = subparsers.add_parser(
         'sentiment-text', help=sentiment_text.__doc__)
     sentiment_text_parser.add_argument('text')
@@ -265,6 +336,10 @@ def classify_file(gcs_uri):
         syntax_text(args.text)
     elif args.command == 'syntax-file':
         syntax_file(args.gcs_uri)
+    elif args.command == 'sentiment-entities-text':
+        entity_sentiment_text(args.text)
+    elif args.command == 'sentiment-entities-file':
+        entity_sentiment_file(args.gcs_uri)
     elif args.command == 'classify-text':
         classify_text(args.text)
     elif args.command == 'classify-file':
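
The last two hunks wire the new analyses into the sample's argparse CLI: each one gets a subparser whose help string is the corresponding function's docstring, and the dispatch chain maps args.command back to the matching function. A self-contained sketch of that subparser-plus-dispatch pattern, with illustrative names rather than ones copied from the real file:

import argparse


def sentiment_entities_text_demo(text):
    """Detects entity sentiment in the provided text."""
    # Stand-in for the real API call; just echo the input.
    print('would analyze: {}'.format(text))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description=__doc__)
    subparsers = parser.add_subparsers(dest='command')

    # One subcommand per analysis; help text is pulled from the docstring.
    demo_parser = subparsers.add_parser(
        'sentiment-entities-text', help=sentiment_entities_text_demo.__doc__)
    demo_parser.add_argument('text')

    args = parser.parse_args()
    if args.command == 'sentiment-entities-text':
        sentiment_entities_text_demo(args.text)

Invoked as, for example, python demo.py sentiment-entities-text 'Some sentence.', this mirrors how the new sentiment-entities-text and sentiment-entities-file subcommands are expected to be run against the sample.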