Skip to content

Commit f461740

Browse files
jmdobrybusunkim96
authored and committed
Add Cloud Client NL API samples. [(#668)](#668)
1 parent c02d64a commit f461740

File tree

6 files changed

+239
-3
lines changed

6 files changed

+239
-3
lines changed

language/snippets/cloud-client/README.rst.in

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ product:
44
name: Google Cloud Natural Language API
55
short_name: Cloud Natural Language API
66
url: https://cloud.google.com/natural-language/docs/
7-
description: >
7+
description: >
88
The `Google Cloud Natural Language API`_ provides natural language
99
understanding technologies to developers, including sentiment analysis,
1010
entity recognition, and syntax analysis. This API is part of the larger
@@ -17,5 +17,8 @@ setup:
1717
samples:
1818
- name: Quickstart
1919
file: quickstart.py
20+
- name: Snippets
21+
file: snippets.py
22+
show_help: true
2023

2124
cloud_client_library: true

language/snippets/cloud-client/quickstart.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ def run_quickstart():
3131
sentiment = document.analyze_sentiment()
3232

3333
print('Text: {}'.format(text))
34-
print('Sentiment: {}, {}'.format(sentiment.polarity, sentiment.magnitude))
34+
print('Sentiment: {}, {}'.format(sentiment.score, sentiment.magnitude))
3535
# [END language_quickstart]
3636

3737

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
google-cloud-language==0.21.0
1+
google-cloud-language==0.22.0
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
President Obama is speaking at the White House.
Lines changed: 172 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,172 @@
1+
#!/usr/bin/env python
2+
3+
# Copyright 2016 Google, Inc.
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
17+
"""This application demonstrates how to perform basic operations with the
18+
Google Cloud Natural Language API
19+
20+
For more information, see the documentation at
https://cloud.google.com/natural-language/docs.
22+
"""
23+
24+
import argparse
25+
26+
from google.cloud import language
27+
28+
29+
def sentiment_text(text):
    """Detects sentiment in the text."""
    client = language.Client()

    # Wrap the raw string in a plain-text document. HTML input is also
    # supported via document.doc_type == language.Document.HTML.
    document = client.document_from_text(text)

    # Run sentiment analysis over the whole document.
    result = document.analyze_sentiment()

    print('Score: {}'.format(result.score))
    print('Magnitude: {}'.format(result.magnitude))
42+
43+
44+
def sentiment_file(gcs_uri):
    """Detects sentiment in the file located in Google Cloud Storage."""
    client = language.Client()

    # The content lives in Cloud Storage; build the document from its
    # gs:// URL. HTML input is also supported via
    # document.doc_type == language.Document.HTML.
    document = client.document_from_url(gcs_uri)

    # Run sentiment analysis over the whole document.
    result = document.analyze_sentiment()

    print('Score: {}'.format(result.score))
    print('Magnitude: {}'.format(result.magnitude))
57+
58+
59+
def entities_text(text):
    """Detects entities in the text."""
    client = language.Client()

    # Wrap the raw string in a plain-text document. HTML input is also
    # supported via document.doc_type == language.Document.HTML.
    document = client.document_from_text(text)

    # Extract the entities mentioned in the document.
    found = document.analyze_entities()

    for entity in found:
        print('=' * 20)
        # Print each attribute as a left-aligned "label: value" row.
        for label, value in (
                ('name', entity.name),
                ('type', entity.entity_type),
                ('wikipedia_url', entity.wikipedia_url),
                ('metadata', entity.metadata),
                ('salience', entity.salience)):
            print('{:<16}: {}'.format(label, value))
77+
78+
79+
def entities_file(gcs_uri):
    """Detects entities in the file located in Google Cloud Storage."""
    client = language.Client()

    # The content lives in Cloud Storage; build the document from its
    # gs:// URL. HTML input is also supported via
    # document.doc_type == language.Document.HTML.
    document = client.document_from_url(gcs_uri)

    # Extract the entities mentioned in the document. (The original
    # comment here said "sentiment"; this call analyzes entities.)
    found = document.analyze_entities()

    for entity in found:
        print('=' * 20)
        # Print each attribute as a left-aligned "label: value" row.
        for label, value in (
                ('name', entity.name),
                ('type', entity.entity_type),
                ('wikipedia_url', entity.wikipedia_url),
                ('metadata', entity.metadata),
                ('salience', entity.salience)):
            print('{:<16}: {}'.format(label, value))
97+
98+
99+
def syntax_text(text):
    """Detects syntax in the text."""
    client = language.Client()

    # Wrap the raw string in a plain-text document. HTML input is also
    # supported via document.doc_type == language.Document.HTML.
    document = client.document_from_text(text)

    # Tokenize the document and print one "POS: token" line per token.
    for token in document.analyze_syntax():
        print('{}: {}'.format(token.part_of_speech, token.text_content))
112+
113+
114+
def syntax_file(gcs_uri):
    """Detects syntax in the file located in Google Cloud Storage."""
    client = language.Client()

    # The content lives in Cloud Storage; build the document from its
    # gs:// URL. HTML input is also supported via
    # document.doc_type == language.Document.HTML.
    document = client.document_from_url(gcs_uri)

    # Tokenize the document and print one "POS: token" line per token.
    for token in document.analyze_syntax():
        print('{}: {}'.format(token.part_of_speech, token.text_content))
127+
128+
129+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparsers = parser.add_subparsers(dest='command')

    # (subcommand name, handler function, positional argument name).
    # Each handler's docstring doubles as the subcommand help text.
    commands = (
        ('sentiment-text', sentiment_text, 'text'),
        ('sentiment-file', sentiment_file, 'gcs_uri'),
        ('entities-text', entities_text, 'text'),
        ('entities-file', entities_file, 'gcs_uri'),
        ('syntax-text', syntax_text, 'text'),
        ('syntax-file', syntax_file, 'gcs_uri'),
    )

    handlers = {}
    for name, handler, arg_name in commands:
        subparser = subparsers.add_parser(name, help=handler.__doc__)
        subparser.add_argument(arg_name)
        handlers[name] = (handler, arg_name)

    args = parser.parse_args()

    # Dispatch to the chosen handler; like the original elif chain,
    # an unrecognized/absent command falls through and does nothing.
    if args.command in handlers:
        handler, arg_name = handlers[args.command]
        handler(getattr(args, arg_name))
Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
# Copyright 2016 Google, Inc.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
16+
import snippets
17+
18+
19+
def test_sentiment_text(cloud_config, capsys):
    """sentiment_text prints the expected score for a known sentence."""
    snippets.sentiment_text('President Obama is speaking at the White House.')
    captured, _ = capsys.readouterr()
    assert 'Score: 0.2' in captured
23+
24+
25+
def test_sentiment_file(cloud_config, capsys):
    """sentiment_file prints the expected score for the GCS fixture."""
    gcs_uri = 'gs://{}/text.txt'.format(cloud_config.storage_bucket)
    snippets.sentiment_file(gcs_uri)
    captured, _ = capsys.readouterr()
    assert 'Score: 0.2' in captured
31+
32+
33+
def test_entities_text(cloud_config, capsys):
    """entities_text reports the 'Obama' entity for a known sentence."""
    snippets.entities_text('President Obama is speaking at the White House.')
    captured, _ = capsys.readouterr()
    assert 'name' in captured
    assert ': Obama' in captured
38+
39+
40+
def test_entities_file(cloud_config, capsys):
    """entities_file reports the 'Obama' entity for the GCS fixture."""
    gcs_uri = 'gs://{}/text.txt'.format(cloud_config.storage_bucket)
    snippets.entities_file(gcs_uri)
    captured, _ = capsys.readouterr()
    assert 'name' in captured
    assert ': Obama' in captured
47+
48+
49+
def test_syntax_text(cloud_config, capsys):
    """syntax_text tags 'President' as a noun in a known sentence."""
    snippets.syntax_text('President Obama is speaking at the White House.')
    captured, _ = capsys.readouterr()
    assert 'NOUN: President' in captured
53+
54+
55+
def test_syntax_file(cloud_config, capsys):
    """syntax_file tags 'President' as a noun in the GCS fixture."""
    gcs_uri = 'gs://{}/text.txt'.format(cloud_config.storage_bucket)
    snippets.syntax_file(gcs_uri)
    captured, _ = capsys.readouterr()
    assert 'NOUN: President' in captured

0 commit comments

Comments
 (0)