@@ -1,6 +1,6 @@
 #!/usr/bin/env python

-# Copyright 2017 Google, Inc.
+# Copyright 2016 Google, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -22,7 +22,6 @@
 """

 import argparse
-import sys

 # [START beta_import]
 from google.cloud import language_v1beta2
@@ -173,9 +172,9 @@ def syntax_file(gcs_uri):
                                token.text.content))


-# [START def_entity_sentiment_text]
-def entity_sentiment_text(text):
-    """Detects entity sentiment in the provided text."""
+# [START def_classify_text]
+def classify_text(text):
+    """Classifies the provided text."""
     # [START beta_client]
     client = language_v1beta2.LanguageServiceClient()
     # [END beta_client]
@@ -187,52 +186,31 @@ def entity_sentiment_text(text):
         content=text.encode('utf-8'),
         type=enums.Document.Type.PLAIN_TEXT)

-    # Pass in encoding type to get useful offsets in the response.
-    encoding = enums.EncodingType.UTF32
-    if sys.maxunicode == 65535:
-        encoding = enums.EncodingType.UTF16
-
-    result = client.analyze_entity_sentiment(document, encoding)
-
-    for entity in result.entities:
-        print('Mentions: ')
-        print(u'Name: "{}"'.format(entity.name))
-        for mention in entity.mentions:
-            print(u' Begin Offset : {}'.format(mention.text.begin_offset))
-            print(u' Content : {}'.format(mention.text.content))
-            print(u' Magnitude : {}'.format(mention.sentiment.magnitude))
-            print(u' Sentiment : {}'.format(mention.sentiment.score))
-            print(u' Type : {}'.format(mention.type))
-        print(u'Salience: {}'.format(entity.salience))
-        print(u'Sentiment: {}\n'.format(entity.sentiment))
-# [END def_entity_sentiment_text]
-
-
-def entity_sentiment_file(gcs_uri):
-    """Detects entity sentiment in a Google Cloud Storage file."""
+    categories = client.classify_text(document).categories
+
+    for category in categories:
+        print(u'=' * 20)
+        print(u'{:<16}: {}'.format('name', category.name))
+        print(u'{:<16}: {}'.format('confidence', category.confidence))
+# [END def_classify_text]
+
+
+# [START def_classify_file]
+def classify_file(gcs_uri):
+    """Classifies the text in a Google Cloud Storage file."""
     client = language_v1beta2.LanguageServiceClient()

     document = types.Document(
         gcs_content_uri=gcs_uri,
         type=enums.Document.Type.PLAIN_TEXT)

-    # Pass in encoding type to get useful offsets in the response.
-    encoding = enums.EncodingType.UTF32
-    if sys.maxunicode == 65535:
-        encoding = enums.EncodingType.UTF16
-
-    result = client.analyze_entity_sentiment(document, encoding)
+    categories = client.classify_text(document).categories

-    for entity in result.entities:
-        print(u'Name: "{}"'.format(entity.name))
-        for mention in entity.mentions:
-            print(u' Begin Offset : {}'.format(mention.text.begin_offset))
-            print(u' Content : {}'.format(mention.text.content))
-            print(u' Magnitude : {}'.format(mention.sentiment.magnitude))
-            print(u' Sentiment : {}'.format(mention.sentiment.score))
-            print(u' Type : {}'.format(mention.type))
-        print(u'Salience: {}'.format(entity.salience))
-        print(u'Sentiment: {}\n'.format(entity.sentiment))
+    for category in categories:
+        print(u'=' * 20)
+        print(u'{:<16}: {}'.format('name', category.name))
+        print(u'{:<16}: {}'.format('confidence', category.confidence))
+# [END def_classify_file]


 if __name__ == '__main__':
@@ -241,13 +219,13 @@ def entity_sentiment_file(gcs_uri):
         formatter_class=argparse.RawDescriptionHelpFormatter)
     subparsers = parser.add_subparsers(dest='command')

-    sentiment_entities_text_parser = subparsers.add_parser(
-        'sentiment-entities-text', help=entity_sentiment_text.__doc__)
-    sentiment_entities_text_parser.add_argument('text')
+    classify_text_parser = subparsers.add_parser(
+        'classify-text', help=classify_text.__doc__)
+    classify_text_parser.add_argument('text')

-    sentiment_entities_file_parser = subparsers.add_parser(
-        'sentiment-entities-file', help=entity_sentiment_file.__doc__)
-    sentiment_entities_file_parser.add_argument('gcs_uri')
+    classify_file_parser = subparsers.add_parser(
+        'classify-file', help=classify_file.__doc__)
+    classify_file_parser.add_argument('gcs_uri')
251229
252230 sentiment_text_parser = subparsers .add_parser (
253231 'sentiment-text' , help = sentiment_text .__doc__ )
@@ -287,7 +265,7 @@ def entity_sentiment_file(gcs_uri):
         syntax_text(args.text)
     elif args.command == 'syntax-file':
         syntax_file(args.gcs_uri)
-    elif args.command == 'sentiment-entities-text':
-        entity_sentiment_text(args.text)
-    elif args.command == 'sentiment-entities-file':
-        entity_sentiment_file(args.gcs_uri)
+    elif args.command == 'classify-text':
+        classify_text(args.text)
+    elif args.command == 'classify-file':
+        classify_file(args.gcs_uri)
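For readers skimming the diff, below is a minimal standalone sketch of the classify_text call that the new samples wrap. It assumes the google-cloud-language package with the v1beta2 client is installed and application default credentials are configured; the classify helper name and the sample passage are illustrative only, while the Document construction and classify_text usage mirror the added code above.

from google.cloud import language_v1beta2
from google.cloud.language_v1beta2 import enums, types


def classify(text):
    # Assumes application default credentials (for example,
    # GOOGLE_APPLICATION_CREDENTIALS) are configured.
    client = language_v1beta2.LanguageServiceClient()

    # classify_text takes a plain-text (or HTML) Document, like the other
    # analyze_* calls in this file.
    document = types.Document(
        content=text.encode('utf-8'),
        type=enums.Document.Type.PLAIN_TEXT)

    # Each returned category has a path-style name and a confidence score;
    # the service generally needs a reasonably long passage to classify.
    for category in client.classify_text(document).categories:
        print(u'{:<16}: {}'.format('name', category.name))
        print(u'{:<16}: {}'.format('confidence', category.confidence))


if __name__ == '__main__':
    classify('Google Cloud Natural Language API provides natural language '
             'understanding technologies, such as sentiment analysis, entity '
             'recognition, and text annotations, to developers.')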