--- /dev/null
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google/cloud/language/v1beta2/language_service.proto
+
+/*
+Package language is a generated protocol buffer package.
+
+It is generated from these files:
+ google/cloud/language/v1beta2/language_service.proto
+
+It has these top-level messages:
+ Document
+ Sentence
+ Entity
+ Token
+ Sentiment
+ PartOfSpeech
+ DependencyEdge
+ EntityMention
+ TextSpan
+ AnalyzeSentimentRequest
+ AnalyzeSentimentResponse
+ AnalyzeEntitySentimentRequest
+ AnalyzeEntitySentimentResponse
+ AnalyzeEntitiesRequest
+ AnalyzeEntitiesResponse
+ AnalyzeSyntaxRequest
+ AnalyzeSyntaxResponse
+ AnnotateTextRequest
+ AnnotateTextResponse
+*/
+package language
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import _ "google.golang.org/genproto/googleapis/api/annotations"
+import _ "google.golang.org/genproto/googleapis/longrunning"
+import _ "github.com/golang/protobuf/ptypes/timestamp"
+import _ "google.golang.org/genproto/googleapis/rpc/status"
+
+import (
+ context "golang.org/x/net/context"
+ grpc "google.golang.org/grpc"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// Represents the text encoding that the caller uses to process the output.
+// Providing an `EncodingType` is recommended because the API provides the
+// beginning offsets for various outputs, such as tokens and mentions, and
+// languages that natively use different text encodings may access offsets
+// differently.
+type EncodingType int32
+
+const (
+ // If `EncodingType` is not specified, encoding-dependent information (such as
+ // `begin_offset`) will be set at `-1`.
+ EncodingType_NONE EncodingType = 0
+ // Encoding-dependent information (such as `begin_offset`) is calculated based
+ // on the UTF-8 encoding of the input. C++ and Go are examples of languages
+ // that use this encoding natively.
+ EncodingType_UTF8 EncodingType = 1
+ // Encoding-dependent information (such as `begin_offset`) is calculated based
+ // on the UTF-16 encoding of the input. Java and Javascript are examples of
+ // languages that use this encoding natively.
+ EncodingType_UTF16 EncodingType = 2
+ // Encoding-dependent information (such as `begin_offset`) is calculated based
+ // on the UTF-32 encoding of the input. Python is an example of a language
+ // that uses this encoding natively.
+ EncodingType_UTF32 EncodingType = 3
+)
+
+var EncodingType_name = map[int32]string{
+ 0: "NONE",
+ 1: "UTF8",
+ 2: "UTF16",
+ 3: "UTF32",
+}
+var EncodingType_value = map[string]int32{
+ "NONE": 0,
+ "UTF8": 1,
+ "UTF16": 2,
+ "UTF32": 3,
+}
+
+func (x EncodingType) String() string {
+ return proto.EnumName(EncodingType_name, int32(x))
+}
+func (EncodingType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+// The document types enum.
+type Document_Type int32
+
+const (
+ // The content type is not specified.
+ Document_TYPE_UNSPECIFIED Document_Type = 0
+ // Plain text
+ Document_PLAIN_TEXT Document_Type = 1
+ // HTML
+ Document_HTML Document_Type = 2
+)
+
+var Document_Type_name = map[int32]string{
+ 0: "TYPE_UNSPECIFIED",
+ 1: "PLAIN_TEXT",
+ 2: "HTML",
+}
+var Document_Type_value = map[string]int32{
+ "TYPE_UNSPECIFIED": 0,
+ "PLAIN_TEXT": 1,
+ "HTML": 2,
+}
+
+func (x Document_Type) String() string {
+ return proto.EnumName(Document_Type_name, int32(x))
+}
+func (Document_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} }
+
+// The type of the entity.
+type Entity_Type int32
+
+const (
+ // Unknown
+ Entity_UNKNOWN Entity_Type = 0
+ // Person
+ Entity_PERSON Entity_Type = 1
+ // Location
+ Entity_LOCATION Entity_Type = 2
+ // Organization
+ Entity_ORGANIZATION Entity_Type = 3
+ // Event
+ Entity_EVENT Entity_Type = 4
+ // Work of art
+ Entity_WORK_OF_ART Entity_Type = 5
+ // Consumer goods
+ Entity_CONSUMER_GOOD Entity_Type = 6
+ // Other types
+ Entity_OTHER Entity_Type = 7
+)
+
+var Entity_Type_name = map[int32]string{
+ 0: "UNKNOWN",
+ 1: "PERSON",
+ 2: "LOCATION",
+ 3: "ORGANIZATION",
+ 4: "EVENT",
+ 5: "WORK_OF_ART",
+ 6: "CONSUMER_GOOD",
+ 7: "OTHER",
+}
+var Entity_Type_value = map[string]int32{
+ "UNKNOWN": 0,
+ "PERSON": 1,
+ "LOCATION": 2,
+ "ORGANIZATION": 3,
+ "EVENT": 4,
+ "WORK_OF_ART": 5,
+ "CONSUMER_GOOD": 6,
+ "OTHER": 7,
+}
+
+func (x Entity_Type) String() string {
+ return proto.EnumName(Entity_Type_name, int32(x))
+}
+func (Entity_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} }
+
+// The part of speech tags enum.
+type PartOfSpeech_Tag int32
+
+const (
+ // Unknown
+ PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0
+ // Adjective
+ PartOfSpeech_ADJ PartOfSpeech_Tag = 1
+ // Adposition (preposition and postposition)
+ PartOfSpeech_ADP PartOfSpeech_Tag = 2
+ // Adverb
+ PartOfSpeech_ADV PartOfSpeech_Tag = 3
+ // Conjunction
+ PartOfSpeech_CONJ PartOfSpeech_Tag = 4
+ // Determiner
+ PartOfSpeech_DET PartOfSpeech_Tag = 5
+ // Noun (common and proper)
+ PartOfSpeech_NOUN PartOfSpeech_Tag = 6
+ // Cardinal number
+ PartOfSpeech_NUM PartOfSpeech_Tag = 7
+ // Pronoun
+ PartOfSpeech_PRON PartOfSpeech_Tag = 8
+ // Particle or other function word
+ PartOfSpeech_PRT PartOfSpeech_Tag = 9
+ // Punctuation
+ PartOfSpeech_PUNCT PartOfSpeech_Tag = 10
+ // Verb (all tenses and modes)
+ PartOfSpeech_VERB PartOfSpeech_Tag = 11
+ // Other: foreign words, typos, abbreviations
+ PartOfSpeech_X PartOfSpeech_Tag = 12
+ // Affix
+ PartOfSpeech_AFFIX PartOfSpeech_Tag = 13
+)
+
+var PartOfSpeech_Tag_name = map[int32]string{
+ 0: "UNKNOWN",
+ 1: "ADJ",
+ 2: "ADP",
+ 3: "ADV",
+ 4: "CONJ",
+ 5: "DET",
+ 6: "NOUN",
+ 7: "NUM",
+ 8: "PRON",
+ 9: "PRT",
+ 10: "PUNCT",
+ 11: "VERB",
+ 12: "X",
+ 13: "AFFIX",
+}
+var PartOfSpeech_Tag_value = map[string]int32{
+ "UNKNOWN": 0,
+ "ADJ": 1,
+ "ADP": 2,
+ "ADV": 3,
+ "CONJ": 4,
+ "DET": 5,
+ "NOUN": 6,
+ "NUM": 7,
+ "PRON": 8,
+ "PRT": 9,
+ "PUNCT": 10,
+ "VERB": 11,
+ "X": 12,
+ "AFFIX": 13,
+}
+
+func (x PartOfSpeech_Tag) String() string {
+ return proto.EnumName(PartOfSpeech_Tag_name, int32(x))
+}
+func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 0} }
+
+// The characteristic of a verb that expresses time flow during an event.
+type PartOfSpeech_Aspect int32
+
+const (
+ // Aspect is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0
+ // Perfective
+ PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1
+ // Imperfective
+ PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2
+ // Progressive
+ PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3
+)
+
+var PartOfSpeech_Aspect_name = map[int32]string{
+ 0: "ASPECT_UNKNOWN",
+ 1: "PERFECTIVE",
+ 2: "IMPERFECTIVE",
+ 3: "PROGRESSIVE",
+}
+var PartOfSpeech_Aspect_value = map[string]int32{
+ "ASPECT_UNKNOWN": 0,
+ "PERFECTIVE": 1,
+ "IMPERFECTIVE": 2,
+ "PROGRESSIVE": 3,
+}
+
+func (x PartOfSpeech_Aspect) String() string {
+ return proto.EnumName(PartOfSpeech_Aspect_name, int32(x))
+}
+func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 1} }
+
+// The grammatical function performed by a noun or pronoun in a phrase,
+// clause, or sentence. In some languages, other parts of speech, such as
+// adjective and determiner, take case inflection in agreement with the noun.
+type PartOfSpeech_Case int32
+
+const (
+ // Case is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0
+ // Accusative
+ PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1
+ // Adverbial
+ PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2
+ // Complementive
+ PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3
+ // Dative
+ PartOfSpeech_DATIVE PartOfSpeech_Case = 4
+ // Genitive
+ PartOfSpeech_GENITIVE PartOfSpeech_Case = 5
+ // Instrumental
+ PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6
+ // Locative
+ PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7
+ // Nominative
+ PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8
+ // Oblique
+ PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9
+ // Partitive
+ PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10
+ // Prepositional
+ PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11
+ // Reflexive
+ PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12
+ // Relative
+ PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13
+ // Vocative
+ PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14
+)
+
+var PartOfSpeech_Case_name = map[int32]string{
+ 0: "CASE_UNKNOWN",
+ 1: "ACCUSATIVE",
+ 2: "ADVERBIAL",
+ 3: "COMPLEMENTIVE",
+ 4: "DATIVE",
+ 5: "GENITIVE",
+ 6: "INSTRUMENTAL",
+ 7: "LOCATIVE",
+ 8: "NOMINATIVE",
+ 9: "OBLIQUE",
+ 10: "PARTITIVE",
+ 11: "PREPOSITIONAL",
+ 12: "REFLEXIVE_CASE",
+ 13: "RELATIVE_CASE",
+ 14: "VOCATIVE",
+}
+var PartOfSpeech_Case_value = map[string]int32{
+ "CASE_UNKNOWN": 0,
+ "ACCUSATIVE": 1,
+ "ADVERBIAL": 2,
+ "COMPLEMENTIVE": 3,
+ "DATIVE": 4,
+ "GENITIVE": 5,
+ "INSTRUMENTAL": 6,
+ "LOCATIVE": 7,
+ "NOMINATIVE": 8,
+ "OBLIQUE": 9,
+ "PARTITIVE": 10,
+ "PREPOSITIONAL": 11,
+ "REFLEXIVE_CASE": 12,
+ "RELATIVE_CASE": 13,
+ "VOCATIVE": 14,
+}
+
+func (x PartOfSpeech_Case) String() string {
+ return proto.EnumName(PartOfSpeech_Case_name, int32(x))
+}
+func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 2} }
+
+// Depending on the language, Form can be categorizing different forms of
+// verbs, adjectives, adverbs, etc. For example, categorizing inflected
+// endings of verbs and adjectives or distinguishing between short and long
+// forms of adjectives and participles
+type PartOfSpeech_Form int32
+
+const (
+ // Form is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0
+ // Adnomial
+ PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1
+ // Auxiliary
+ PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2
+ // Complementizer
+ PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3
+ // Final ending
+ PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4
+ // Gerund
+ PartOfSpeech_GERUND PartOfSpeech_Form = 5
+ // Realis
+ PartOfSpeech_REALIS PartOfSpeech_Form = 6
+ // Irrealis
+ PartOfSpeech_IRREALIS PartOfSpeech_Form = 7
+ // Short form
+ PartOfSpeech_SHORT PartOfSpeech_Form = 8
+ // Long form
+ PartOfSpeech_LONG PartOfSpeech_Form = 9
+ // Order form
+ PartOfSpeech_ORDER PartOfSpeech_Form = 10
+ // Specific form
+ PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11
+)
+
+var PartOfSpeech_Form_name = map[int32]string{
+ 0: "FORM_UNKNOWN",
+ 1: "ADNOMIAL",
+ 2: "AUXILIARY",
+ 3: "COMPLEMENTIZER",
+ 4: "FINAL_ENDING",
+ 5: "GERUND",
+ 6: "REALIS",
+ 7: "IRREALIS",
+ 8: "SHORT",
+ 9: "LONG",
+ 10: "ORDER",
+ 11: "SPECIFIC",
+}
+var PartOfSpeech_Form_value = map[string]int32{
+ "FORM_UNKNOWN": 0,
+ "ADNOMIAL": 1,
+ "AUXILIARY": 2,
+ "COMPLEMENTIZER": 3,
+ "FINAL_ENDING": 4,
+ "GERUND": 5,
+ "REALIS": 6,
+ "IRREALIS": 7,
+ "SHORT": 8,
+ "LONG": 9,
+ "ORDER": 10,
+ "SPECIFIC": 11,
+}
+
+func (x PartOfSpeech_Form) String() string {
+ return proto.EnumName(PartOfSpeech_Form_name, int32(x))
+}
+func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 3} }
+
+// Gender classes of nouns reflected in the behaviour of associated words.
+type PartOfSpeech_Gender int32
+
+const (
+ // Gender is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0
+ // Feminine
+ PartOfSpeech_FEMININE PartOfSpeech_Gender = 1
+ // Masculine
+ PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2
+ // Neuter
+ PartOfSpeech_NEUTER PartOfSpeech_Gender = 3
+)
+
+var PartOfSpeech_Gender_name = map[int32]string{
+ 0: "GENDER_UNKNOWN",
+ 1: "FEMININE",
+ 2: "MASCULINE",
+ 3: "NEUTER",
+}
+var PartOfSpeech_Gender_value = map[string]int32{
+ "GENDER_UNKNOWN": 0,
+ "FEMININE": 1,
+ "MASCULINE": 2,
+ "NEUTER": 3,
+}
+
+func (x PartOfSpeech_Gender) String() string {
+ return proto.EnumName(PartOfSpeech_Gender_name, int32(x))
+}
+func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 4} }
+
+// The grammatical feature of verbs, used for showing modality and attitude.
+type PartOfSpeech_Mood int32
+
+const (
+ // Mood is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0
+ // Conditional
+ PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1
+ // Imperative
+ PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2
+ // Indicative
+ PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3
+ // Interrogative
+ PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4
+ // Jussive
+ PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5
+ // Subjunctive
+ PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6
+)
+
+var PartOfSpeech_Mood_name = map[int32]string{
+ 0: "MOOD_UNKNOWN",
+ 1: "CONDITIONAL_MOOD",
+ 2: "IMPERATIVE",
+ 3: "INDICATIVE",
+ 4: "INTERROGATIVE",
+ 5: "JUSSIVE",
+ 6: "SUBJUNCTIVE",
+}
+var PartOfSpeech_Mood_value = map[string]int32{
+ "MOOD_UNKNOWN": 0,
+ "CONDITIONAL_MOOD": 1,
+ "IMPERATIVE": 2,
+ "INDICATIVE": 3,
+ "INTERROGATIVE": 4,
+ "JUSSIVE": 5,
+ "SUBJUNCTIVE": 6,
+}
+
+func (x PartOfSpeech_Mood) String() string {
+ return proto.EnumName(PartOfSpeech_Mood_name, int32(x))
+}
+func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 5} }
+
+// Count distinctions.
+type PartOfSpeech_Number int32
+
+const (
+ // Number is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0
+ // Singular
+ PartOfSpeech_SINGULAR PartOfSpeech_Number = 1
+ // Plural
+ PartOfSpeech_PLURAL PartOfSpeech_Number = 2
+ // Dual
+ PartOfSpeech_DUAL PartOfSpeech_Number = 3
+)
+
+var PartOfSpeech_Number_name = map[int32]string{
+ 0: "NUMBER_UNKNOWN",
+ 1: "SINGULAR",
+ 2: "PLURAL",
+ 3: "DUAL",
+}
+var PartOfSpeech_Number_value = map[string]int32{
+ "NUMBER_UNKNOWN": 0,
+ "SINGULAR": 1,
+ "PLURAL": 2,
+ "DUAL": 3,
+}
+
+func (x PartOfSpeech_Number) String() string {
+ return proto.EnumName(PartOfSpeech_Number_name, int32(x))
+}
+func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 6} }
+
+// The distinction between the speaker, second person, third person, etc.
+type PartOfSpeech_Person int32
+
+const (
+ // Person is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0
+ // First
+ PartOfSpeech_FIRST PartOfSpeech_Person = 1
+ // Second
+ PartOfSpeech_SECOND PartOfSpeech_Person = 2
+ // Third
+ PartOfSpeech_THIRD PartOfSpeech_Person = 3
+ // Reflexive
+ PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4
+)
+
+var PartOfSpeech_Person_name = map[int32]string{
+ 0: "PERSON_UNKNOWN",
+ 1: "FIRST",
+ 2: "SECOND",
+ 3: "THIRD",
+ 4: "REFLEXIVE_PERSON",
+}
+var PartOfSpeech_Person_value = map[string]int32{
+ "PERSON_UNKNOWN": 0,
+ "FIRST": 1,
+ "SECOND": 2,
+ "THIRD": 3,
+ "REFLEXIVE_PERSON": 4,
+}
+
+func (x PartOfSpeech_Person) String() string {
+ return proto.EnumName(PartOfSpeech_Person_name, int32(x))
+}
+func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 7} }
+
+// This category shows if the token is part of a proper name.
+type PartOfSpeech_Proper int32
+
+const (
+ // Proper is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0
+ // Proper
+ PartOfSpeech_PROPER PartOfSpeech_Proper = 1
+ // Not proper
+ PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2
+)
+
+var PartOfSpeech_Proper_name = map[int32]string{
+ 0: "PROPER_UNKNOWN",
+ 1: "PROPER",
+ 2: "NOT_PROPER",
+}
+var PartOfSpeech_Proper_value = map[string]int32{
+ "PROPER_UNKNOWN": 0,
+ "PROPER": 1,
+ "NOT_PROPER": 2,
+}
+
+func (x PartOfSpeech_Proper) String() string {
+ return proto.EnumName(PartOfSpeech_Proper_name, int32(x))
+}
+func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 8} }
+
+// Reciprocal features of a pronoun.
+type PartOfSpeech_Reciprocity int32
+
+const (
+ // Reciprocity is not applicable in the analyzed language or is not
+ // predicted.
+ PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0
+ // Reciprocal
+ PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1
+ // Non-reciprocal
+ PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2
+)
+
+var PartOfSpeech_Reciprocity_name = map[int32]string{
+ 0: "RECIPROCITY_UNKNOWN",
+ 1: "RECIPROCAL",
+ 2: "NON_RECIPROCAL",
+}
+var PartOfSpeech_Reciprocity_value = map[string]int32{
+ "RECIPROCITY_UNKNOWN": 0,
+ "RECIPROCAL": 1,
+ "NON_RECIPROCAL": 2,
+}
+
+func (x PartOfSpeech_Reciprocity) String() string {
+ return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x))
+}
+func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 9} }
+
+// Time reference.
+type PartOfSpeech_Tense int32
+
+const (
+ // Tense is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0
+ // Conditional
+ PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1
+ // Future
+ PartOfSpeech_FUTURE PartOfSpeech_Tense = 2
+ // Past
+ PartOfSpeech_PAST PartOfSpeech_Tense = 3
+ // Present
+ PartOfSpeech_PRESENT PartOfSpeech_Tense = 4
+ // Imperfect
+ PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5
+ // Pluperfect
+ PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6
+)
+
+var PartOfSpeech_Tense_name = map[int32]string{
+ 0: "TENSE_UNKNOWN",
+ 1: "CONDITIONAL_TENSE",
+ 2: "FUTURE",
+ 3: "PAST",
+ 4: "PRESENT",
+ 5: "IMPERFECT",
+ 6: "PLUPERFECT",
+}
+var PartOfSpeech_Tense_value = map[string]int32{
+ "TENSE_UNKNOWN": 0,
+ "CONDITIONAL_TENSE": 1,
+ "FUTURE": 2,
+ "PAST": 3,
+ "PRESENT": 4,
+ "IMPERFECT": 5,
+ "PLUPERFECT": 6,
+}
+
+func (x PartOfSpeech_Tense) String() string {
+ return proto.EnumName(PartOfSpeech_Tense_name, int32(x))
+}
+func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 10} }
+
+// The relationship between the action that a verb expresses and the
+// participants identified by its arguments.
+type PartOfSpeech_Voice int32
+
+const (
+ // Voice is not applicable in the analyzed language or is not predicted.
+ PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0
+ // Active
+ PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1
+ // Causative
+ PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2
+ // Passive
+ PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3
+)
+
+var PartOfSpeech_Voice_name = map[int32]string{
+ 0: "VOICE_UNKNOWN",
+ 1: "ACTIVE",
+ 2: "CAUSATIVE",
+ 3: "PASSIVE",
+}
+var PartOfSpeech_Voice_value = map[string]int32{
+ "VOICE_UNKNOWN": 0,
+ "ACTIVE": 1,
+ "CAUSATIVE": 2,
+ "PASSIVE": 3,
+}
+
+func (x PartOfSpeech_Voice) String() string {
+ return proto.EnumName(PartOfSpeech_Voice_name, int32(x))
+}
+func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 11} }
+
+// The parse label enum for the token.
+type DependencyEdge_Label int32
+
+const (
+ // Unknown
+ DependencyEdge_UNKNOWN DependencyEdge_Label = 0
+ // Abbreviation modifier
+ DependencyEdge_ABBREV DependencyEdge_Label = 1
+ // Adjectival complement
+ DependencyEdge_ACOMP DependencyEdge_Label = 2
+ // Adverbial clause modifier
+ DependencyEdge_ADVCL DependencyEdge_Label = 3
+ // Adverbial modifier
+ DependencyEdge_ADVMOD DependencyEdge_Label = 4
+ // Adjectival modifier of an NP
+ DependencyEdge_AMOD DependencyEdge_Label = 5
+ // Appositional modifier of an NP
+ DependencyEdge_APPOS DependencyEdge_Label = 6
+ // Attribute dependent of a copular verb
+ DependencyEdge_ATTR DependencyEdge_Label = 7
+ // Auxiliary (non-main) verb
+ DependencyEdge_AUX DependencyEdge_Label = 8
+ // Passive auxiliary
+ DependencyEdge_AUXPASS DependencyEdge_Label = 9
+ // Coordinating conjunction
+ DependencyEdge_CC DependencyEdge_Label = 10
+ // Clausal complement of a verb or adjective
+ DependencyEdge_CCOMP DependencyEdge_Label = 11
+ // Conjunct
+ DependencyEdge_CONJ DependencyEdge_Label = 12
+ // Clausal subject
+ DependencyEdge_CSUBJ DependencyEdge_Label = 13
+ // Clausal passive subject
+ DependencyEdge_CSUBJPASS DependencyEdge_Label = 14
+ // Dependency (unable to determine)
+ DependencyEdge_DEP DependencyEdge_Label = 15
+ // Determiner
+ DependencyEdge_DET DependencyEdge_Label = 16
+ // Discourse
+ DependencyEdge_DISCOURSE DependencyEdge_Label = 17
+ // Direct object
+ DependencyEdge_DOBJ DependencyEdge_Label = 18
+ // Expletive
+ DependencyEdge_EXPL DependencyEdge_Label = 19
+ // Goes with (part of a word in a text not well edited)
+ DependencyEdge_GOESWITH DependencyEdge_Label = 20
+ // Indirect object
+ DependencyEdge_IOBJ DependencyEdge_Label = 21
+ // Marker (word introducing a subordinate clause)
+ DependencyEdge_MARK DependencyEdge_Label = 22
+ // Multi-word expression
+ DependencyEdge_MWE DependencyEdge_Label = 23
+ // Multi-word verbal expression
+ DependencyEdge_MWV DependencyEdge_Label = 24
+ // Negation modifier
+ DependencyEdge_NEG DependencyEdge_Label = 25
+ // Noun compound modifier
+ DependencyEdge_NN DependencyEdge_Label = 26
+ // Noun phrase used as an adverbial modifier
+ DependencyEdge_NPADVMOD DependencyEdge_Label = 27
+ // Nominal subject
+ DependencyEdge_NSUBJ DependencyEdge_Label = 28
+ // Passive nominal subject
+ DependencyEdge_NSUBJPASS DependencyEdge_Label = 29
+ // Numeric modifier of a noun
+ DependencyEdge_NUM DependencyEdge_Label = 30
+ // Element of compound number
+ DependencyEdge_NUMBER DependencyEdge_Label = 31
+ // Punctuation mark
+ DependencyEdge_P DependencyEdge_Label = 32
+ // Parataxis relation
+ DependencyEdge_PARATAXIS DependencyEdge_Label = 33
+ // Participial modifier
+ DependencyEdge_PARTMOD DependencyEdge_Label = 34
+ // The complement of a preposition is a clause
+ DependencyEdge_PCOMP DependencyEdge_Label = 35
+ // Object of a preposition
+ DependencyEdge_POBJ DependencyEdge_Label = 36
+ // Possession modifier
+ DependencyEdge_POSS DependencyEdge_Label = 37
+ // Postverbal negative particle
+ DependencyEdge_POSTNEG DependencyEdge_Label = 38
+ // Predicate complement
+ DependencyEdge_PRECOMP DependencyEdge_Label = 39
+ // Preconjunt
+ DependencyEdge_PRECONJ DependencyEdge_Label = 40
+ // Predeterminer
+ DependencyEdge_PREDET DependencyEdge_Label = 41
+ // Prefix
+ DependencyEdge_PREF DependencyEdge_Label = 42
+ // Prepositional modifier
+ DependencyEdge_PREP DependencyEdge_Label = 43
+ // The relationship between a verb and verbal morpheme
+ DependencyEdge_PRONL DependencyEdge_Label = 44
+ // Particle
+ DependencyEdge_PRT DependencyEdge_Label = 45
+ // Associative or possessive marker
+ DependencyEdge_PS DependencyEdge_Label = 46
+ // Quantifier phrase modifier
+ DependencyEdge_QUANTMOD DependencyEdge_Label = 47
+ // Relative clause modifier
+ DependencyEdge_RCMOD DependencyEdge_Label = 48
+ // Complementizer in relative clause
+ DependencyEdge_RCMODREL DependencyEdge_Label = 49
+ // Ellipsis without a preceding predicate
+ DependencyEdge_RDROP DependencyEdge_Label = 50
+ // Referent
+ DependencyEdge_REF DependencyEdge_Label = 51
+ // Remnant
+ DependencyEdge_REMNANT DependencyEdge_Label = 52
+ // Reparandum
+ DependencyEdge_REPARANDUM DependencyEdge_Label = 53
+ // Root
+ DependencyEdge_ROOT DependencyEdge_Label = 54
+ // Suffix specifying a unit of number
+ DependencyEdge_SNUM DependencyEdge_Label = 55
+ // Suffix
+ DependencyEdge_SUFF DependencyEdge_Label = 56
+ // Temporal modifier
+ DependencyEdge_TMOD DependencyEdge_Label = 57
+ // Topic marker
+ DependencyEdge_TOPIC DependencyEdge_Label = 58
+ // Clause headed by an infinite form of the verb that modifies a noun
+ DependencyEdge_VMOD DependencyEdge_Label = 59
+ // Vocative
+ DependencyEdge_VOCATIVE DependencyEdge_Label = 60
+ // Open clausal complement
+ DependencyEdge_XCOMP DependencyEdge_Label = 61
+ // Name suffix
+ DependencyEdge_SUFFIX DependencyEdge_Label = 62
+ // Name title
+ DependencyEdge_TITLE DependencyEdge_Label = 63
+ // Adverbial phrase modifier
+ DependencyEdge_ADVPHMOD DependencyEdge_Label = 64
+ // Causative auxiliary
+ DependencyEdge_AUXCAUS DependencyEdge_Label = 65
+ // Helper auxiliary
+ DependencyEdge_AUXVV DependencyEdge_Label = 66
+ // Rentaishi (Prenominal modifier)
+ DependencyEdge_DTMOD DependencyEdge_Label = 67
+ // Foreign words
+ DependencyEdge_FOREIGN DependencyEdge_Label = 68
+ // Keyword
+ DependencyEdge_KW DependencyEdge_Label = 69
+ // List for chains of comparable items
+ DependencyEdge_LIST DependencyEdge_Label = 70
+ // Nominalized clause
+ DependencyEdge_NOMC DependencyEdge_Label = 71
+ // Nominalized clausal subject
+ DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72
+ // Nominalized clausal passive
+ DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73
+ // Compound of numeric modifier
+ DependencyEdge_NUMC DependencyEdge_Label = 74
+ // Copula
+ DependencyEdge_COP DependencyEdge_Label = 75
+ // Dislocated relation (for fronted/topicalized elements)
+ DependencyEdge_DISLOCATED DependencyEdge_Label = 76
+)
+
+var DependencyEdge_Label_name = map[int32]string{
+ 0: "UNKNOWN",
+ 1: "ABBREV",
+ 2: "ACOMP",
+ 3: "ADVCL",
+ 4: "ADVMOD",
+ 5: "AMOD",
+ 6: "APPOS",
+ 7: "ATTR",
+ 8: "AUX",
+ 9: "AUXPASS",
+ 10: "CC",
+ 11: "CCOMP",
+ 12: "CONJ",
+ 13: "CSUBJ",
+ 14: "CSUBJPASS",
+ 15: "DEP",
+ 16: "DET",
+ 17: "DISCOURSE",
+ 18: "DOBJ",
+ 19: "EXPL",
+ 20: "GOESWITH",
+ 21: "IOBJ",
+ 22: "MARK",
+ 23: "MWE",
+ 24: "MWV",
+ 25: "NEG",
+ 26: "NN",
+ 27: "NPADVMOD",
+ 28: "NSUBJ",
+ 29: "NSUBJPASS",
+ 30: "NUM",
+ 31: "NUMBER",
+ 32: "P",
+ 33: "PARATAXIS",
+ 34: "PARTMOD",
+ 35: "PCOMP",
+ 36: "POBJ",
+ 37: "POSS",
+ 38: "POSTNEG",
+ 39: "PRECOMP",
+ 40: "PRECONJ",
+ 41: "PREDET",
+ 42: "PREF",
+ 43: "PREP",
+ 44: "PRONL",
+ 45: "PRT",
+ 46: "PS",
+ 47: "QUANTMOD",
+ 48: "RCMOD",
+ 49: "RCMODREL",
+ 50: "RDROP",
+ 51: "REF",
+ 52: "REMNANT",
+ 53: "REPARANDUM",
+ 54: "ROOT",
+ 55: "SNUM",
+ 56: "SUFF",
+ 57: "TMOD",
+ 58: "TOPIC",
+ 59: "VMOD",
+ 60: "VOCATIVE",
+ 61: "XCOMP",
+ 62: "SUFFIX",
+ 63: "TITLE",
+ 64: "ADVPHMOD",
+ 65: "AUXCAUS",
+ 66: "AUXVV",
+ 67: "DTMOD",
+ 68: "FOREIGN",
+ 69: "KW",
+ 70: "LIST",
+ 71: "NOMC",
+ 72: "NOMCSUBJ",
+ 73: "NOMCSUBJPASS",
+ 74: "NUMC",
+ 75: "COP",
+ 76: "DISLOCATED",
+}
+var DependencyEdge_Label_value = map[string]int32{
+ "UNKNOWN": 0,
+ "ABBREV": 1,
+ "ACOMP": 2,
+ "ADVCL": 3,
+ "ADVMOD": 4,
+ "AMOD": 5,
+ "APPOS": 6,
+ "ATTR": 7,
+ "AUX": 8,
+ "AUXPASS": 9,
+ "CC": 10,
+ "CCOMP": 11,
+ "CONJ": 12,
+ "CSUBJ": 13,
+ "CSUBJPASS": 14,
+ "DEP": 15,
+ "DET": 16,
+ "DISCOURSE": 17,
+ "DOBJ": 18,
+ "EXPL": 19,
+ "GOESWITH": 20,
+ "IOBJ": 21,
+ "MARK": 22,
+ "MWE": 23,
+ "MWV": 24,
+ "NEG": 25,
+ "NN": 26,
+ "NPADVMOD": 27,
+ "NSUBJ": 28,
+ "NSUBJPASS": 29,
+ "NUM": 30,
+ "NUMBER": 31,
+ "P": 32,
+ "PARATAXIS": 33,
+ "PARTMOD": 34,
+ "PCOMP": 35,
+ "POBJ": 36,
+ "POSS": 37,
+ "POSTNEG": 38,
+ "PRECOMP": 39,
+ "PRECONJ": 40,
+ "PREDET": 41,
+ "PREF": 42,
+ "PREP": 43,
+ "PRONL": 44,
+ "PRT": 45,
+ "PS": 46,
+ "QUANTMOD": 47,
+ "RCMOD": 48,
+ "RCMODREL": 49,
+ "RDROP": 50,
+ "REF": 51,
+ "REMNANT": 52,
+ "REPARANDUM": 53,
+ "ROOT": 54,
+ "SNUM": 55,
+ "SUFF": 56,
+ "TMOD": 57,
+ "TOPIC": 58,
+ "VMOD": 59,
+ "VOCATIVE": 60,
+ "XCOMP": 61,
+ "SUFFIX": 62,
+ "TITLE": 63,
+ "ADVPHMOD": 64,
+ "AUXCAUS": 65,
+ "AUXVV": 66,
+ "DTMOD": 67,
+ "FOREIGN": 68,
+ "KW": 69,
+ "LIST": 70,
+ "NOMC": 71,
+ "NOMCSUBJ": 72,
+ "NOMCSUBJPASS": 73,
+ "NUMC": 74,
+ "COP": 75,
+ "DISLOCATED": 76,
+}
+
+func (x DependencyEdge_Label) String() string {
+ return proto.EnumName(DependencyEdge_Label_name, int32(x))
+}
+func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{6, 0} }
+
+// The supported types of mentions.
+type EntityMention_Type int32
+
+const (
+ // Unknown
+ EntityMention_TYPE_UNKNOWN EntityMention_Type = 0
+ // Proper name
+ EntityMention_PROPER EntityMention_Type = 1
+ // Common noun (or noun compound)
+ EntityMention_COMMON EntityMention_Type = 2
+)
+
+var EntityMention_Type_name = map[int32]string{
+ 0: "TYPE_UNKNOWN",
+ 1: "PROPER",
+ 2: "COMMON",
+}
+var EntityMention_Type_value = map[string]int32{
+ "TYPE_UNKNOWN": 0,
+ "PROPER": 1,
+ "COMMON": 2,
+}
+
+func (x EntityMention_Type) String() string {
+ return proto.EnumName(EntityMention_Type_name, int32(x))
+}
+func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{7, 0} }
+
+// ################################################################ #
+//
+// Represents the input to API methods.
+type Document struct {
+ // Required. If the type is not set or is `TYPE_UNSPECIFIED`,
+ // returns an `INVALID_ARGUMENT` error.
+ Type Document_Type `protobuf:"varint,1,opt,name=type,enum=google.cloud.language.v1beta2.Document_Type" json:"type,omitempty"`
+ // The source of the document: a string containing the content or a
+ // Google Cloud Storage URI.
+ //
+ // Types that are valid to be assigned to Source:
+ // *Document_Content
+ // *Document_GcsContentUri
+ Source isDocument_Source `protobuf_oneof:"source"`
+ // The language of the document (if not specified, the language is
+ // automatically detected). Both ISO and BCP-47 language codes are
+ // accepted.<br>
+ // [Language Support](https://cloud.google.com/natural-language/docs/languages)
+ // lists currently supported languages for each API method.
+ // If the language (either specified by the caller or automatically detected)
+ // is not supported by the called API method, an `INVALID_ARGUMENT` error
+ // is returned.
+ Language string `protobuf:"bytes,4,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *Document) Reset() { *m = Document{} }
+func (m *Document) String() string { return proto.CompactTextString(m) }
+func (*Document) ProtoMessage() {}
+func (*Document) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+// isDocument_Source is implemented only by the generated wrapper types that
+// can populate the Document.Source oneof.
+type isDocument_Source interface {
+ isDocument_Source()
+}
+
+// Document_Content wraps inline document content (oneof field 2).
+type Document_Content struct {
+ Content string `protobuf:"bytes,2,opt,name=content,oneof"`
+}
+// Document_GcsContentUri wraps a Google Cloud Storage URI to the content
+// (oneof field 3).
+type Document_GcsContentUri struct {
+ GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,oneof"`
+}
+
+func (*Document_Content) isDocument_Source() {}
+func (*Document_GcsContentUri) isDocument_Source() {}
+
+// GetSource returns the populated oneof wrapper; nil-safe on the receiver.
+func (m *Document) GetSource() isDocument_Source {
+ if m != nil {
+ return m.Source
+ }
+ return nil
+}
+
+// The Get* accessors below follow the generated-code convention: they are
+// nil-safe and return the field's zero value when the receiver (or, for the
+// oneof fields, the wrapper) is not set.
+func (m *Document) GetType() Document_Type {
+ if m != nil {
+ return m.Type
+ }
+ return Document_TYPE_UNSPECIFIED
+}
+
+func (m *Document) GetContent() string {
+ if x, ok := m.GetSource().(*Document_Content); ok {
+ return x.Content
+ }
+ return ""
+}
+
+func (m *Document) GetGcsContentUri() string {
+ if x, ok := m.GetSource().(*Document_GcsContentUri); ok {
+ return x.GcsContentUri
+ }
+ return ""
+}
+
+func (m *Document) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+ return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{
+ (*Document_Content)(nil),
+ (*Document_GcsContentUri)(nil),
+ }
+}
+
+// _Document_OneofMarshaler writes whichever Source wrapper is set: the field
+// key varint (tag<<3 | wire type), then the length-delimited string payload.
+func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+ m := msg.(*Document)
+ // source
+ switch x := m.Source.(type) {
+ case *Document_Content:
+ b.EncodeVarint(2<<3 | proto.WireBytes)
+ b.EncodeStringBytes(x.Content)
+ case *Document_GcsContentUri:
+ b.EncodeVarint(3<<3 | proto.WireBytes)
+ b.EncodeStringBytes(x.GcsContentUri)
+ case nil:
+ // unset oneof: nothing to emit
+ default:
+ return fmt.Errorf("Document.Source has unexpected type %T", x)
+ }
+ return nil
+}
+
+// _Document_OneofUnmarshaler decodes field 2 or 3 into the matching Source
+// wrapper. It returns (false, nil) for tags it does not own, and
+// (true, err) once it has claimed the tag (even on a decode error).
+func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+ m := msg.(*Document)
+ switch tag {
+ case 2: // source.content
+ if wire != proto.WireBytes {
+ return true, proto.ErrInternalBadWireType
+ }
+ x, err := b.DecodeStringBytes()
+ m.Source = &Document_Content{x}
+ return true, err
+ case 3: // source.gcs_content_uri
+ if wire != proto.WireBytes {
+ return true, proto.ErrInternalBadWireType
+ }
+ x, err := b.DecodeStringBytes()
+ m.Source = &Document_GcsContentUri{x}
+ return true, err
+ default:
+ return false, nil
+ }
+}
+
+// _Document_OneofSizer reports the wire size of the set Source wrapper:
+// key varint + length varint + payload bytes. An unset oneof contributes 0.
+func _Document_OneofSizer(msg proto.Message) (n int) {
+ m := msg.(*Document)
+ // source
+ switch x := m.Source.(type) {
+ case *Document_Content:
+ n += proto.SizeVarint(2<<3 | proto.WireBytes)
+ n += proto.SizeVarint(uint64(len(x.Content)))
+ n += len(x.Content)
+ case *Document_GcsContentUri:
+ n += proto.SizeVarint(3<<3 | proto.WireBytes)
+ n += proto.SizeVarint(uint64(len(x.GcsContentUri)))
+ n += len(x.GcsContentUri)
+ case nil:
+ default:
+ panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+ }
+ return n
+}
+
+// Represents a sentence in the input document.
+type Sentence struct {
+ // The sentence text.
+ Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+ // For calls to [AnalyzeSentiment][] or if
+ // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
+ // true, this field will contain the sentiment for the sentence.
+ Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment" json:"sentiment,omitempty"`
+}
+
+func (m *Sentence) Reset() { *m = Sentence{} }
+func (m *Sentence) String() string { return proto.CompactTextString(m) }
+func (*Sentence) ProtoMessage() {}
+func (*Sentence) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// (nil here) when the receiver is nil.
+func (m *Sentence) GetText() *TextSpan {
+ if m != nil {
+ return m.Text
+ }
+ return nil
+}
+
+func (m *Sentence) GetSentiment() *Sentiment {
+ if m != nil {
+ return m.Sentiment
+ }
+ return nil
+}
+
+// Represents a phrase in the text that is a known entity, such as
+// a person, an organization, or location. The API associates information, such
+// as salience and mentions, with entities.
+type Entity struct {
+ // The representative name for the entity.
+ Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+ // The entity type.
+ Type Entity_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1beta2.Entity_Type" json:"type,omitempty"`
+ // Metadata associated with the entity.
+ //
+ // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
+ // available. The associated keys are "wikipedia_url" and "mid", respectively.
+ Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+ // The salience score associated with the entity in the [0, 1.0] range.
+ //
+ // The salience score for an entity provides information about the
+ // importance or centrality of that entity to the entire document text.
+ // Scores closer to 0 are less salient, while scores closer to 1.0 are highly
+ // salient.
+ Salience float32 `protobuf:"fixed32,4,opt,name=salience" json:"salience,omitempty"`
+ // The mentions of this entity in the input document. The API currently
+ // supports proper noun mentions.
+ Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions" json:"mentions,omitempty"`
+ // For calls to [AnalyzeEntitySentiment][] or if
+ // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to
+ // true, this field will contain the aggregate sentiment expressed for this
+ // entity in the provided document.
+ Sentiment *Sentiment `protobuf:"bytes,6,opt,name=sentiment" json:"sentiment,omitempty"`
+}
+
+func (m *Entity) Reset() { *m = Entity{} }
+func (m *Entity) String() string { return proto.CompactTextString(m) }
+func (*Entity) ProtoMessage() {}
+func (*Entity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *Entity) GetName() string {
+ if m != nil {
+ return m.Name
+ }
+ return ""
+}
+
+func (m *Entity) GetType() Entity_Type {
+ if m != nil {
+ return m.Type
+ }
+ return Entity_UNKNOWN
+}
+
+func (m *Entity) GetMetadata() map[string]string {
+ if m != nil {
+ return m.Metadata
+ }
+ return nil
+}
+
+func (m *Entity) GetSalience() float32 {
+ if m != nil {
+ return m.Salience
+ }
+ return 0
+}
+
+func (m *Entity) GetMentions() []*EntityMention {
+ if m != nil {
+ return m.Mentions
+ }
+ return nil
+}
+
+func (m *Entity) GetSentiment() *Sentiment {
+ if m != nil {
+ return m.Sentiment
+ }
+ return nil
+}
+
+// Represents the smallest syntactic building block of the text.
+type Token struct {
+ // The token text.
+ Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+ // Parts of speech tag for this token.
+ PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech" json:"part_of_speech,omitempty"`
+ // Dependency tree parse for this token.
+ DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge" json:"dependency_edge,omitempty"`
+ // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
+ Lemma string `protobuf:"bytes,4,opt,name=lemma" json:"lemma,omitempty"`
+}
+
+func (m *Token) Reset() { *m = Token{} }
+func (m *Token) String() string { return proto.CompactTextString(m) }
+func (*Token) ProtoMessage() {}
+func (*Token) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *Token) GetText() *TextSpan {
+ if m != nil {
+ return m.Text
+ }
+ return nil
+}
+
+func (m *Token) GetPartOfSpeech() *PartOfSpeech {
+ if m != nil {
+ return m.PartOfSpeech
+ }
+ return nil
+}
+
+func (m *Token) GetDependencyEdge() *DependencyEdge {
+ if m != nil {
+ return m.DependencyEdge
+ }
+ return nil
+}
+
+func (m *Token) GetLemma() string {
+ if m != nil {
+ return m.Lemma
+ }
+ return ""
+}
+
+// Represents the feeling associated with the entire text or entities in
+// the text.
+type Sentiment struct {
+ // A non-negative number in the [0, +inf) range, which represents
+ // the absolute magnitude of sentiment regardless of score (positive or
+ // negative).
+ Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude" json:"magnitude,omitempty"`
+ // Sentiment score between -1.0 (negative sentiment) and 1.0
+ // (positive sentiment).
+ Score float32 `protobuf:"fixed32,3,opt,name=score" json:"score,omitempty"`
+}
+
+func (m *Sentiment) Reset() { *m = Sentiment{} }
+func (m *Sentiment) String() string { return proto.CompactTextString(m) }
+func (*Sentiment) ProtoMessage() {}
+func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
+
+// The accessors below are nil-safe: each returns 0 when the receiver is nil.
+func (m *Sentiment) GetMagnitude() float32 {
+ if m != nil {
+ return m.Magnitude
+ }
+ return 0
+}
+
+func (m *Sentiment) GetScore() float32 {
+ if m != nil {
+ return m.Score
+ }
+ return 0
+}
+
+// Represents part of speech information for a token.
+type PartOfSpeech struct {
+ // The part of speech tag.
+ Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,enum=google.cloud.language.v1beta2.PartOfSpeech_Tag" json:"tag,omitempty"`
+ // The grammatical aspect.
+ Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,enum=google.cloud.language.v1beta2.PartOfSpeech_Aspect" json:"aspect,omitempty"`
+ // The grammatical case.
+ Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,enum=google.cloud.language.v1beta2.PartOfSpeech_Case" json:"case,omitempty"`
+ // The grammatical form.
+ Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,enum=google.cloud.language.v1beta2.PartOfSpeech_Form" json:"form,omitempty"`
+ // The grammatical gender.
+ Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,enum=google.cloud.language.v1beta2.PartOfSpeech_Gender" json:"gender,omitempty"`
+ // The grammatical mood.
+ Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,enum=google.cloud.language.v1beta2.PartOfSpeech_Mood" json:"mood,omitempty"`
+ // The grammatical number.
+ Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,enum=google.cloud.language.v1beta2.PartOfSpeech_Number" json:"number,omitempty"`
+ // The grammatical person.
+ Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,enum=google.cloud.language.v1beta2.PartOfSpeech_Person" json:"person,omitempty"`
+ // The grammatical properness.
+ Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,enum=google.cloud.language.v1beta2.PartOfSpeech_Proper" json:"proper,omitempty"`
+ // The grammatical reciprocity.
+ Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,enum=google.cloud.language.v1beta2.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"`
+ // The grammatical tense.
+ Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,enum=google.cloud.language.v1beta2.PartOfSpeech_Tense" json:"tense,omitempty"`
+ // The grammatical voice.
+ Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,enum=google.cloud.language.v1beta2.PartOfSpeech_Voice" json:"voice,omitempty"`
+}
+
+func (m *PartOfSpeech) Reset() { *m = PartOfSpeech{} }
+func (m *PartOfSpeech) String() string { return proto.CompactTextString(m) }
+func (*PartOfSpeech) ProtoMessage() {}
+func (*PartOfSpeech) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+// The accessors below are nil-safe: each returns the corresponding enum's
+// "unknown"/zero value when the receiver is nil.
+func (m *PartOfSpeech) GetTag() PartOfSpeech_Tag {
+ if m != nil {
+ return m.Tag
+ }
+ return PartOfSpeech_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetAspect() PartOfSpeech_Aspect {
+ if m != nil {
+ return m.Aspect
+ }
+ return PartOfSpeech_ASPECT_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetCase() PartOfSpeech_Case {
+ if m != nil {
+ return m.Case
+ }
+ return PartOfSpeech_CASE_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetForm() PartOfSpeech_Form {
+ if m != nil {
+ return m.Form
+ }
+ return PartOfSpeech_FORM_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetGender() PartOfSpeech_Gender {
+ if m != nil {
+ return m.Gender
+ }
+ return PartOfSpeech_GENDER_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetMood() PartOfSpeech_Mood {
+ if m != nil {
+ return m.Mood
+ }
+ return PartOfSpeech_MOOD_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetNumber() PartOfSpeech_Number {
+ if m != nil {
+ return m.Number
+ }
+ return PartOfSpeech_NUMBER_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetPerson() PartOfSpeech_Person {
+ if m != nil {
+ return m.Person
+ }
+ return PartOfSpeech_PERSON_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetProper() PartOfSpeech_Proper {
+ if m != nil {
+ return m.Proper
+ }
+ return PartOfSpeech_PROPER_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetReciprocity() PartOfSpeech_Reciprocity {
+ if m != nil {
+ return m.Reciprocity
+ }
+ return PartOfSpeech_RECIPROCITY_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetTense() PartOfSpeech_Tense {
+ if m != nil {
+ return m.Tense
+ }
+ return PartOfSpeech_TENSE_UNKNOWN
+}
+
+func (m *PartOfSpeech) GetVoice() PartOfSpeech_Voice {
+ if m != nil {
+ return m.Voice
+ }
+ return PartOfSpeech_VOICE_UNKNOWN
+}
+
+// Represents dependency parse tree information for a token.
+type DependencyEdge struct {
+ // Represents the head of this token in the dependency tree.
+ // This is the index of the token which has an arc going to this token.
+ // The index is the position of the token in the array of tokens returned
+ // by the API method. If this token is a root token, then the
+ // `head_token_index` is its own index.
+ HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex" json:"head_token_index,omitempty"`
+ // The parse label for the token.
+ Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,enum=google.cloud.language.v1beta2.DependencyEdge_Label" json:"label,omitempty"`
+}
+
+func (m *DependencyEdge) Reset() { *m = DependencyEdge{} }
+func (m *DependencyEdge) String() string { return proto.CompactTextString(m) }
+func (*DependencyEdge) ProtoMessage() {}
+func (*DependencyEdge) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *DependencyEdge) GetHeadTokenIndex() int32 {
+ if m != nil {
+ return m.HeadTokenIndex
+ }
+ return 0
+}
+
+func (m *DependencyEdge) GetLabel() DependencyEdge_Label {
+ if m != nil {
+ return m.Label
+ }
+ return DependencyEdge_UNKNOWN
+}
+
+// Represents a mention for an entity in the text. Currently, proper noun
+// mentions are supported.
+type EntityMention struct {
+ // The mention text.
+ Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+ // The type of the entity mention.
+ Type EntityMention_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1beta2.EntityMention_Type" json:"type,omitempty"`
+ // For calls to [AnalyzeEntitySentiment][] or if
+ // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to
+ // true, this field will contain the sentiment expressed for this mention of
+ // the entity in the provided document.
+ Sentiment *Sentiment `protobuf:"bytes,3,opt,name=sentiment" json:"sentiment,omitempty"`
+}
+
+func (m *EntityMention) Reset() { *m = EntityMention{} }
+func (m *EntityMention) String() string { return proto.CompactTextString(m) }
+func (*EntityMention) ProtoMessage() {}
+func (*EntityMention) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *EntityMention) GetText() *TextSpan {
+ if m != nil {
+ return m.Text
+ }
+ return nil
+}
+
+func (m *EntityMention) GetType() EntityMention_Type {
+ if m != nil {
+ return m.Type
+ }
+ return EntityMention_TYPE_UNKNOWN
+}
+
+func (m *EntityMention) GetSentiment() *Sentiment {
+ if m != nil {
+ return m.Sentiment
+ }
+ return nil
+}
+
+// Represents an output piece of text.
+type TextSpan struct {
+ // The content of the output text.
+ Content string `protobuf:"bytes,1,opt,name=content" json:"content,omitempty"`
+ // The API calculates the beginning offset of the content in the original
+ // document according to the [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the API request.
+ BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset" json:"begin_offset,omitempty"`
+}
+
+func (m *TextSpan) Reset() { *m = TextSpan{} }
+func (m *TextSpan) String() string { return proto.CompactTextString(m) }
+func (*TextSpan) ProtoMessage() {}
+func (*TextSpan) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *TextSpan) GetContent() string {
+ if m != nil {
+ return m.Content
+ }
+ return ""
+}
+
+func (m *TextSpan) GetBeginOffset() int32 {
+ if m != nil {
+ return m.BeginOffset
+ }
+ return 0
+}
+
+// The sentiment analysis request message.
+type AnalyzeSentimentRequest struct {
+ // Input document.
+ Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+ // The encoding type used by the API to calculate sentence offsets for the
+ // sentence sentiment.
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeSentimentRequest) Reset() { *m = AnalyzeSentimentRequest{} }
+func (m *AnalyzeSentimentRequest) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeSentimentRequest) ProtoMessage() {}
+func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeSentimentRequest) GetDocument() *Document {
+ if m != nil {
+ return m.Document
+ }
+ return nil
+}
+
+func (m *AnalyzeSentimentRequest) GetEncodingType() EncodingType {
+ if m != nil {
+ return m.EncodingType
+ }
+ return EncodingType_NONE
+}
+
+// The sentiment analysis response message.
+type AnalyzeSentimentResponse struct {
+ // The overall sentiment of the input document.
+ DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment" json:"document_sentiment,omitempty"`
+ // The language of the text, which will be the same as the language specified
+ // in the request or, if not specified, the automatically-detected language.
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
+ Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+ // The sentiment for all the sentences in the document.
+ Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences" json:"sentences,omitempty"`
+}
+
+func (m *AnalyzeSentimentResponse) Reset() { *m = AnalyzeSentimentResponse{} }
+func (m *AnalyzeSentimentResponse) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeSentimentResponse) ProtoMessage() {}
+func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment {
+ if m != nil {
+ return m.DocumentSentiment
+ }
+ return nil
+}
+
+func (m *AnalyzeSentimentResponse) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence {
+ if m != nil {
+ return m.Sentences
+ }
+ return nil
+}
+
+// The entity-level sentiment analysis request message.
+type AnalyzeEntitySentimentRequest struct {
+ // Input document.
+ Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+ // The encoding type used by the API to calculate offsets.
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeEntitySentimentRequest) Reset() { *m = AnalyzeEntitySentimentRequest{} }
+func (m *AnalyzeEntitySentimentRequest) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeEntitySentimentRequest) ProtoMessage() {}
+func (*AnalyzeEntitySentimentRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeEntitySentimentRequest) GetDocument() *Document {
+ if m != nil {
+ return m.Document
+ }
+ return nil
+}
+
+func (m *AnalyzeEntitySentimentRequest) GetEncodingType() EncodingType {
+ if m != nil {
+ return m.EncodingType
+ }
+ return EncodingType_NONE
+}
+
+// The entity-level sentiment analysis response message.
+type AnalyzeEntitySentimentResponse struct {
+ // The recognized entities in the input document with associated sentiments.
+ Entities []*Entity `protobuf:"bytes,1,rep,name=entities" json:"entities,omitempty"`
+ // The language of the text, which will be the same as the language specified
+ // in the request or, if not specified, the automatically-detected language.
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
+ Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeEntitySentimentResponse) Reset() { *m = AnalyzeEntitySentimentResponse{} }
+func (m *AnalyzeEntitySentimentResponse) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeEntitySentimentResponse) ProtoMessage() {}
+func (*AnalyzeEntitySentimentResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeEntitySentimentResponse) GetEntities() []*Entity {
+ if m != nil {
+ return m.Entities
+ }
+ return nil
+}
+
+func (m *AnalyzeEntitySentimentResponse) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+// The entity analysis request message.
+type AnalyzeEntitiesRequest struct {
+ // Input document.
+ Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+ // The encoding type used by the API to calculate offsets.
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeEntitiesRequest) Reset() { *m = AnalyzeEntitiesRequest{} }
+func (m *AnalyzeEntitiesRequest) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeEntitiesRequest) ProtoMessage() {}
+func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeEntitiesRequest) GetDocument() *Document {
+ if m != nil {
+ return m.Document
+ }
+ return nil
+}
+
+func (m *AnalyzeEntitiesRequest) GetEncodingType() EncodingType {
+ if m != nil {
+ return m.EncodingType
+ }
+ return EncodingType_NONE
+}
+
+// The entity analysis response message.
+type AnalyzeEntitiesResponse struct {
+ // The recognized entities in the input document.
+ Entities []*Entity `protobuf:"bytes,1,rep,name=entities" json:"entities,omitempty"`
+ // The language of the text, which will be the same as the language specified
+ // in the request or, if not specified, the automatically-detected language.
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
+ Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeEntitiesResponse) Reset() { *m = AnalyzeEntitiesResponse{} }
+func (m *AnalyzeEntitiesResponse) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeEntitiesResponse) ProtoMessage() {}
+func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity {
+ if m != nil {
+ return m.Entities
+ }
+ return nil
+}
+
+func (m *AnalyzeEntitiesResponse) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+// The syntax analysis request message.
+type AnalyzeSyntaxRequest struct {
+ // Input document.
+ Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+ // The encoding type used by the API to calculate offsets.
+ EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeSyntaxRequest) Reset() { *m = AnalyzeSyntaxRequest{} }
+func (m *AnalyzeSyntaxRequest) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxRequest) ProtoMessage() {}
+func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeSyntaxRequest) GetDocument() *Document {
+ if m != nil {
+ return m.Document
+ }
+ return nil
+}
+
+func (m *AnalyzeSyntaxRequest) GetEncodingType() EncodingType {
+ if m != nil {
+ return m.EncodingType
+ }
+ return EncodingType_NONE
+}
+
+// The syntax analysis response message.
+type AnalyzeSyntaxResponse struct {
+ // Sentences in the input document.
+ Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences" json:"sentences,omitempty"`
+ // Tokens, along with their syntactic information, in the input document.
+ Tokens []*Token `protobuf:"bytes,2,rep,name=tokens" json:"tokens,omitempty"`
+ // The language of the text, which will be the same as the language specified
+ // in the request or, if not specified, the automatically-detected language.
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
+ Language string `protobuf:"bytes,3,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeSyntaxResponse) Reset() { *m = AnalyzeSyntaxResponse{} }
+func (m *AnalyzeSyntaxResponse) String() string { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxResponse) ProtoMessage() {}
+func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence {
+ if m != nil {
+ return m.Sentences
+ }
+ return nil
+}
+
+func (m *AnalyzeSyntaxResponse) GetTokens() []*Token {
+ if m != nil {
+ return m.Tokens
+ }
+ return nil
+}
+
+func (m *AnalyzeSyntaxResponse) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+// The request message for the text annotation API, which can perform multiple
+// analysis types (sentiment, entities, and syntax) in one call.
+type AnnotateTextRequest struct {
+ // Input document.
+ Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+ // The enabled features.
+ Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features" json:"features,omitempty"`
+ // The encoding type used by the API to calculate offsets.
+ EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta2.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnnotateTextRequest) Reset() { *m = AnnotateTextRequest{} }
+func (m *AnnotateTextRequest) String() string { return proto.CompactTextString(m) }
+func (*AnnotateTextRequest) ProtoMessage() {}
+func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{17} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnnotateTextRequest) GetDocument() *Document {
+ if m != nil {
+ return m.Document
+ }
+ return nil
+}
+
+func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features {
+ if m != nil {
+ return m.Features
+ }
+ return nil
+}
+
+func (m *AnnotateTextRequest) GetEncodingType() EncodingType {
+ if m != nil {
+ return m.EncodingType
+ }
+ return EncodingType_NONE
+}
+
+// All available features for sentiment, syntax, and semantic analysis.
+// Setting each one to true will enable that specific analysis for the input.
+type AnnotateTextRequest_Features struct {
+ // Extract syntax information.
+ ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax" json:"extract_syntax,omitempty"`
+ // Extract entities.
+ ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities" json:"extract_entities,omitempty"`
+ // Extract document-level sentiment.
+ ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment" json:"extract_document_sentiment,omitempty"`
+ // Extract entities and their associated sentiment.
+ ExtractEntitySentiment bool `protobuf:"varint,4,opt,name=extract_entity_sentiment,json=extractEntitySentiment" json:"extract_entity_sentiment,omitempty"`
+}
+
+func (m *AnnotateTextRequest_Features) Reset() { *m = AnnotateTextRequest_Features{} }
+func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) }
+func (*AnnotateTextRequest_Features) ProtoMessage() {}
+func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) {
+ return fileDescriptor0, []int{17, 0}
+}
+
+// The accessors below are nil-safe: each returns false when the receiver
+// is nil.
+func (m *AnnotateTextRequest_Features) GetExtractSyntax() bool {
+ if m != nil {
+ return m.ExtractSyntax
+ }
+ return false
+}
+
+func (m *AnnotateTextRequest_Features) GetExtractEntities() bool {
+ if m != nil {
+ return m.ExtractEntities
+ }
+ return false
+}
+
+func (m *AnnotateTextRequest_Features) GetExtractDocumentSentiment() bool {
+ if m != nil {
+ return m.ExtractDocumentSentiment
+ }
+ return false
+}
+
+func (m *AnnotateTextRequest_Features) GetExtractEntitySentiment() bool {
+ if m != nil {
+ return m.ExtractEntitySentiment
+ }
+ return false
+}
+
+// The text annotations response message.
+type AnnotateTextResponse struct {
+ // Sentences in the input document. Populated if the user enables
+ // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
+ Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences" json:"sentences,omitempty"`
+ // Tokens, along with their syntactic information, in the input document.
+ // Populated if the user enables
+ // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax].
+ Tokens []*Token `protobuf:"bytes,2,rep,name=tokens" json:"tokens,omitempty"`
+ // Entities, along with their semantic information, in the input document.
+ // Populated if the user enables
+ // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities].
+ Entities []*Entity `protobuf:"bytes,3,rep,name=entities" json:"entities,omitempty"`
+ // The overall sentiment for the document. Populated if the user enables
+ // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment].
+ DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment" json:"document_sentiment,omitempty"`
+ // The language of the text, which will be the same as the language specified
+ // in the request or, if not specified, the automatically-detected language.
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details.
+ Language string `protobuf:"bytes,5,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnnotateTextResponse) Reset() { *m = AnnotateTextResponse{} }
+func (m *AnnotateTextResponse) String() string { return proto.CompactTextString(m) }
+func (*AnnotateTextResponse) ProtoMessage() {}
+func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{18} }
+
+// The accessors below are nil-safe: each returns the field's zero value
+// when the receiver is nil.
+func (m *AnnotateTextResponse) GetSentences() []*Sentence {
+ if m != nil {
+ return m.Sentences
+ }
+ return nil
+}
+
+func (m *AnnotateTextResponse) GetTokens() []*Token {
+ if m != nil {
+ return m.Tokens
+ }
+ return nil
+}
+
+func (m *AnnotateTextResponse) GetEntities() []*Entity {
+ if m != nil {
+ return m.Entities
+ }
+ return nil
+}
+
+func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment {
+ if m != nil {
+ return m.DocumentSentiment
+ }
+ return nil
+}
+
+func (m *AnnotateTextResponse) GetLanguage() string {
+ if m != nil {
+ return m.Language
+ }
+ return ""
+}
+
+// init registers every generated message type and enum with the proto
+// package under its fully-qualified proto name, so reflection-based
+// (un)marshaling and Any resolution can find them.
+func init() {
+ proto.RegisterType((*Document)(nil), "google.cloud.language.v1beta2.Document")
+ proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1beta2.Sentence")
+ proto.RegisterType((*Entity)(nil), "google.cloud.language.v1beta2.Entity")
+ proto.RegisterType((*Token)(nil), "google.cloud.language.v1beta2.Token")
+ proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1beta2.Sentiment")
+ proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1beta2.PartOfSpeech")
+ proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1beta2.DependencyEdge")
+ proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1beta2.EntityMention")
+ proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1beta2.TextSpan")
+ proto.RegisterType((*AnalyzeSentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentRequest")
+ proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSentimentResponse")
+ proto.RegisterType((*AnalyzeEntitySentimentRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest")
+ proto.RegisterType((*AnalyzeEntitySentimentResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse")
+ proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesRequest")
+ proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1beta2.AnalyzeEntitiesResponse")
+ proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxRequest")
+ proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1beta2.AnalyzeSyntaxResponse")
+ proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest")
+ proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1beta2.AnnotateTextRequest.Features")
+ proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1beta2.AnnotateTextResponse")
+ proto.RegisterEnum("google.cloud.language.v1beta2.EncodingType", EncodingType_name, EncodingType_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.Document_Type", Document_Type_name, Document_Type_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.Entity_Type", Entity_Type_name, Entity_Type_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value)
+ proto.RegisterEnum("google.cloud.language.v1beta2.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value)
+}
+
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
+
// Client API for LanguageService service

// LanguageServiceClient is the client-side API for the LanguageService
// service: natural-language analysis (sentiment, entities, syntax, and a
// combined annotate call) over documents supplied in the request messages.
type LanguageServiceClient interface {
	// Analyzes the sentiment of the provided text.
	AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error)
	// Finds named entities (currently proper names and common nouns) in the text
	// along with entity types, salience, mentions for each entity, and
	// other properties.
	AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error)
	// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
	// sentiment associated with each entity and its mentions.
	AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error)
	// Analyzes the syntax of the text and provides sentence boundaries and
	// tokenization along with part of speech tags, dependency trees, and other
	// properties.
	AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error)
	// A convenience method that provides all syntax, sentiment, and entity
	// features in one call.
	AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error)
}
+
// languageServiceClient is the concrete LanguageServiceClient; each method
// issues one unary RPC over the wrapped client connection.
type languageServiceClient struct {
	cc *grpc.ClientConn
}
+
+func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient {
+ return &languageServiceClient{cc}
+}
+
+func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) {
+ out := new(AnalyzeSentimentResponse)
+ err := grpc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment", in, out, c.cc, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) {
+ out := new(AnalyzeEntitiesResponse)
+ err := grpc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities", in, out, c.cc, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *languageServiceClient) AnalyzeEntitySentiment(ctx context.Context, in *AnalyzeEntitySentimentRequest, opts ...grpc.CallOption) (*AnalyzeEntitySentimentResponse, error) {
+ out := new(AnalyzeEntitySentimentResponse)
+ err := grpc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment", in, out, c.cc, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) {
+ out := new(AnalyzeSyntaxResponse)
+ err := grpc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax", in, out, c.cc, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
+func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) {
+ out := new(AnnotateTextResponse)
+ err := grpc.Invoke(ctx, "/google.cloud.language.v1beta2.LanguageService/AnnotateText", in, out, c.cc, opts...)
+ if err != nil {
+ return nil, err
+ }
+ return out, nil
+}
+
// Server API for LanguageService service

// LanguageServiceServer is the interface a server implementation must satisfy
// to be registered with RegisterLanguageServiceServer; one method per RPC.
type LanguageServiceServer interface {
	// Analyzes the sentiment of the provided text.
	AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error)
	// Finds named entities (currently proper names and common nouns) in the text
	// along with entity types, salience, mentions for each entity, and
	// other properties.
	AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
	// Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes
	// sentiment associated with each entity and its mentions.
	AnalyzeEntitySentiment(context.Context, *AnalyzeEntitySentimentRequest) (*AnalyzeEntitySentimentResponse, error)
	// Analyzes the syntax of the text and provides sentence boundaries and
	// tokenization along with part of speech tags, dependency trees, and other
	// properties.
	AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
	// A convenience method that provides all syntax, sentiment, and entity
	// features in one call.
	AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error)
}
+
// RegisterLanguageServiceServer registers the LanguageService implementation
// srv with the gRPC server s, routing incoming RPCs to srv's methods via the
// service descriptor's handlers.
func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) {
	s.RegisterService(&_LanguageService_serviceDesc, srv)
}
+
+func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(AnalyzeSentimentRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(AnalyzeEntitiesRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnalyzeEntitySentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(AnalyzeEntitySentimentRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(LanguageServiceServer).AnalyzeEntitySentiment(ctx, req.(*AnalyzeEntitySentimentRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(AnalyzeSyntaxRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+ in := new(AnnotateTextRequest)
+ if err := dec(in); err != nil {
+ return nil, err
+ }
+ if interceptor == nil {
+ return srv.(LanguageServiceServer).AnnotateText(ctx, in)
+ }
+ info := &grpc.UnaryServerInfo{
+ Server: srv,
+ FullMethod: "/google.cloud.language.v1beta2.LanguageService/AnnotateText",
+ }
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+ return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest))
+ }
+ return interceptor(ctx, in, info, handler)
+}
+
// _LanguageService_serviceDesc maps each LanguageService method name to its
// generated handler; RegisterLanguageServiceServer hands it to grpc.Server.
// The service has no streaming methods.
var _LanguageService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.language.v1beta2.LanguageService",
	HandlerType: (*LanguageServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "AnalyzeSentiment",
			Handler:    _LanguageService_AnalyzeSentiment_Handler,
		},
		{
			MethodName: "AnalyzeEntities",
			Handler:    _LanguageService_AnalyzeEntities_Handler,
		},
		{
			MethodName: "AnalyzeEntitySentiment",
			Handler:    _LanguageService_AnalyzeEntitySentiment_Handler,
		},
		{
			MethodName: "AnalyzeSyntax",
			Handler:    _LanguageService_AnalyzeSyntax_Handler,
		},
		{
			MethodName: "AnnotateText",
			Handler:    _LanguageService_AnnotateText_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "google/cloud/language/v1beta2/language_service.proto",
}
+
// init registers the raw file descriptor with the proto package so the
// descriptor can be looked up by its proto file path at runtime.
func init() {
	proto.RegisterFile("google/cloud/language/v1beta2/language_service.proto", fileDescriptor0)
}
+
// fileDescriptor0 holds the gzip-compressed FileDescriptorProto for
// google/cloud/language/v1beta2/language_service.proto; it is registered in
// init above. Generated data — do not edit by hand.
var fileDescriptor0 = []byte{
	// 2873 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x5a, 0x4d, 0x73, 0xdb, 0xc6,
	0xf9, 0x37, 0xf8, 0x26, 0x72, 0x29, 0xc9, 0x6b, 0xc4, 0x89, 0xf9, 0xd7, 0x3f, 0x2f, 0x0e, 0x12,
	0xd7, 0x8a, 0x9d, 0x50, 0xb1, 0xec, 0x38, 0xae, 0xed, 0xbc, 0x40, 0xc0, 0x92, 0x82, 0x4c, 0x02,
	0xc8, 0x02, 0xa0, 0xe5, 0x5c, 0x38, 0x30, 0xb9, 0x62, 0x38, 0x91, 0x00, 0x96, 0x80, 0x3c, 0x56,
	0x2f, 0x99, 0xc9, 0x4c, 0x8f, 0x99, 0x1e, 0x72, 0xe8, 0x07, 0xe8, 0xa1, 0xa7, 0x4e, 0x3a, 0xd3,
	0x99, 0x4e, 0xfb, 0x19, 0x7a, 0x4c, 0xa7, 0xa7, 0x1e, 0x7b, 0xec, 0xa1, 0x87, 0x1e, 0x7a, 0xec,
	0x3c, 0xbb, 0x0b, 0xbe, 0xc8, 0x8e, 0x25, 0x3a, 0x99, 0x4e, 0x7a, 0xdb, 0x7d, 0xf0, 0xfc, 0x9e,
	0x7d, 0xde, 0x9f, 0x05, 0x48, 0x74, 0x63, 0x10, 0xc7, 0x83, 0x7d, 0xb6, 0xd1, 0xdb, 0x8f, 0x0f,
	0xfb, 0x1b, 0xfb, 0x61, 0x34, 0x38, 0x0c, 0x07, 0x6c, 0xe3, 0xd1, 0xb5, 0x87, 0x2c, 0x0d, 0x37,
	0x27, 0x84, 0x6e, 0xc2, 0xc6, 0x8f, 0x86, 0x3d, 0x56, 0x1f, 0x8d, 0xe3, 0x34, 0x56, 0x5f, 0x11,
	0xa8, 0x3a, 0x47, 0xd5, 0x33, 0xa6, 0xba, 0x44, 0xad, 0xbd, 0x2c, 0x85, 0x86, 0xa3, 0xe1, 0x46,
	0x18, 0x45, 0x71, 0x1a, 0xa6, 0xc3, 0x38, 0x4a, 0x04, 0x78, 0xed, 0x0d, 0xf9, 0x74, 0x3f, 0x8e,
	0x06, 0xe3, 0xc3, 0x28, 0x1a, 0x46, 0x83, 0x8d, 0x78, 0xc4, 0xc6, 0x73, 0x4c, 0xaf, 0x49, 0x26,
	0xbe, 0x7b, 0x78, 0xb8, 0xb7, 0x91, 0x0e, 0x0f, 0x58, 0x92, 0x86, 0x07, 0x23, 0xc9, 0x70, 0x41,
	0x32, 0x8c, 0x47, 0xbd, 0x8d, 0x24, 0x0d, 0xd3, 0x43, 0x89, 0xd4, 0xfe, 0xa9, 0xa0, 0xb2, 0x19,
	0xf7, 0x0e, 0x0f, 0x58, 0x94, 0xaa, 0x1f, 0xa3, 0x42, 0x7a, 0x34, 0x62, 0x35, 0xe5, 0xa2, 0xb2,
	0xbe, 0xba, 0xf9, 0x76, 0xfd, 0x99, 0x7a, 0xd7, 0x33, 0x58, 0xdd, 0x3f, 0x1a, 0x31, 0xca, 0x91,
	0xea, 0x1a, 0x5a, 0xea, 0xc5, 0x51, 0xca, 0xa2, 0xb4, 0x96, 0xbb, 0xa8, 0xac, 0x57, 0xb6, 0xcf,
	0xd0, 0x8c, 0xa0, 0xae, 0xa3, 0xb3, 0x83, 0x5e, 0xd2, 0x95, 0xdb, 0xee, 0xe1, 0x78, 0x58, 0xcb,
	0x4b, 0x9e, 0x95, 0x41, 0x2f, 0x31, 0x04, 0x3d, 0x18, 0x0f, 0xd5, 0x35, 0x54, 0xce, 0x4e, 0xab,
	0x15, 0x80, 0x85, 0x4e, 0xf6, 0xda, 0x4d, 0x54, 0x80, 0xf3, 0xd4, 0xf3, 0x08, 0xfb, 0x0f, 0x5c,
	0xd2, 0x0d, 0x6c, 0xcf, 0x25, 0x86, 0xd5, 0xb0, 0x88, 0x89, 0xcf, 0xa8, 0xab, 0x08, 0xb9, 0x2d,
	0xdd, 0xb2, 0xbb, 0x3e, 0xd9, 0xf5, 0xb1, 0xa2, 0x96, 0x51, 0x61, 0xdb, 0x6f, 0xb7, 0x70, 0x6e,
	0xab, 0x8c, 0x4a, 0x49, 0x7c, 0x38, 0xee, 0x31, 0xed, 0x97, 0x0a, 0x2a, 0x7b, 0x0c, 0x0e, 0xeb,
	0x31, 0xf5, 0x0e, 0x2a, 0xa4, 0xec, 0x71, 0xca, 0x4d, 0xae, 0x6e, 0x5e, 0x3e, 0xc1, 0x64, 0x9f,
	0x3d, 0x4e, 0xbd, 0x51, 0x18, 0x51, 0x0e, 0x52, 0x1b, 0xa8, 0x92, 0xb0, 0x08, 0x7c, 0x2d, 0xed,
	0xad, 0x6e, 0xae, 0x9f, 0x20, 0xc1, 0xcb, 0xf8, 0xe9, 0x14, 0xaa, 0x7d, 0x5d, 0x40, 0x25, 0x12,
	0xa5, 0xc3, 0xf4, 0x48, 0x55, 0x51, 0x21, 0x0a, 0x0f, 0x44, 0x08, 0x2a, 0x94, 0xaf, 0xd5, 0x0f,
	0x65, 0x58, 0x72, 0x3c, 0x2c, 0x57, 0x4e, 0x38, 0x41, 0x08, 0x9a, 0x0d, 0x8a, 0x83, 0xca, 0x07,
	0x2c, 0x0d, 0xfb, 0x61, 0x1a, 0xd6, 0xf2, 0x17, 0xf3, 0xeb, 0xd5, 0xcd, 0xeb, 0xa7, 0x93, 0xd1,
	0x96, 0x28, 0x12, 0xa5, 0xe3, 0x23, 0x3a, 0x11, 0x02, 0xf1, 0x49, 0xc2, 0xfd, 0x21, 0x38, 0x90,
	0xc7, 0x27, 0x47, 0x27, 0x7b, 0x75, 0x1b, 0x0e, 0x8b, 0x78, 0x72, 0xd6, 0x8a, 0xfc, 0xb0, 0xb7,
	0x4f, 0x75, 0x58, 0x5b, 0x80, 0xe8, 0x04, 0x3d, 0xef, 0xdd, 0xd2, 0x73, 0x7b, 0x77, 0xed, 0x0e,
	0x5a, 0x99, 0x33, 0x44, 0xc5, 0x28, 0xff, 0x39, 0x3b, 0x92, 0x2e, 0x86, 0xa5, 0x7a, 0x1e, 0x15,
	0x1f, 0x85, 0xfb, 0x87, 0xc2, 0xc5, 0x15, 0x2a, 0x36, 0xb7, 0x73, 0xb7, 0x14, 0xed, 0x48, 0xa6,
	0x5b, 0x15, 0x2d, 0x05, 0xf6, 0x3d, 0xdb, 0xb9, 0x6f, 0xe3, 0x33, 0x2a, 0x42, 0x25, 0x97, 0x50,
	0xcf, 0xb1, 0xb1, 0xa2, 0x2e, 0xa3, 0x72, 0xcb, 0x31, 0x74, 0xdf, 0x72, 0x6c, 0x9c, 0x53, 0x31,
	0x5a, 0x76, 0x68, 0x53, 0xb7, 0xad, 0x4f, 0x05, 0x25, 0xaf, 0x56, 0x50, 0x91, 0x74, 0x88, 0xed,
	0xe3, 0x82, 0x7a, 0x16, 0x55, 0xef, 0x3b, 0xf4, 0x5e, 0xd7, 0x69, 0x74, 0x75, 0xea, 0xe3, 0xa2,
	0x7a, 0x0e, 0xad, 0x18, 0x8e, 0xed, 0x05, 0x6d, 0x42, 0xbb, 0x4d, 0xc7, 0x31, 0x71, 0x09, 0xd8,
	0x1d, 0x7f, 0x9b, 0x50, 0xbc, 0xa4, 0xfd, 0x22, 0x87, 0x8a, 0x7e, 0xfc, 0x39, 0x8b, 0xbe, 0x5f,
	0x92, 0x7e, 0x82, 0x56, 0x47, 0xe1, 0x38, 0xed, 0xc6, 0x7b, 0xdd, 0x64, 0xc4, 0x58, 0xef, 0x33,
	0x99, 0xa9, 0x57, 0x4f, 0x10, 0xe3, 0x86, 0xe3, 0xd4, 0xd9, 0xf3, 0x38, 0x84, 0x2e, 0x8f, 0x66,
	0x76, 0x6a, 0x07, 0x9d, 0xed, 0xb3, 0x11, 0x8b, 0xfa, 0x2c, 0xea, 0x1d, 0x75, 0x59, 0x7f, 0xc0,
	0x78, 0x25, 0x57, 0x37, 0xdf, 0x39, 0xa9, 0x65, 0x4c, 0x50, 0xa4, 0x3f, 0x60, 0x74, 0xb5, 0x3f,
	0xb7, 0x87, 0x30, 0xec, 0xb3, 0x83, 0x83, 0x50, 0x16, 0xbd, 0xd8, 0x68, 0x1f, 0xa1, 0xca, 0x24,
	0xae, 0xea, 0xcb, 0xa8, 0x72, 0x10, 0x0e, 0xa2, 0x61, 0x7a, 0xd8, 0x17, 0xd1, 0xca, 0xd1, 0x29,
	0x01, 0x04, 0x24, 0xbd, 0x78, 0x2c, 0xd4, 0xc9, 0x51, 0xb1, 0xd1, 0xfe, 0x74, 0x0e, 0x2d, 0xcf,
	0x5a, 0xa3, 0xea, 0x28, 0x9f, 0x86, 0x03, 0xd9, 0xe6, 0x36, 0x16, 0xf0, 0x43, 0xdd, 0x0f, 0x07,
	0x14, 0xb0, 0xea, 0x0e, 0x2a, 0x85, 0xc9, 0x88, 0xf5, 0x52, 0x59, 0x95, 0x9b, 0x8b, 0x48, 0xd1,
	0x39, 0x92, 0x4a, 0x09, 0xaa, 0x89, 0x0a, 0xbd, 0x30, 0x11, 0x4a, 0xaf, 0x6e, 0xbe, 0xbb, 0x88,
	0x24, 0x23, 0x4c, 0x18, 0xe5, 0x68, 0x90, 0xb2, 0x17, 0x8f, 0x0f, 0xb8, 0xef, 0x16, 0x94, 0xd2,
	0x88, 0xc7, 0x07, 0x94, 0xa3, 0xc1, 0xae, 0x01, 0x84, 0x64, 0x5c, 0x2b, 0x2e, 0x6e, 0x57, 0x93,
	0x23, 0xa9, 0x94, 0x00, 0x1a, 0x1d, 0xc4, 0x71, 0x9f, 0xd7, 0xee, 0x82, 0x1a, 0xb5, 0xe3, 0xb8,
	0x4f, 0x39, 0x1a, 0x34, 0x8a, 0x0e, 0x0f, 0x1e, 0xb2, 0x71, 0x6d, 0x69, 0x71, 0x8d, 0x6c, 0x8e,
	0xa4, 0x52, 0x02, 0xc8, 0x1a, 0xb1, 0x71, 0x12, 0x47, 0xb5, 0xf2, 0xe2, 0xb2, 0x5c, 0x8e, 0xa4,
	0x52, 0x02, 0x97, 0x35, 0x86, 0x49, 0x5c, 0xab, 0x3c, 0x87, 0x2c, 0x8e, 0xa4, 0x52, 0x82, 0xfa,
	0x00, 0x55, 0xc7, 0xac, 0x37, 0x1c, 0x8d, 0xe3, 0xde, 0x30, 0x3d, 0xaa, 0x21, 0x2e, 0xf0, 0xfd,
	0x45, 0x04, 0xd2, 0x29, 0x9c, 0xce, 0xca, 0x52, 0x9b, 0xa8, 0x98, 0xb2, 0x28, 0x61, 0xb5, 0x2a,
	0x17, 0x7a, 0x6d, 0xa1, 0x6c, 0x07, 0x20, 0x15, 0x78, 0x10, 0xf4, 0x28, 0x1e, 0xf6, 0x58, 0x6d,
	0x79, 0x71, 0x41, 0x1d, 0x00, 0x52, 0x81, 0xd7, 0xbe, 0x52, 0x50, 0xde, 0x0f, 0x07, 0xf3, 0x2d,
	0x75, 0x09, 0xe5, 0x75, 0x73, 0x07, 0x2b, 0x62, 0xe1, 0xe2, 0x9c, 0x58, 0x74, 0x70, 0x1e, 0x66,
	0xb8, 0xe1, 0xd8, 0x3b, 0xb8, 0x00, 0x24, 0x93, 0x40, 0xe3, 0x2c, 0xa3, 0x82, 0xed, 0x04, 0x36,
	0x2e, 0x01, 0xc9, 0x0e, 0xda, 0x78, 0x09, 0x48, 0x2e, 0x75, 0x6c, 0x5c, 0x06, 0x92, 0x4b, 0x7d,
	0x5c, 0x81, 0x5e, 0xea, 0x06, 0xb6, 0xe1, 0x63, 0x04, 0x4f, 0x3b, 0x84, 0x6e, 0xe1, 0xaa, 0x5a,
	0x44, 0xca, 0x2e, 0x5e, 0x86, 0x67, 0x7a, 0xa3, 0x61, 0xed, 0xe2, 0x15, 0xcd, 0x41, 0x25, 0x51,
	0x90, 0xaa, 0x8a, 0x56, 0x75, 0xb8, 0x4d, 0xf8, 0xdd, 0xa9, 0x62, 0x70, 0xa3, 0x20, 0xb4, 0x41,
	0x0c, 0xdf, 0xea, 0x10, 0xac, 0x40, 0x87, 0xb7, 0xda, 0x33, 0x94, 0x1c, 0xb4, 0x75, 0x97, 0x3a,
	0x4d, 0x4a, 0x3c, 0x0f, 0x08, 0x79, 0xed, 0xdf, 0x0a, 0x2a, 0x40, 0x61, 0x02, 0xaf, 0xa1, 0x7b,
	0x64, 0x5e, 0x9a, 0x6e, 0x18, 0x81, 0xa7, 0x4b, 0x69, 0x2b, 0xa8, 0xa2, 0x9b, 0xa0, 0x99, 0xa5,
	0xb7, 0x70, 0x4e, 0x0c, 0x84, 0xb6, 0xdb, 0x22, 0x6d, 0x62, 0x73, 0x8e, 0x3c, 0xcc, 0x1a, 0x53,
	0x70, 0x17, 0x60, 0xd6, 0x34, 0x89, 0x6d, 0xf1, 0x5d, 0x91, 0x6b, 0x62, 0x7b, 0x3e, 0x0d, 0x80,
	0x59, 0x6f, 0xe1, 0xd2, 0x74, 0x16, 0x75, 0x08, 0x5e, 0x82, 0xb3, 0x6c, 0xa7, 0x6d, 0xd9, 0x62,
	0x5f, 0x06, 0x7f, 0x3b, 0x5b, 0x2d, 0xeb, 0x93, 0x80, 0xe0, 0x0a, 0x1c, 0xec, 0xea, 0xd4, 0x17,
	0xb2, 0x10, 0x1c, 0xec, 0x52, 0xe2, 0x3a, 0x9e, 0x05, 0x63, 0x4b, 0x6f, 0xe1, 0x2a, 0x38, 0x83,
	0x92, 0x46, 0x8b, 0xec, 0x5a, 0x1d, 0xd2, 0x05, 0x33, 0xf0, 0x32, 0xb0, 0x51, 0xd2, 0xe2, 0x02,
	0x05, 0x69, 0x05, 0xce, 0xec, 0x64, 0x67, 0xae, 0x6a, 0xdf, 0x28, 0xa8, 0x00, 0xdd, 0x04, 0x94,
	0x6b, 0x38, 0xb4, 0x3d, 0x63, 0xfa, 0x32, 0x2a, 0xeb, 0x26, 0x28, 0xa4, 0xb7, 0xa4, 0xe1, 0xc1,
	0xae, 0xd5, 0xb2, 0x74, 0xfa, 0x00, 0xe7, 0xe0, 0xb0, 0x19, 0xc3, 0x3f, 0x25, 0x14, 0xe7, 0xb9,
	0x08, 0xcb, 0xd6, 0x5b, 0x5d, 0x62, 0x9b, 0x96, 0xdd, 0xc4, 0x05, 0xf0, 0x45, 0x93, 0xd0, 0xc0,
	0x36, 0x71, 0x11, 0xd6, 0x94, 0xe8, 0x2d, 0xcb, 0x13, 0x76, 0x5b, 0x54, 0xee, 0x96, 0x20, 0xb4,
	0xde, 0xb6, 0x43, 0x7d, 0x5c, 0x86, 0xb0, 0xb7, 0x1c, 0xbb, 0x29, 0x72, 0xc1, 0xa1, 0x26, 0xa1,
	0x18, 0x01, 0xb7, 0xbc, 0x32, 0x1a, 0xb8, 0xaa, 0x11, 0x54, 0x12, 0x6d, 0x0b, 0x74, 0x68, 0x12,
	0xdb, 0x24, 0x74, 0x5e, 0xe9, 0x06, 0x69, 0x5b, 0xb6, 0x65, 0xcb, 0x68, 0xb5, 0x75, 0xcf, 0x08,
	0x5a, 0xb0, 0xcd, 0x81, 0x0a, 0x36, 0x09, 0x7c, 0x50, 0x56, 0xfb, 0x02, 0x15, 0xa0, 0x67, 0x81,
	0xd2, 0x6d, 0xc7, 0x31, 0x67, 0x44, 0x9c, 0x47, 0xd8, 0x70, 0x6c, 0x53, 0x3a, 0xb6, 0x0b, 0x4f,
	0xb1, 0x02, 0xc1, 0xe1, 0x69, 0xa4, 0xcb, 0x24, 0x82, 0xbd, 0x6d, 0x5a, 0xd2, 0x91, 0x79, 0xf0,
	0xb4, 0x65, 0xfb, 0x84, 0x52, 0xa7, 0x99, 0x45, 0xbf, 0x8a, 0x96, 0x76, 0x02, 0x91, 0x63, 0x45,
	0x48, 0x3a, 0x2f, 0xd8, 0xda, 0x81, 0xf4, 0x06, 0x42, 0x49, 0xfb, 0x18, 0x95, 0x44, 0xb3, 0x03,
	0x3b, 0xec, 0xa0, 0xbd, 0x75, 0xdc, 0x0e, 0xcf, 0xb2, 0x9b, 0x41, 0x4b, 0xa7, 0x58, 0xe1, 0xf7,
	0x97, 0x56, 0x40, 0x79, 0xca, 0x95, 0x51, 0xc1, 0x0c, 0xf4, 0x16, 0xce, 0x6b, 0x3e, 0x2a, 0x89,
	0x16, 0x07, 0x12, 0xc4, 0xfd, 0x66, 0x46, 0x42, 0x05, 0x15, 0x1b, 0x16, 0xf5, 0x7c, 0x01, 0xf7,
	0x08, 0xd8, 0x84, 0x73, 0x40, 0xf6, 0xb7, 0x2d, 0x6a, 0xe2, 0x3c, 0x18, 0x3a, 0x4d, 0x18, 0x79,
	0x3f, 0x2a, 0x68, 0xb7, 0x50, 0x49, 0x34, 0x3b, 0x2e, 0x95, 0x3a, 0xee, 0x9c, 0x5e, 0xa0, 0x09,
	0xa7, 0x09, 0x97, 0xd8, 0x8e, 0xdf, 0x95, 0xfb, 0x9c, 0xb6, 0x83, 0xaa, 0x33, 0x5d, 0x4d, 0xbd,
	0x80, 0x5e, 0xa0, 0xc4, 0xb0, 0x5c, 0xea, 0x18, 0x96, 0xff, 0x60, 0xbe, 0xa6, 0xb2, 0x07, 0x3c,
	0xb5, 0xc0, 0x7e, 0xc7, 0xee, 0xce, 0xd0, 0x72, 0x5a, 0x82, 0x8a, 0xbc, 0x99, 0x81, 0x5f, 0x7d,
	0x62, 0xcf, 0xd5, 0xe4, 0x8b, 0xe8, 0xdc, 0x6c, 0x80, 0xf8, 0x63, 0x61, 0x65, 0x23, 0xf0, 0x03,
	0x4a, 0x84, 0x93, 0x5c, 0xdd, 0xf3, 0x71, 0x1e, 0x82, 0xe0, 0x52, 0xe2, 0x89, 0x0b, 0xdd, 0x0a,
	0xaa, 0x4c, 0x7a, 0x01, 0x2e, 0x8a, 0x97, 0x8f, 0x20, 0xdb, 0x97, 0xb4, 0x2d, 0x54, 0xe4, 0x8d,
	0x0f, 0x0e, 0xed, 0x38, 0x96, 0x41, 0xe6, 0x0d, 0xd7, 0x8d, 0x69, 0x13, 0x30, 0xf4, 0xac, 0x27,
	0xe4, 0xf8, 0x11, 0x7a, 0xd6, 0x4b, 0xfe, 0xb5, 0x84, 0x56, 0xe7, 0x6f, 0x4d, 0xea, 0x3a, 0xc2,
	0x9f, 0xb1, 0xb0, 0xdf, 0x4d, 0xe1, 0x6e, 0xd8, 0x1d, 0x46, 0x7d, 0xf6, 0x98, 0x5f, 0x65, 0x8a,
	0x74, 0x15, 0xe8, 0xfc, 0xca, 0x68, 0x01, 0x55, 0xb5, 0x50, 0x71, 0x3f, 0x7c, 0xc8, 0xf6, 0xe5,
	0x1d, 0xe5, 0xfa, 0x42, 0xb7, 0xb3, 0x7a, 0x0b, 0xa0, 0x54, 0x48, 0xd0, 0xfe, 0x51, 0x42, 0x45,
	0x4e, 0x78, 0xe2, 0x26, 0xac, 0x6f, 0x6d, 0x51, 0xd2, 0xc1, 0x0a, 0x6f, 0xa9, 0x50, 0xc4, 0x22,
	0x2b, 0x74, 0xb3, 0x63, 0xb4, 0x44, 0xff, 0xd2, 0xcd, 0x4e, 0xdb, 0x31, 0x71, 0x01, 0xdc, 0xa8,
	0xc3, 0xaa, 0xc8, 0x19, 0x5c, 0xd7, 0x81, 0xe2, 0x05, 0xa2, 0xef, 0x53, 0xbc, 0xc4, 0x3b, 0x7e,
	0xb0, 0x2b, 0x3a, 0x95, 0x1e, 0xec, 0x82, 0x13, 0x70, 0x45, 0x2d, 0xa1, 0x9c, 0x61, 0x60, 0x04,
	0x10, 0x83, 0x8b, 0xaf, 0x4e, 0x26, 0x02, 0x6f, 0xe3, 0x06, 0xd4, 0x01, 0x5e, 0xe1, 0x5e, 0x84,
	0x25, 0x87, 0xad, 0x8a, 0x59, 0xe1, 0xe2, 0xb3, 0xd9, 0xd0, 0xc0, 0xc0, 0x60, 0x5a, 0x9e, 0xe1,
	0x04, 0xd4, 0x23, 0xf8, 0x1c, 0x4f, 0x7c, 0x67, 0x6b, 0x07, 0xab, 0xb0, 0x22, 0xbb, 0x6e, 0x0b,
	0xbf, 0xc0, 0x1b, 0xac, 0x43, 0xbc, 0xfb, 0x96, 0xbf, 0x8d, 0xcf, 0x03, 0xdd, 0x02, 0x8e, 0x17,
	0x61, 0xd5, 0xd6, 0xe9, 0x3d, 0xfc, 0x12, 0x48, 0x6b, 0xdf, 0x27, 0xf8, 0x82, 0x58, 0x74, 0x70,
	0x8d, 0x4f, 0x20, 0xd2, 0xc4, 0xff, 0x07, 0x8a, 0xda, 0x36, 0x5e, 0x03, 0x21, 0xb6, 0x2b, 0x6d,
	0xfe, 0x7f, 0xd0, 0xd0, 0xe6, 0x1a, 0xbe, 0x0c, 0x0a, 0xd8, 0x13, 0x0d, 0x5f, 0xc9, 0x46, 0xd7,
	0xab, 0xbc, 0x8f, 0xf0, 0x82, 0xc5, 0xaf, 0xc1, 0x78, 0x72, 0xf1, 0x45, 0xd9, 0x9e, 0x75, 0x5f,
	0xdf, 0xb5, 0x3c, 0xfc, 0xba, 0x48, 0x09, 0xea, 0x83, 0x44, 0x8d, 0x8f, 0x35, 0xee, 0x88, 0x37,
	0x78, 0x5e, 0x82, 0x86, 0x6f, 0x8a, 0x95, 0xe7, 0xe1, 0x4b, 0x9c, 0xd7, 0xf1, 0x7c, 0xd0, 0xe9,
	0x27, 0x32, 0x5d, 0x39, 0xf7, 0xe5, 0xc9, 0xc6, 0xde, 0xc1, 0xeb, 0xa2, 0xf2, 0x08, 0x78, 0xe6,
	0x2d, 0x31, 0x3b, 0x49, 0x03, 0x5f, 0x91, 0x2b, 0x17, 0x5f, 0xe5, 0xa7, 0x50, 0xc7, 0x6e, 0xe1,
	0xb7, 0xb3, 0x81, 0xfa, 0x0e, 0x58, 0xe8, 0x7a, 0xb8, 0x0e, 0x16, 0x7e, 0x12, 0xe8, 0x36, 0xd7,
	0x67, 0x03, 0x38, 0xa9, 0x01, 0xcb, 0x77, 0xe1, 0x01, 0x5f, 0x52, 0xd2, 0xc2, 0xd7, 0xf8, 0x03,
	0x93, 0x3a, 0x2e, 0xde, 0x04, 0x11, 0x70, 0xc0, 0x75, 0xd0, 0x81, 0x92, 0xb6, 0xad, 0xdb, 0x3e,
	0xbe, 0x21, 0x2a, 0x17, 0xec, 0xb4, 0xcd, 0xa0, 0x8d, 0xdf, 0x83, 0xd3, 0xa9, 0xe3, 0xf8, 0xf8,
	0x26, 0xac, 0x3c, 0x70, 0xce, 0xfb, 0x7c, 0x15, 0x34, 0x1a, 0xf8, 0x16, 0xac, 0xf8, 0x89, 0x3f,
	0xe5, 0x4d, 0xc7, 0x71, 0x2d, 0x03, 0xdf, 0xe6, 0x83, 0x1d, 0x88, 0x77, 0xe6, 0x06, 0xd1, 0x5d,
	0x60, 0xd9, 0xe5, 0x66, 0x7f, 0xc0, 0xdb, 0x55, 0xc0, 0x67, 0xfd, 0x87, 0x1c, 0x69, 0xf9, 0x2d,
	0x82, 0x3f, 0x12, 0xf3, 0xa8, 0xe3, 0x6e, 0x03, 0xfa, 0x63, 0x99, 0x72, 0x50, 0x86, 0x58, 0xe7,
	0xd9, 0x19, 0xec, 0x76, 0x3a, 0x78, 0x0b, 0x96, 0x26, 0x3f, 0xd5, 0x00, 0x96, 0x86, 0x43, 0x89,
	0xd5, 0xb4, 0xb1, 0x09, 0xae, 0xb8, 0x77, 0x1f, 0x13, 0x3e, 0x61, 0x2c, 0xcf, 0xc7, 0x0d, 0x71,
	0x27, 0x69, 0x1b, 0xb8, 0xc9, 0x13, 0xc0, 0x69, 0x8b, 0xbc, 0xdc, 0x86, 0x89, 0x90, 0xed, 0x78,
	0xe0, 0x2d, 0xce, 0x19, 0xb4, 0x0d, 0xbc, 0x03, 0x6e, 0x31, 0x1c, 0x17, 0xdf, 0x03, 0x4f, 0x98,
	0x96, 0xc7, 0x87, 0x37, 0x31, 0x71, 0x4b, 0xfb, 0x2a, 0x87, 0x56, 0xe6, 0xde, 0x8b, 0xbf, 0xdf,
	0x3b, 0x20, 0x99, 0xfb, 0x82, 0x70, 0x6d, 0x91, 0x17, 0xf2, 0xd9, 0x0f, 0x09, 0x73, 0x6f, 0xe4,
	0xf9, 0xe7, 0xff, 0xde, 0xf1, 0xae, 0x7c, 0xa9, 0xc6, 0x68, 0x59, 0x7e, 0xc3, 0x79, 0xda, 0x3c,
	0x40, 0xa8, 0x64, 0x38, 0xed, 0x36, 0xbc, 0x57, 0x6b, 0x4d, 0x54, 0xce, 0x4c, 0x52, 0x6b, 0xd3,
	0x6f, 0x4c, 0xe2, 0x15, 0x7e, 0xf2, 0x85, 0xe9, 0x75, 0xb4, 0xfc, 0x90, 0x0d, 0x86, 0x51, 0x37,
	0xde, 0xdb, 0x4b, 0x98, 0x78, 0x35, 0x2b, 0xd2, 0x2a, 0xa7, 0x39, 0x9c, 0xa4, 0xfd, 0x4e, 0x41,
	0x17, 0xf4, 0x28, 0xdc, 0x3f, 0xfa, 0x39, 0x9b, 0xaa, 0xc6, 0x7e, 0x76, 0xc8, 0x92, 0x54, 0x35,
	0x50, 0xb9, 0x2f, 0xbf, 0x69, 0x9d, 0xd2, 0xcd, 0xd9, 0x27, 0x30, 0x3a, 0x01, 0xaa, 0x2e, 0x5a,
	0x61, 0x51, 0x2f, 0xee, 0x0f, 0xa3, 0x41, 0x77, 0xc6, 0xe7, 0x57, 0x4f, 0xf4, 0xb9, 0xc0, 0x70,
	0x6f, 0x2f, 0xb3, 0x99, 0x9d, 0xf6, 0x57, 0x05, 0xd5, 0x9e, 0x54, 0x39, 0x19, 0xc5, 0x30, 0xcf,
	0xee, 0x23, 0x35, 0x3b, 0xba, 0x3b, 0x8d, 0x8d, 0xb2, 0x60, 0x6c, 0xce, 0x65, 0x32, 0xa6, 0x2f,
	0xda, 0xb3, 0xdf, 0xe0, 0x72, 0xf3, 0xdf, 0xe0, 0x54, 0x22, 0xf2, 0x80, 0x45, 0x3d, 0x96, 0xc8,
	0x2f, 0x4a, 0x97, 0x4f, 0x71, 0x16, 0xf0, 0xd3, 0x29, 0x52, 0xfb, 0x83, 0x82, 0x5e, 0x91, 0x86,
	0x89, 0x94, 0xfb, 0x5f, 0x89, 0xc8, 0x17, 0xe8, 0xd5, 0xef, 0xd2, 0x5b, 0x86, 0x45, 0x47, 0x65,
	0xa0, 0xa5, 0x43, 0x96, 0xd4, 0x14, 0xee, 0xa0, 0x4b, 0xa7, 0x2a, 0x3a, 0x3a, 0x81, 0x3d, 0x2b,
	0x00, 0x70, 0xcd, 0x7e, 0x69, 0x56, 0x83, 0x21, 0x4b, 0x7e, 0xe4, 0x2e, 0x7b, 0x3c, 0x29, 0xbb,
	0xa9, 0xc2, 0xff, 0x1d, 0x5f, 0xfd, 0x56, 0x41, 0xe7, 0xb3, 0xf2, 0x39, 0x8a, 0xd2, 0xf0, 0xf1,
	0x8f, 0xdc, 0x53, 0x7f, 0x54, 0xd0, 0x8b, 0xc7, 0xf4, 0x95, 0x8e, 0x9a, 0x2b, 0x3b, 0xe5, 0x79,
	0xcb, 0x4e, 0xbd, 0x8b, 0x4a, 0xfc, 0xea, 0x98, 0xd4, 0x72, 0x5c, 0xc6, 0x9b, 0x27, 0xcd, 0x12,
	0x60, 0xa6, 0x12, 0x33, 0xe7, 0xea, 0xfc, 0x31, 0x57, 0xff, 0x2d, 0x8f, 0x5e, 0xd0, 0xc5, 0x2f,
	0x18, 0x0c, 0xda, 0xf5, 0x0f, 0xea, 0xe9, 0xfb, 0xa8, 0xbc, 0xc7, 0xc2, 0xf4, 0x70, 0xcc, 0x12,
	0xf9, 0x05, 0xf3, 0xce, 0x09, 0x42, 0x9e, 0xa2, 0x4a, 0xbd, 0x21, 0x45, 0xd0, 0x89, 0xb0, 0x27,
	0x43, 0x98, 0xff, 0x9e, 0x21, 0x5c, 0xfb, 0x8b, 0x82, 0xca, 0xd9, 0x41, 0xea, 0x25, 0xb4, 0xca,
	0x1e, 0xa7, 0xe3, 0xb0, 0x97, 0x76, 0x13, 0x1e, 0x4f, 0xee, 0x82, 0x32, 0x5d, 0x91, 0x54, 0x11,
	0x64, 0xf5, 0x2d, 0x84, 0x33, 0xb6, 0x49, 0x35, 0xe4, 0x38, 0xe3, 0x59, 0x49, 0xcf, 0x0a, 0x47,
	0xbd, 0x8b, 0xd6, 0x32, 0xd6, 0xa7, 0xf4, 0xfe, 0x3c, 0x07, 0xd5, 0x24, 0x87, 0xf9, 0x44, 0x63,
	0xbf, 0x85, 0x6a, 0x73, 0x07, 0x1d, 0xcd, 0x60, 0x0b, 0x1c, 0xfb, 0xd2, 0xec, 0x81, 0xd3, 0xe6,
	0xa6, 0x7d, 0x9b, 0x83, 0x4a, 0x9a, 0xf5, 0xe9, 0x8f, 0x29, 0x31, 0x67, 0xdb, 0x48, 0xfe, 0xf9,
	0xda, 0xc8, 0xd3, 0x87, 0x69, 0xe1, 0x87, 0x1d, 0xa6, 0xc5, 0xf9, 0xa2, 0xb9, 0x72, 0x0b, 0x2d,
	0xcf, 0xa6, 0x92, 0xb8, 0x47, 0xda, 0x04, 0x9f, 0x81, 0x55, 0xe0, 0x37, 0x6e, 0x89, 0x57, 0xab,
	0xc0, 0x6f, 0x5c, 0xbb, 0x29, 0x5e, 0xad, 0x02, 0xbf, 0x71, 0x7d, 0x13, 0xe7, 0x37, 0x7f, 0xb5,
	0x84, 0xce, 0xb6, 0xa4, 0x18, 0x4f, 0xfc, 0xe2, 0xa8, 0xfe, 0x5e, 0x41, 0xf8, 0xf8, 0x65, 0x41,
	0xbd, 0x79, 0x62, 0xa1, 0x3c, 0xf5, 0x42, 0xb4, 0xf6, 0xfe, 0xc2, 0x38, 0x91, 0x10, 0x5a, 0xfd,
	0xcb, 0x6f, 0xff, 0xfe, 0x75, 0x6e, 0x5d, 0x7b, 0x63, 0xf2, 0xd3, 0x68, 0xe6, 0x93, 0xe4, 0x76,
	0x78, 0x0c, 0x74, 0x5b, 0xb9, 0xa2, 0x7e, 0xa3, 0xa0, 0xb3, 0xc7, 0xc6, 0x83, 0xfa, 0xde, 0xe9,
	0x0e, 0x3f, 0x36, 0xff, 0xd6, 0x6e, 0x2e, 0x0a, 0x93, 0x2a, 0xbf, 0xc3, 0x55, 0xbe, 0xac, 0x69,
	0xdf, 0xad, 0x72, 0x86, 0x01, 0x8d, 0xff, 0x7c, 0x6c, 0x02, 0x4f, 0xcb, 0x44, 0xbd, 0xbb, 0x80,
	0x06, 0x4f, 0x5c, 0x79, 0xd6, 0x3e, 0x78, 0x4e, 0xb4, 0x34, 0xe3, 0x06, 0x37, 0xa3, 0xae, 0xbd,
	0x75, 0x82, 0x19, 0x47, 0x73, 0xfe, 0xff, 0x8d, 0x82, 0x56, 0xe6, 0x66, 0x8e, 0x7a, 0xfd, 0x94,
	0xa1, 0x9f, 0x9d, 0xa8, 0x6b, 0x37, 0x16, 0x03, 0x49, 0x95, 0xaf, 0x72, 0x95, 0x2f, 0x69, 0x17,
	0x9f, 0x91, 0x2c, 0x1c, 0x01, 0x9a, 0xfe, 0x5a, 0x41, 0xcb, 0xb3, 0x3d, 0x48, 0xdd, 0x5c, 0x7c,
	0x08, 0xac, 0x5d, 0x5f, 0x08, 0x23, 0xd5, 0xbc, 0xc2, 0xd5, 0x7c, 0x53, 0x7b, 0xed, 0xa9, 0x6a,
	0x4e, 0x01, 0xb7, 0x95, 0x2b, 0x5b, 0x5f, 0x2a, 0xe8, 0xf5, 0x5e, 0x7c, 0xf0, 0xec, 0x63, 0xb6,
	0xce, 0x1f, 0x2b, 0x5e, 0x77, 0x1c, 0xa7, 0xb1, 0xab, 0x7c, 0x4a, 0x24, 0x6c, 0x10, 0x03, 0xa4,
	0x1e, 0x8f, 0x07, 0x1b, 0x03, 0x16, 0xf1, 0xdf, 0xeb, 0x37, 0xc4, 0xa3, 0x70, 0x34, 0x4c, 0xbe,
	0xe3, 0x4f, 0x08, 0x77, 0x32, 0xc2, 0xc3, 0x12, 0x47, 0x5c, 0xff, 0x4f, 0x00, 0x00, 0x00, 0xff,
	0xff, 0x55, 0xc3, 0xe3, 0x00, 0xb5, 0x20, 0x00, 0x00,
}