diff --git a/googleapis/cloud/language/v1/language_service.pb.go b/googleapis/cloud/language/v1/language_service.pb.go
new file mode 100644
index 0000000000000000000000000000000000000000..dceb9f47176034879b7c6fa4537c9af5ae8e8bd1
--- /dev/null
+++ b/googleapis/cloud/language/v1/language_service.pb.go
@@ -0,0 +1,2052 @@
+// Code generated by protoc-gen-go.
+// source: google.golang.org/genproto/googleapis/cloud/language/v1/language_service.proto
+// DO NOT EDIT!
+
+/*
+Package google_cloud_language_v1 is a generated protocol buffer package.
+
+It is generated from these files:
+	google.golang.org/genproto/googleapis/cloud/language/v1/language_service.proto
+
+It has these top-level messages:
+	Document
+	Sentence
+	Entity
+	Token
+	Sentiment
+	PartOfSpeech
+	DependencyEdge
+	EntityMention
+	TextSpan
+	AnalyzeSentimentRequest
+	AnalyzeSentimentResponse
+	AnalyzeEntitiesRequest
+	AnalyzeEntitiesResponse
+	AnalyzeSyntaxRequest
+	AnalyzeSyntaxResponse
+	AnnotateTextRequest
+	AnnotateTextResponse
+*/
+package google_cloud_language_v1 // import "google.golang.org/genproto/googleapis/cloud/language/v1"
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import _ "google.golang.org/genproto/googleapis/api/serviceconfig"
+
+import (
+	context "golang.org/x/net/context"
+	grpc "google.golang.org/grpc"
+)
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// Represents the text encoding that the caller uses to process the output.
+// Providing an `EncodingType` is recommended because the API provides the
+// beginning offsets for various outputs, such as tokens and mentions, and
+// languages that natively use different text encodings may access offsets
+// differently.
+type EncodingType int32
+
+const (
+	// If `EncodingType` is not specified, encoding-dependent information (such as
+	// `begin_offset`) will be set at `-1`.
+	EncodingType_NONE EncodingType = 0
+	// Encoding-dependent information (such as `begin_offset`) is calculated based
+	// on the UTF-8 encoding of the input. C++ and Go are examples of languages
+	// that use this encoding natively.
+	EncodingType_UTF8 EncodingType = 1
+	// Encoding-dependent information (such as `begin_offset`) is calculated based
+	// on the UTF-16 encoding of the input. Java and JavaScript are examples of
+	// languages that use this encoding natively.
+	EncodingType_UTF16 EncodingType = 2
+	// Encoding-dependent information (such as `begin_offset`) is calculated based
+	// on the UTF-32 encoding of the input. Python is an example of a language
+	// that uses this encoding natively.
+	EncodingType_UTF32 EncodingType = 3
+)
+
+var EncodingType_name = map[int32]string{
+	0: "NONE",
+	1: "UTF8",
+	2: "UTF16",
+	3: "UTF32",
+}
+var EncodingType_value = map[string]int32{
+	"NONE":  0,
+	"UTF8":  1,
+	"UTF16": 2,
+	"UTF32": 3,
+}
+
+func (x EncodingType) String() string {
+	return proto.EnumName(EncodingType_name, int32(x))
+}
+func (EncodingType) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
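+
+// Illustrative sketch, not part of the generated code: how the chosen
+// EncodingType affects offsets such as TextSpan.begin_offset. For the
+// plain-text input "héllo world", the token "world" begins at byte 7 under
+// UTF8 ("é" occupies two bytes) but at code unit 6 under UTF16 or UTF32;
+// with NONE, encoding-dependent offsets are reported as -1.
+//
+//	req := &AnalyzeSyntaxRequest{
+//		Document: &Document{
+//			Type:   Document_PLAIN_TEXT,
+//			Source: &Document_Content{Content: "héllo world"},
+//		},
+//		// Request byte offsets into the UTF-8 form of the text.
+//		EncodingType: EncodingType_UTF8,
+//	}
+//	_ = req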
+
+// The document types enum.
+type Document_Type int32
+
+const (
+	// The content type is not specified.
+	Document_TYPE_UNSPECIFIED Document_Type = 0
+	// Plain text
+	Document_PLAIN_TEXT Document_Type = 1
+	// HTML
+	Document_HTML Document_Type = 2
+)
+
+var Document_Type_name = map[int32]string{
+	0: "TYPE_UNSPECIFIED",
+	1: "PLAIN_TEXT",
+	2: "HTML",
+}
+var Document_Type_value = map[string]int32{
+	"TYPE_UNSPECIFIED": 0,
+	"PLAIN_TEXT":       1,
+	"HTML":             2,
+}
+
+func (x Document_Type) String() string {
+	return proto.EnumName(Document_Type_name, int32(x))
+}
+func (Document_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{0, 0} }
+
+// The type of the entity.
+type Entity_Type int32
+
+const (
+	// Unknown
+	Entity_UNKNOWN Entity_Type = 0
+	// Person
+	Entity_PERSON Entity_Type = 1
+	// Location
+	Entity_LOCATION Entity_Type = 2
+	// Organization
+	Entity_ORGANIZATION Entity_Type = 3
+	// Event
+	Entity_EVENT Entity_Type = 4
+	// Work of art
+	Entity_WORK_OF_ART Entity_Type = 5
+	// Consumer goods
+	Entity_CONSUMER_GOOD Entity_Type = 6
+	// Other types
+	Entity_OTHER Entity_Type = 7
+)
+
+var Entity_Type_name = map[int32]string{
+	0: "UNKNOWN",
+	1: "PERSON",
+	2: "LOCATION",
+	3: "ORGANIZATION",
+	4: "EVENT",
+	5: "WORK_OF_ART",
+	6: "CONSUMER_GOOD",
+	7: "OTHER",
+}
+var Entity_Type_value = map[string]int32{
+	"UNKNOWN":       0,
+	"PERSON":        1,
+	"LOCATION":      2,
+	"ORGANIZATION":  3,
+	"EVENT":         4,
+	"WORK_OF_ART":   5,
+	"CONSUMER_GOOD": 6,
+	"OTHER":         7,
+}
+
+func (x Entity_Type) String() string {
+	return proto.EnumName(Entity_Type_name, int32(x))
+}
+func (Entity_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} }
+
+// The part of speech tags enum.
+type PartOfSpeech_Tag int32
+
+const (
+	// Unknown
+	PartOfSpeech_UNKNOWN PartOfSpeech_Tag = 0
+	// Adjective
+	PartOfSpeech_ADJ PartOfSpeech_Tag = 1
+	// Adposition (preposition and postposition)
+	PartOfSpeech_ADP PartOfSpeech_Tag = 2
+	// Adverb
+	PartOfSpeech_ADV PartOfSpeech_Tag = 3
+	// Conjunction
+	PartOfSpeech_CONJ PartOfSpeech_Tag = 4
+	// Determiner
+	PartOfSpeech_DET PartOfSpeech_Tag = 5
+	// Noun (common and proper)
+	PartOfSpeech_NOUN PartOfSpeech_Tag = 6
+	// Cardinal number
+	PartOfSpeech_NUM PartOfSpeech_Tag = 7
+	// Pronoun
+	PartOfSpeech_PRON PartOfSpeech_Tag = 8
+	// Particle or other function word
+	PartOfSpeech_PRT PartOfSpeech_Tag = 9
+	// Punctuation
+	PartOfSpeech_PUNCT PartOfSpeech_Tag = 10
+	// Verb (all tenses and modes)
+	PartOfSpeech_VERB PartOfSpeech_Tag = 11
+	// Other: foreign words, typos, abbreviations
+	PartOfSpeech_X PartOfSpeech_Tag = 12
+	// Affix
+	PartOfSpeech_AFFIX PartOfSpeech_Tag = 13
+)
+
+var PartOfSpeech_Tag_name = map[int32]string{
+	0:  "UNKNOWN",
+	1:  "ADJ",
+	2:  "ADP",
+	3:  "ADV",
+	4:  "CONJ",
+	5:  "DET",
+	6:  "NOUN",
+	7:  "NUM",
+	8:  "PRON",
+	9:  "PRT",
+	10: "PUNCT",
+	11: "VERB",
+	12: "X",
+	13: "AFFIX",
+}
+var PartOfSpeech_Tag_value = map[string]int32{
+	"UNKNOWN": 0,
+	"ADJ":     1,
+	"ADP":     2,
+	"ADV":     3,
+	"CONJ":    4,
+	"DET":     5,
+	"NOUN":    6,
+	"NUM":     7,
+	"PRON":    8,
+	"PRT":     9,
+	"PUNCT":   10,
+	"VERB":    11,
+	"X":       12,
+	"AFFIX":   13,
+}
+
+func (x PartOfSpeech_Tag) String() string {
+	return proto.EnumName(PartOfSpeech_Tag_name, int32(x))
+}
+func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 0} }
+
+// The characteristic of a verb that expresses time flow during an event.
+type PartOfSpeech_Aspect int32
+
+const (
+	// Aspect is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0
+	// Perfective
+	PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1
+	// Imperfective
+	PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2
+	// Progressive
+	PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3
+)
+
+var PartOfSpeech_Aspect_name = map[int32]string{
+	0: "ASPECT_UNKNOWN",
+	1: "PERFECTIVE",
+	2: "IMPERFECTIVE",
+	3: "PROGRESSIVE",
+}
+var PartOfSpeech_Aspect_value = map[string]int32{
+	"ASPECT_UNKNOWN": 0,
+	"PERFECTIVE":     1,
+	"IMPERFECTIVE":   2,
+	"PROGRESSIVE":    3,
+}
+
+func (x PartOfSpeech_Aspect) String() string {
+	return proto.EnumName(PartOfSpeech_Aspect_name, int32(x))
+}
+func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 1} }
+
+// The grammatical function performed by a noun or pronoun in a phrase,
+// clause, or sentence. In some languages, other parts of speech, such as
+// adjective and determiner, take case inflection in agreement with the noun.
+type PartOfSpeech_Case int32
+
+const (
+	// Case is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0
+	// Accusative
+	PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1
+	// Adverbial
+	PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2
+	// Complementive
+	PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3
+	// Dative
+	PartOfSpeech_DATIVE PartOfSpeech_Case = 4
+	// Genitive
+	PartOfSpeech_GENITIVE PartOfSpeech_Case = 5
+	// Instrumental
+	PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6
+	// Locative
+	PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7
+	// Nominative
+	PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8
+	// Oblique
+	PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9
+	// Partitive
+	PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10
+	// Prepositional
+	PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11
+	// Reflexive
+	PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12
+	// Relative
+	PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13
+	// Vocative
+	PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14
+)
+
+var PartOfSpeech_Case_name = map[int32]string{
+	0:  "CASE_UNKNOWN",
+	1:  "ACCUSATIVE",
+	2:  "ADVERBIAL",
+	3:  "COMPLEMENTIVE",
+	4:  "DATIVE",
+	5:  "GENITIVE",
+	6:  "INSTRUMENTAL",
+	7:  "LOCATIVE",
+	8:  "NOMINATIVE",
+	9:  "OBLIQUE",
+	10: "PARTITIVE",
+	11: "PREPOSITIONAL",
+	12: "REFLEXIVE_CASE",
+	13: "RELATIVE_CASE",
+	14: "VOCATIVE",
+}
+var PartOfSpeech_Case_value = map[string]int32{
+	"CASE_UNKNOWN":   0,
+	"ACCUSATIVE":     1,
+	"ADVERBIAL":      2,
+	"COMPLEMENTIVE":  3,
+	"DATIVE":         4,
+	"GENITIVE":       5,
+	"INSTRUMENTAL":   6,
+	"LOCATIVE":       7,
+	"NOMINATIVE":     8,
+	"OBLIQUE":        9,
+	"PARTITIVE":      10,
+	"PREPOSITIONAL":  11,
+	"REFLEXIVE_CASE": 12,
+	"RELATIVE_CASE":  13,
+	"VOCATIVE":       14,
+}
+
+func (x PartOfSpeech_Case) String() string {
+	return proto.EnumName(PartOfSpeech_Case_name, int32(x))
+}
+func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 2} }
+
+// Depending on the language, Form can categorize different forms of verbs,
+// adjectives, adverbs, etc. For example, it can categorize inflected endings
+// of verbs and adjectives, or distinguish between short and long forms of
+// adjectives and participles.
+type PartOfSpeech_Form int32
+
+const (
+	// Form is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0
+	// Adnomial
+	PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1
+	// Auxiliary
+	PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2
+	// Complementizer
+	PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3
+	// Final ending
+	PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4
+	// Gerund
+	PartOfSpeech_GERUND PartOfSpeech_Form = 5
+	// Realis
+	PartOfSpeech_REALIS PartOfSpeech_Form = 6
+	// Irrealis
+	PartOfSpeech_IRREALIS PartOfSpeech_Form = 7
+	// Short form
+	PartOfSpeech_SHORT PartOfSpeech_Form = 8
+	// Long form
+	PartOfSpeech_LONG PartOfSpeech_Form = 9
+	// Order form
+	PartOfSpeech_ORDER PartOfSpeech_Form = 10
+	// Specific form
+	PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11
+)
+
+var PartOfSpeech_Form_name = map[int32]string{
+	0:  "FORM_UNKNOWN",
+	1:  "ADNOMIAL",
+	2:  "AUXILIARY",
+	3:  "COMPLEMENTIZER",
+	4:  "FINAL_ENDING",
+	5:  "GERUND",
+	6:  "REALIS",
+	7:  "IRREALIS",
+	8:  "SHORT",
+	9:  "LONG",
+	10: "ORDER",
+	11: "SPECIFIC",
+}
+var PartOfSpeech_Form_value = map[string]int32{
+	"FORM_UNKNOWN":   0,
+	"ADNOMIAL":       1,
+	"AUXILIARY":      2,
+	"COMPLEMENTIZER": 3,
+	"FINAL_ENDING":   4,
+	"GERUND":         5,
+	"REALIS":         6,
+	"IRREALIS":       7,
+	"SHORT":          8,
+	"LONG":           9,
+	"ORDER":          10,
+	"SPECIFIC":       11,
+}
+
+func (x PartOfSpeech_Form) String() string {
+	return proto.EnumName(PartOfSpeech_Form_name, int32(x))
+}
+func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 3} }
+
+// Gender classes of nouns reflected in the behaviour of associated words.
+type PartOfSpeech_Gender int32
+
+const (
+	// Gender is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0
+	// Feminine
+	PartOfSpeech_FEMININE PartOfSpeech_Gender = 1
+	// Masculine
+	PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2
+	// Neuter
+	PartOfSpeech_NEUTER PartOfSpeech_Gender = 3
+)
+
+var PartOfSpeech_Gender_name = map[int32]string{
+	0: "GENDER_UNKNOWN",
+	1: "FEMININE",
+	2: "MASCULINE",
+	3: "NEUTER",
+}
+var PartOfSpeech_Gender_value = map[string]int32{
+	"GENDER_UNKNOWN": 0,
+	"FEMININE":       1,
+	"MASCULINE":      2,
+	"NEUTER":         3,
+}
+
+func (x PartOfSpeech_Gender) String() string {
+	return proto.EnumName(PartOfSpeech_Gender_name, int32(x))
+}
+func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 4} }
+
+// The grammatical feature of verbs, used for showing modality and attitude.
+type PartOfSpeech_Mood int32
+
+const (
+	// Mood is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0
+	// Conditional
+	PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1
+	// Imperative
+	PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2
+	// Indicative
+	PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3
+	// Interrogative
+	PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4
+	// Jussive
+	PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5
+	// Subjunctive
+	PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6
+)
+
+var PartOfSpeech_Mood_name = map[int32]string{
+	0: "MOOD_UNKNOWN",
+	1: "CONDITIONAL_MOOD",
+	2: "IMPERATIVE",
+	3: "INDICATIVE",
+	4: "INTERROGATIVE",
+	5: "JUSSIVE",
+	6: "SUBJUNCTIVE",
+}
+var PartOfSpeech_Mood_value = map[string]int32{
+	"MOOD_UNKNOWN":     0,
+	"CONDITIONAL_MOOD": 1,
+	"IMPERATIVE":       2,
+	"INDICATIVE":       3,
+	"INTERROGATIVE":    4,
+	"JUSSIVE":          5,
+	"SUBJUNCTIVE":      6,
+}
+
+func (x PartOfSpeech_Mood) String() string {
+	return proto.EnumName(PartOfSpeech_Mood_name, int32(x))
+}
+func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 5} }
+
+// Count distinctions.
+type PartOfSpeech_Number int32
+
+const (
+	// Number is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0
+	// Singular
+	PartOfSpeech_SINGULAR PartOfSpeech_Number = 1
+	// Plural
+	PartOfSpeech_PLURAL PartOfSpeech_Number = 2
+	// Dual
+	PartOfSpeech_DUAL PartOfSpeech_Number = 3
+)
+
+var PartOfSpeech_Number_name = map[int32]string{
+	0: "NUMBER_UNKNOWN",
+	1: "SINGULAR",
+	2: "PLURAL",
+	3: "DUAL",
+}
+var PartOfSpeech_Number_value = map[string]int32{
+	"NUMBER_UNKNOWN": 0,
+	"SINGULAR":       1,
+	"PLURAL":         2,
+	"DUAL":           3,
+}
+
+func (x PartOfSpeech_Number) String() string {
+	return proto.EnumName(PartOfSpeech_Number_name, int32(x))
+}
+func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 6} }
+
+// The distinction between the speaker, second person, third person, etc.
+type PartOfSpeech_Person int32
+
+const (
+	// Person is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0
+	// First
+	PartOfSpeech_FIRST PartOfSpeech_Person = 1
+	// Second
+	PartOfSpeech_SECOND PartOfSpeech_Person = 2
+	// Third
+	PartOfSpeech_THIRD PartOfSpeech_Person = 3
+	// Reflexive
+	PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4
+)
+
+var PartOfSpeech_Person_name = map[int32]string{
+	0: "PERSON_UNKNOWN",
+	1: "FIRST",
+	2: "SECOND",
+	3: "THIRD",
+	4: "REFLEXIVE_PERSON",
+}
+var PartOfSpeech_Person_value = map[string]int32{
+	"PERSON_UNKNOWN":   0,
+	"FIRST":            1,
+	"SECOND":           2,
+	"THIRD":            3,
+	"REFLEXIVE_PERSON": 4,
+}
+
+func (x PartOfSpeech_Person) String() string {
+	return proto.EnumName(PartOfSpeech_Person_name, int32(x))
+}
+func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 7} }
+
+// This category shows if the token is part of a proper name.
+type PartOfSpeech_Proper int32
+
+const (
+	// Proper is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0
+	// Proper
+	PartOfSpeech_PROPER PartOfSpeech_Proper = 1
+	// Not proper
+	PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2
+)
+
+var PartOfSpeech_Proper_name = map[int32]string{
+	0: "PROPER_UNKNOWN",
+	1: "PROPER",
+	2: "NOT_PROPER",
+}
+var PartOfSpeech_Proper_value = map[string]int32{
+	"PROPER_UNKNOWN": 0,
+	"PROPER":         1,
+	"NOT_PROPER":     2,
+}
+
+func (x PartOfSpeech_Proper) String() string {
+	return proto.EnumName(PartOfSpeech_Proper_name, int32(x))
+}
+func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 8} }
+
+// Reciprocal features of a pronoun.
+type PartOfSpeech_Reciprocity int32
+
+const (
+	// Reciprocity is not applicable in the analyzed language or is not
+	// predicted.
+	PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0
+	// Reciprocal
+	PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1
+	// Non-reciprocal
+	PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2
+)
+
+var PartOfSpeech_Reciprocity_name = map[int32]string{
+	0: "RECIPROCITY_UNKNOWN",
+	1: "RECIPROCAL",
+	2: "NON_RECIPROCAL",
+}
+var PartOfSpeech_Reciprocity_value = map[string]int32{
+	"RECIPROCITY_UNKNOWN": 0,
+	"RECIPROCAL":          1,
+	"NON_RECIPROCAL":      2,
+}
+
+func (x PartOfSpeech_Reciprocity) String() string {
+	return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x))
+}
+func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 9} }
+
+// Time reference.
+type PartOfSpeech_Tense int32
+
+const (
+	// Tense is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0
+	// Conditional
+	PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1
+	// Future
+	PartOfSpeech_FUTURE PartOfSpeech_Tense = 2
+	// Past
+	PartOfSpeech_PAST PartOfSpeech_Tense = 3
+	// Present
+	PartOfSpeech_PRESENT PartOfSpeech_Tense = 4
+	// Imperfect
+	PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5
+	// Pluperfect
+	PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6
+)
+
+var PartOfSpeech_Tense_name = map[int32]string{
+	0: "TENSE_UNKNOWN",
+	1: "CONDITIONAL_TENSE",
+	2: "FUTURE",
+	3: "PAST",
+	4: "PRESENT",
+	5: "IMPERFECT",
+	6: "PLUPERFECT",
+}
+var PartOfSpeech_Tense_value = map[string]int32{
+	"TENSE_UNKNOWN":     0,
+	"CONDITIONAL_TENSE": 1,
+	"FUTURE":            2,
+	"PAST":              3,
+	"PRESENT":           4,
+	"IMPERFECT":         5,
+	"PLUPERFECT":        6,
+}
+
+func (x PartOfSpeech_Tense) String() string {
+	return proto.EnumName(PartOfSpeech_Tense_name, int32(x))
+}
+func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 10} }
+
+// The relationship between the action that a verb expresses and the
+// participants identified by its arguments.
+type PartOfSpeech_Voice int32
+
+const (
+	// Voice is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0
+	// Active
+	PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1
+	// Causative
+	PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2
+	// Passive
+	PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3
+)
+
+var PartOfSpeech_Voice_name = map[int32]string{
+	0: "VOICE_UNKNOWN",
+	1: "ACTIVE",
+	2: "CAUSATIVE",
+	3: "PASSIVE",
+}
+var PartOfSpeech_Voice_value = map[string]int32{
+	"VOICE_UNKNOWN": 0,
+	"ACTIVE":        1,
+	"CAUSATIVE":     2,
+	"PASSIVE":       3,
+}
+
+func (x PartOfSpeech_Voice) String() string {
+	return proto.EnumName(PartOfSpeech_Voice_name, int32(x))
+}
+func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 11} }
+
+// The parse label enum for the token.
+type DependencyEdge_Label int32
+
+const (
+	// Unknown
+	DependencyEdge_UNKNOWN DependencyEdge_Label = 0
+	// Abbreviation modifier
+	DependencyEdge_ABBREV DependencyEdge_Label = 1
+	// Adjectival complement
+	DependencyEdge_ACOMP DependencyEdge_Label = 2
+	// Adverbial clause modifier
+	DependencyEdge_ADVCL DependencyEdge_Label = 3
+	// Adverbial modifier
+	DependencyEdge_ADVMOD DependencyEdge_Label = 4
+	// Adjectival modifier of an NP
+	DependencyEdge_AMOD DependencyEdge_Label = 5
+	// Appositional modifier of an NP
+	DependencyEdge_APPOS DependencyEdge_Label = 6
+	// Attribute dependent of a copular verb
+	DependencyEdge_ATTR DependencyEdge_Label = 7
+	// Auxiliary (non-main) verb
+	DependencyEdge_AUX DependencyEdge_Label = 8
+	// Passive auxiliary
+	DependencyEdge_AUXPASS DependencyEdge_Label = 9
+	// Coordinating conjunction
+	DependencyEdge_CC DependencyEdge_Label = 10
+	// Clausal complement of a verb or adjective
+	DependencyEdge_CCOMP DependencyEdge_Label = 11
+	// Conjunct
+	DependencyEdge_CONJ DependencyEdge_Label = 12
+	// Clausal subject
+	DependencyEdge_CSUBJ DependencyEdge_Label = 13
+	// Clausal passive subject
+	DependencyEdge_CSUBJPASS DependencyEdge_Label = 14
+	// Dependency (unable to determine)
+	DependencyEdge_DEP DependencyEdge_Label = 15
+	// Determiner
+	DependencyEdge_DET DependencyEdge_Label = 16
+	// Discourse
+	DependencyEdge_DISCOURSE DependencyEdge_Label = 17
+	// Direct object
+	DependencyEdge_DOBJ DependencyEdge_Label = 18
+	// Expletive
+	DependencyEdge_EXPL DependencyEdge_Label = 19
+	// Goes with (part of a word in a text not well edited)
+	DependencyEdge_GOESWITH DependencyEdge_Label = 20
+	// Indirect object
+	DependencyEdge_IOBJ DependencyEdge_Label = 21
+	// Marker (word introducing a subordinate clause)
+	DependencyEdge_MARK DependencyEdge_Label = 22
+	// Multi-word expression
+	DependencyEdge_MWE DependencyEdge_Label = 23
+	// Multi-word verbal expression
+	DependencyEdge_MWV DependencyEdge_Label = 24
+	// Negation modifier
+	DependencyEdge_NEG DependencyEdge_Label = 25
+	// Noun compound modifier
+	DependencyEdge_NN DependencyEdge_Label = 26
+	// Noun phrase used as an adverbial modifier
+	DependencyEdge_NPADVMOD DependencyEdge_Label = 27
+	// Nominal subject
+	DependencyEdge_NSUBJ DependencyEdge_Label = 28
+	// Passive nominal subject
+	DependencyEdge_NSUBJPASS DependencyEdge_Label = 29
+	// Numeric modifier of a noun
+	DependencyEdge_NUM DependencyEdge_Label = 30
+	// Element of compound number
+	DependencyEdge_NUMBER DependencyEdge_Label = 31
+	// Punctuation mark
+	DependencyEdge_P DependencyEdge_Label = 32
+	// Parataxis relation
+	DependencyEdge_PARATAXIS DependencyEdge_Label = 33
+	// Participial modifier
+	DependencyEdge_PARTMOD DependencyEdge_Label = 34
+	// The complement of a preposition is a clause
+	DependencyEdge_PCOMP DependencyEdge_Label = 35
+	// Object of a preposition
+	DependencyEdge_POBJ DependencyEdge_Label = 36
+	// Possession modifier
+	DependencyEdge_POSS DependencyEdge_Label = 37
+	// Postverbal negative particle
+	DependencyEdge_POSTNEG DependencyEdge_Label = 38
+	// Predicate complement
+	DependencyEdge_PRECOMP DependencyEdge_Label = 39
+	// Preconjunct
+	DependencyEdge_PRECONJ DependencyEdge_Label = 40
+	// Predeterminer
+	DependencyEdge_PREDET DependencyEdge_Label = 41
+	// Prefix
+	DependencyEdge_PREF DependencyEdge_Label = 42
+	// Prepositional modifier
+	DependencyEdge_PREP DependencyEdge_Label = 43
+	// The relationship between a verb and verbal morpheme
+	DependencyEdge_PRONL DependencyEdge_Label = 44
+	// Particle
+	DependencyEdge_PRT DependencyEdge_Label = 45
+	// Associative or possessive marker
+	DependencyEdge_PS DependencyEdge_Label = 46
+	// Quantifier phrase modifier
+	DependencyEdge_QUANTMOD DependencyEdge_Label = 47
+	// Relative clause modifier
+	DependencyEdge_RCMOD DependencyEdge_Label = 48
+	// Complementizer in relative clause
+	DependencyEdge_RCMODREL DependencyEdge_Label = 49
+	// Ellipsis without a preceding predicate
+	DependencyEdge_RDROP DependencyEdge_Label = 50
+	// Referent
+	DependencyEdge_REF DependencyEdge_Label = 51
+	// Remnant
+	DependencyEdge_REMNANT DependencyEdge_Label = 52
+	// Reparandum
+	DependencyEdge_REPARANDUM DependencyEdge_Label = 53
+	// Root
+	DependencyEdge_ROOT DependencyEdge_Label = 54
+	// Suffix specifying a unit of number
+	DependencyEdge_SNUM DependencyEdge_Label = 55
+	// Suffix
+	DependencyEdge_SUFF DependencyEdge_Label = 56
+	// Temporal modifier
+	DependencyEdge_TMOD DependencyEdge_Label = 57
+	// Topic marker
+	DependencyEdge_TOPIC DependencyEdge_Label = 58
+	// Clause headed by an infinite form of the verb that modifies a noun
+	DependencyEdge_VMOD DependencyEdge_Label = 59
+	// Vocative
+	DependencyEdge_VOCATIVE DependencyEdge_Label = 60
+	// Open clausal complement
+	DependencyEdge_XCOMP DependencyEdge_Label = 61
+	// Name suffix
+	DependencyEdge_SUFFIX DependencyEdge_Label = 62
+	// Name title
+	DependencyEdge_TITLE DependencyEdge_Label = 63
+	// Adverbial phrase modifier
+	DependencyEdge_ADVPHMOD DependencyEdge_Label = 64
+	// Causative auxiliary
+	DependencyEdge_AUXCAUS DependencyEdge_Label = 65
+	// Helper auxiliary
+	DependencyEdge_AUXVV DependencyEdge_Label = 66
+	// Rentaishi (Prenominal modifier)
+	DependencyEdge_DTMOD DependencyEdge_Label = 67
+	// Foreign words
+	DependencyEdge_FOREIGN DependencyEdge_Label = 68
+	// Keyword
+	DependencyEdge_KW DependencyEdge_Label = 69
+	// List for chains of comparable items
+	DependencyEdge_LIST DependencyEdge_Label = 70
+	// Nominalized clause
+	DependencyEdge_NOMC DependencyEdge_Label = 71
+	// Nominalized clausal subject
+	DependencyEdge_NOMCSUBJ DependencyEdge_Label = 72
+	// Nominalized clausal passive
+	DependencyEdge_NOMCSUBJPASS DependencyEdge_Label = 73
+	// Compound of numeric modifier
+	DependencyEdge_NUMC DependencyEdge_Label = 74
+	// Copula
+	DependencyEdge_COP DependencyEdge_Label = 75
+	// Dislocated relation (for fronted/topicalized elements)
+	DependencyEdge_DISLOCATED DependencyEdge_Label = 76
+)
+
+var DependencyEdge_Label_name = map[int32]string{
+	0:  "UNKNOWN",
+	1:  "ABBREV",
+	2:  "ACOMP",
+	3:  "ADVCL",
+	4:  "ADVMOD",
+	5:  "AMOD",
+	6:  "APPOS",
+	7:  "ATTR",
+	8:  "AUX",
+	9:  "AUXPASS",
+	10: "CC",
+	11: "CCOMP",
+	12: "CONJ",
+	13: "CSUBJ",
+	14: "CSUBJPASS",
+	15: "DEP",
+	16: "DET",
+	17: "DISCOURSE",
+	18: "DOBJ",
+	19: "EXPL",
+	20: "GOESWITH",
+	21: "IOBJ",
+	22: "MARK",
+	23: "MWE",
+	24: "MWV",
+	25: "NEG",
+	26: "NN",
+	27: "NPADVMOD",
+	28: "NSUBJ",
+	29: "NSUBJPASS",
+	30: "NUM",
+	31: "NUMBER",
+	32: "P",
+	33: "PARATAXIS",
+	34: "PARTMOD",
+	35: "PCOMP",
+	36: "POBJ",
+	37: "POSS",
+	38: "POSTNEG",
+	39: "PRECOMP",
+	40: "PRECONJ",
+	41: "PREDET",
+	42: "PREF",
+	43: "PREP",
+	44: "PRONL",
+	45: "PRT",
+	46: "PS",
+	47: "QUANTMOD",
+	48: "RCMOD",
+	49: "RCMODREL",
+	50: "RDROP",
+	51: "REF",
+	52: "REMNANT",
+	53: "REPARANDUM",
+	54: "ROOT",
+	55: "SNUM",
+	56: "SUFF",
+	57: "TMOD",
+	58: "TOPIC",
+	59: "VMOD",
+	60: "VOCATIVE",
+	61: "XCOMP",
+	62: "SUFFIX",
+	63: "TITLE",
+	64: "ADVPHMOD",
+	65: "AUXCAUS",
+	66: "AUXVV",
+	67: "DTMOD",
+	68: "FOREIGN",
+	69: "KW",
+	70: "LIST",
+	71: "NOMC",
+	72: "NOMCSUBJ",
+	73: "NOMCSUBJPASS",
+	74: "NUMC",
+	75: "COP",
+	76: "DISLOCATED",
+}
+var DependencyEdge_Label_value = map[string]int32{
+	"UNKNOWN":      0,
+	"ABBREV":       1,
+	"ACOMP":        2,
+	"ADVCL":        3,
+	"ADVMOD":       4,
+	"AMOD":         5,
+	"APPOS":        6,
+	"ATTR":         7,
+	"AUX":          8,
+	"AUXPASS":      9,
+	"CC":           10,
+	"CCOMP":        11,
+	"CONJ":         12,
+	"CSUBJ":        13,
+	"CSUBJPASS":    14,
+	"DEP":          15,
+	"DET":          16,
+	"DISCOURSE":    17,
+	"DOBJ":         18,
+	"EXPL":         19,
+	"GOESWITH":     20,
+	"IOBJ":         21,
+	"MARK":         22,
+	"MWE":          23,
+	"MWV":          24,
+	"NEG":          25,
+	"NN":           26,
+	"NPADVMOD":     27,
+	"NSUBJ":        28,
+	"NSUBJPASS":    29,
+	"NUM":          30,
+	"NUMBER":       31,
+	"P":            32,
+	"PARATAXIS":    33,
+	"PARTMOD":      34,
+	"PCOMP":        35,
+	"POBJ":         36,
+	"POSS":         37,
+	"POSTNEG":      38,
+	"PRECOMP":      39,
+	"PRECONJ":      40,
+	"PREDET":       41,
+	"PREF":         42,
+	"PREP":         43,
+	"PRONL":        44,
+	"PRT":          45,
+	"PS":           46,
+	"QUANTMOD":     47,
+	"RCMOD":        48,
+	"RCMODREL":     49,
+	"RDROP":        50,
+	"REF":          51,
+	"REMNANT":      52,
+	"REPARANDUM":   53,
+	"ROOT":         54,
+	"SNUM":         55,
+	"SUFF":         56,
+	"TMOD":         57,
+	"TOPIC":        58,
+	"VMOD":         59,
+	"VOCATIVE":     60,
+	"XCOMP":        61,
+	"SUFFIX":       62,
+	"TITLE":        63,
+	"ADVPHMOD":     64,
+	"AUXCAUS":      65,
+	"AUXVV":        66,
+	"DTMOD":        67,
+	"FOREIGN":      68,
+	"KW":           69,
+	"LIST":         70,
+	"NOMC":         71,
+	"NOMCSUBJ":     72,
+	"NOMCSUBJPASS": 73,
+	"NUMC":         74,
+	"COP":          75,
+	"DISLOCATED":   76,
+}
+
+func (x DependencyEdge_Label) String() string {
+	return proto.EnumName(DependencyEdge_Label_name, int32(x))
+}
+func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{6, 0} }
+
+// The supported types of mentions.
+type EntityMention_Type int32
+
+const (
+	// Unknown
+	EntityMention_TYPE_UNKNOWN EntityMention_Type = 0
+	// Proper name
+	EntityMention_PROPER EntityMention_Type = 1
+	// Common noun (or noun compound)
+	EntityMention_COMMON EntityMention_Type = 2
+)
+
+var EntityMention_Type_name = map[int32]string{
+	0: "TYPE_UNKNOWN",
+	1: "PROPER",
+	2: "COMMON",
+}
+var EntityMention_Type_value = map[string]int32{
+	"TYPE_UNKNOWN": 0,
+	"PROPER":       1,
+	"COMMON":       2,
+}
+
+func (x EntityMention_Type) String() string {
+	return proto.EnumName(EntityMention_Type_name, int32(x))
+}
+func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{7, 0} }
+
+// ################################################################ #
+//
+// Represents the input to API methods.
+type Document struct {
+	// Required. If the type is not set or is `TYPE_UNSPECIFIED`,
+	// returns an `INVALID_ARGUMENT` error.
+	Type Document_Type `protobuf:"varint,1,opt,name=type,enum=google.cloud.language.v1.Document_Type" json:"type,omitempty"`
+	// The source of the document: a string containing the content or a
+	// Google Cloud Storage URI.
+	//
+	// Types that are valid to be assigned to Source:
+	//	*Document_Content
+	//	*Document_GcsContentUri
+	Source isDocument_Source `protobuf_oneof:"source"`
+	// The language of the document (if not specified, the language is
+	// automatically detected). Both ISO and BCP-47 language codes are
+	// accepted.<br>
+	// **Current Language Restrictions:**
+	//
+	//  * Only English, Spanish, and Japanese textual content
+	//    are supported, with the following additional restriction:
+	//    * `analyzeSentiment` only supports English text.
+	// If the language (either specified by the caller or automatically detected)
+	// is not supported by the called API method, an `INVALID_ARGUMENT` error
+	// is returned.
+	Language string `protobuf:"bytes,4,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *Document) Reset()                    { *m = Document{} }
+func (m *Document) String() string            { return proto.CompactTextString(m) }
+func (*Document) ProtoMessage()               {}
+func (*Document) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+type isDocument_Source interface {
+	isDocument_Source()
+}
+
+type Document_Content struct {
+	Content string `protobuf:"bytes,2,opt,name=content,oneof"`
+}
+type Document_GcsContentUri struct {
+	GcsContentUri string `protobuf:"bytes,3,opt,name=gcs_content_uri,json=gcsContentUri,oneof"`
+}
+
+func (*Document_Content) isDocument_Source()       {}
+func (*Document_GcsContentUri) isDocument_Source() {}
+
+func (m *Document) GetSource() isDocument_Source {
+	if m != nil {
+		return m.Source
+	}
+	return nil
+}
+
+func (m *Document) GetContent() string {
+	if x, ok := m.GetSource().(*Document_Content); ok {
+		return x.Content
+	}
+	return ""
+}
+
+func (m *Document) GetGcsContentUri() string {
+	if x, ok := m.GetSource().(*Document_GcsContentUri); ok {
+		return x.GcsContentUri
+	}
+	return ""
+}
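+
+// Illustrative sketch, not part of the generated code: populating the Source
+// oneof. Exactly one of Document_Content or Document_GcsContentUri can be set
+// at a time; the "gs://bucket/object" URI below is a placeholder.
+//
+//	inline := &Document{
+//		Type:   Document_PLAIN_TEXT,
+//		Source: &Document_Content{Content: "Hello, world."},
+//	}
+//	fromGCS := &Document{
+//		Type:   Document_HTML,
+//		Source: &Document_GcsContentUri{GcsContentUri: "gs://bucket/object"},
+//	}
+//	_, _ = inline, fromGCS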
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*Document) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+	return _Document_OneofMarshaler, _Document_OneofUnmarshaler, _Document_OneofSizer, []interface{}{
+		(*Document_Content)(nil),
+		(*Document_GcsContentUri)(nil),
+	}
+}
+
+func _Document_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+	m := msg.(*Document)
+	// source
+	switch x := m.Source.(type) {
+	case *Document_Content:
+		b.EncodeVarint(2<<3 | proto.WireBytes)
+		b.EncodeStringBytes(x.Content)
+	case *Document_GcsContentUri:
+		b.EncodeVarint(3<<3 | proto.WireBytes)
+		b.EncodeStringBytes(x.GcsContentUri)
+	case nil:
+	default:
+		return fmt.Errorf("Document.Source has unexpected type %T", x)
+	}
+	return nil
+}
+
+func _Document_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+	m := msg.(*Document)
+	switch tag {
+	case 2: // source.content
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeStringBytes()
+		m.Source = &Document_Content{x}
+		return true, err
+	case 3: // source.gcs_content_uri
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeStringBytes()
+		m.Source = &Document_GcsContentUri{x}
+		return true, err
+	default:
+		return false, nil
+	}
+}
+
+func _Document_OneofSizer(msg proto.Message) (n int) {
+	m := msg.(*Document)
+	// source
+	switch x := m.Source.(type) {
+	case *Document_Content:
+		n += proto.SizeVarint(2<<3 | proto.WireBytes)
+		n += proto.SizeVarint(uint64(len(x.Content)))
+		n += len(x.Content)
+	case *Document_GcsContentUri:
+		n += proto.SizeVarint(3<<3 | proto.WireBytes)
+		n += proto.SizeVarint(uint64(len(x.GcsContentUri)))
+		n += len(x.GcsContentUri)
+	case nil:
+	default:
+		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+	}
+	return n
+}
+
+// Represents a sentence in the input document.
+type Sentence struct {
+	// The sentence text.
+	Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+	// For calls to [AnalyzeSentiment][] or if
+	// [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment] is set to
+	// true, this field will contain the sentiment for the sentence.
+	Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment" json:"sentiment,omitempty"`
+}
+
+func (m *Sentence) Reset()                    { *m = Sentence{} }
+func (m *Sentence) String() string            { return proto.CompactTextString(m) }
+func (*Sentence) ProtoMessage()               {}
+func (*Sentence) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+func (m *Sentence) GetText() *TextSpan {
+	if m != nil {
+		return m.Text
+	}
+	return nil
+}
+
+func (m *Sentence) GetSentiment() *Sentiment {
+	if m != nil {
+		return m.Sentiment
+	}
+	return nil
+}
+
+// Represents a phrase in the text that is a known entity, such as
+// a person, an organization, or location. The API associates information, such
+// as salience and mentions, with entities.
+type Entity struct {
+	// The representative name for the entity.
+	Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
+	// The entity type.
+	Type Entity_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1.Entity_Type" json:"type,omitempty"`
+	// Metadata associated with the entity.
+	//
+	// Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
+	// available. The associated keys are "wikipedia_url" and "mid", respectively.
+	Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+	// The salience score associated with the entity in the [0, 1.0] range.
+	//
+	// The salience score for an entity provides information about the
+	// importance or centrality of that entity to the entire document text.
+	// Scores closer to 0 are less salient, while scores closer to 1.0 are highly
+	// salient.
+	Salience float32 `protobuf:"fixed32,4,opt,name=salience" json:"salience,omitempty"`
+	// The mentions of this entity in the input document. The API currently
+	// supports proper noun mentions.
+	Mentions []*EntityMention `protobuf:"bytes,5,rep,name=mentions" json:"mentions,omitempty"`
+}
+
+func (m *Entity) Reset()                    { *m = Entity{} }
+func (m *Entity) String() string            { return proto.CompactTextString(m) }
+func (*Entity) ProtoMessage()               {}
+func (*Entity) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+func (m *Entity) GetMetadata() map[string]string {
+	if m != nil {
+		return m.Metadata
+	}
+	return nil
+}
+
+func (m *Entity) GetMentions() []*EntityMention {
+	if m != nil {
+		return m.Mentions
+	}
+	return nil
+}
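+
+// Illustrative sketch, not part of the generated code: reading metadata and
+// salience from an *Entity named e, which is assumed to come from an entity
+// analysis response.
+//
+//	wikiURL := e.GetMetadata()["wikipedia_url"] // "" when no Wikipedia URL is available
+//	mid := e.GetMetadata()["mid"]               // "" when no Knowledge Graph MID is available
+//	if e.Salience > 0.5 {
+//		// The entity is highly central to the document text.
+//	}
+//	_, _ = wikiURL, mid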
+
+// Represents the smallest syntactic building block of the text.
+type Token struct {
+	// The token text.
+	Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+	// Parts of speech tag for this token.
+	PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech" json:"part_of_speech,omitempty"`
+	// Dependency tree parse for this token.
+	DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge" json:"dependency_edge,omitempty"`
+	// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
+	Lemma string `protobuf:"bytes,4,opt,name=lemma" json:"lemma,omitempty"`
+}
+
+func (m *Token) Reset()                    { *m = Token{} }
+func (m *Token) String() string            { return proto.CompactTextString(m) }
+func (*Token) ProtoMessage()               {}
+func (*Token) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
+
+func (m *Token) GetText() *TextSpan {
+	if m != nil {
+		return m.Text
+	}
+	return nil
+}
+
+func (m *Token) GetPartOfSpeech() *PartOfSpeech {
+	if m != nil {
+		return m.PartOfSpeech
+	}
+	return nil
+}
+
+func (m *Token) GetDependencyEdge() *DependencyEdge {
+	if m != nil {
+		return m.DependencyEdge
+	}
+	return nil
+}
+
+// Represents the feeling associated with the entire text or entities in
+// the text.
+type Sentiment struct {
+	// A non-negative number in the [0, +inf) range, which represents
+	// the absolute magnitude of sentiment regardless of score (positive or
+	// negative).
+	Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude" json:"magnitude,omitempty"`
+	// Sentiment score between -1.0 (negative sentiment) and 1.0
+	// (positive sentiment).
+	Score float32 `protobuf:"fixed32,3,opt,name=score" json:"score,omitempty"`
+}
+
+func (m *Sentiment) Reset()                    { *m = Sentiment{} }
+func (m *Sentiment) String() string            { return proto.CompactTextString(m) }
+func (*Sentiment) ProtoMessage()               {}
+func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
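+
+// Illustrative sketch, not part of the generated code: Score reflects overall
+// polarity while Magnitude reflects how much sentiment is present, so a long
+// document with strong but opposing opinions can have a Score near 0 together
+// with a large Magnitude. The thresholds below are arbitrary example values.
+//
+//	func describe(s *Sentiment) string {
+//		switch {
+//		case s.Score > 0.25:
+//			return "positive"
+//		case s.Score < -0.25:
+//			return "negative"
+//		case s.Magnitude > 2.0:
+//			return "mixed" // opposing sentiments cancel out in the score
+//		default:
+//			return "neutral"
+//		}
+//	}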
+
+// Represents part of speech information for a token. Parts of speech
+// are as defined in
+// http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf
+type PartOfSpeech struct {
+	// The part of speech tag.
+	Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,enum=google.cloud.language.v1.PartOfSpeech_Tag" json:"tag,omitempty"`
+	// The grammatical aspect.
+	Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,enum=google.cloud.language.v1.PartOfSpeech_Aspect" json:"aspect,omitempty"`
+	// The grammatical case.
+	Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,enum=google.cloud.language.v1.PartOfSpeech_Case" json:"case,omitempty"`
+	// The grammatical form.
+	Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,enum=google.cloud.language.v1.PartOfSpeech_Form" json:"form,omitempty"`
+	// The grammatical gender.
+	Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,enum=google.cloud.language.v1.PartOfSpeech_Gender" json:"gender,omitempty"`
+	// The grammatical mood.
+	Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,enum=google.cloud.language.v1.PartOfSpeech_Mood" json:"mood,omitempty"`
+	// The grammatical number.
+	Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,enum=google.cloud.language.v1.PartOfSpeech_Number" json:"number,omitempty"`
+	// The grammatical person.
+	Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,enum=google.cloud.language.v1.PartOfSpeech_Person" json:"person,omitempty"`
+	// The grammatical properness.
+	Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,enum=google.cloud.language.v1.PartOfSpeech_Proper" json:"proper,omitempty"`
+	// The grammatical reciprocity.
+	Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,enum=google.cloud.language.v1.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"`
+	// The grammatical tense.
+	Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,enum=google.cloud.language.v1.PartOfSpeech_Tense" json:"tense,omitempty"`
+	// The grammatical voice.
+	Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,enum=google.cloud.language.v1.PartOfSpeech_Voice" json:"voice,omitempty"`
+}
+
+func (m *PartOfSpeech) Reset()                    { *m = PartOfSpeech{} }
+func (m *PartOfSpeech) String() string            { return proto.CompactTextString(m) }
+func (*PartOfSpeech) ProtoMessage()               {}
+func (*PartOfSpeech) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+// Represents dependency parse tree information for a token. (For more
+// information on dependency labels, see
+// http://www.aclweb.org/anthology/P13-2017)
+type DependencyEdge struct {
+	// Represents the head of this token in the dependency tree.
+	// This is the index of the token which has an arc going to this token.
+	// The index is the position of the token in the array of tokens returned
+	// by the API method. If this token is a root token, then the
+	// `head_token_index` is its own index.
+	HeadTokenIndex int32 `protobuf:"varint,1,opt,name=head_token_index,json=headTokenIndex" json:"head_token_index,omitempty"`
+	// The parse label for the token.
+	Label DependencyEdge_Label `protobuf:"varint,2,opt,name=label,enum=google.cloud.language.v1.DependencyEdge_Label" json:"label,omitempty"`
+}
+
+func (m *DependencyEdge) Reset()                    { *m = DependencyEdge{} }
+func (m *DependencyEdge) String() string            { return proto.CompactTextString(m) }
+func (*DependencyEdge) ProtoMessage()               {}
+func (*DependencyEdge) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{6} }
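+
+// Illustrative sketch, not part of the generated code: for the sentence
+// "John eats bread", the verb "eats" (token index 1) is the root, so its
+// head_token_index is its own index, while "John" (index 0) and "bread"
+// (index 2) both point back to index 1.
+//
+//	edges := []*DependencyEdge{
+//		{HeadTokenIndex: 1, Label: DependencyEdge_NSUBJ}, // "John"  -> "eats"
+//		{HeadTokenIndex: 1, Label: DependencyEdge_ROOT},  // "eats"  -> itself
+//		{HeadTokenIndex: 1, Label: DependencyEdge_DOBJ},  // "bread" -> "eats"
+//	}
+//	_ = edges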
+
+// Represents a mention for an entity in the text. Currently, proper noun
+// mentions are supported.
+type EntityMention struct {
+	// The mention text.
+	Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+	// The type of the entity mention.
+	Type EntityMention_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1.EntityMention_Type" json:"type,omitempty"`
+}
+
+func (m *EntityMention) Reset()                    { *m = EntityMention{} }
+func (m *EntityMention) String() string            { return proto.CompactTextString(m) }
+func (*EntityMention) ProtoMessage()               {}
+func (*EntityMention) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{7} }
+
+func (m *EntityMention) GetText() *TextSpan {
+	if m != nil {
+		return m.Text
+	}
+	return nil
+}
+
+// Represents an output piece of text.
+type TextSpan struct {
+	// The content of the output text.
+	Content string `protobuf:"bytes,1,opt,name=content" json:"content,omitempty"`
+	// The API calculates the beginning offset of the content in the original
+	// document according to the [EncodingType][google.cloud.language.v1.EncodingType] specified in the API request.
+	BeginOffset int32 `protobuf:"varint,2,opt,name=begin_offset,json=beginOffset" json:"begin_offset,omitempty"`
+}
+
+func (m *TextSpan) Reset()                    { *m = TextSpan{} }
+func (m *TextSpan) String() string            { return proto.CompactTextString(m) }
+func (*TextSpan) ProtoMessage()               {}
+func (*TextSpan) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{8} }
+
+// The sentiment analysis request message.
+type AnalyzeSentimentRequest struct {
+	// Input document. Currently, `analyzeSentiment` only supports English text
+	// ([Document.language][google.cloud.language.v1.Document.language]="EN").
+	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The encoding type used by the API to calculate sentence offsets.
+	EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeSentimentRequest) Reset()                    { *m = AnalyzeSentimentRequest{} }
+func (m *AnalyzeSentimentRequest) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSentimentRequest) ProtoMessage()               {}
+func (*AnalyzeSentimentRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{9} }
+
+func (m *AnalyzeSentimentRequest) GetDocument() *Document {
+	if m != nil {
+		return m.Document
+	}
+	return nil
+}
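+
+// Illustrative sketch, not part of the generated code: assembling a minimal
+// sentiment request for inline English text; the sample sentence is arbitrary.
+//
+//	req := &AnalyzeSentimentRequest{
+//		Document: &Document{
+//			Type:     Document_PLAIN_TEXT,
+//			Source:   &Document_Content{Content: "The weather is wonderful today!"},
+//			Language: "en",
+//		},
+//		EncodingType: EncodingType_UTF8,
+//	}
+//	_ = req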
+
+// The sentiment analysis response message.
+type AnalyzeSentimentResponse struct {
+	// The overall sentiment of the input document.
+	DocumentSentiment *Sentiment `protobuf:"bytes,1,opt,name=document_sentiment,json=documentSentiment" json:"document_sentiment,omitempty"`
+	// The language of the text, which will be the same as the language specified
+	// in the request or, if not specified, the automatically-detected language.
+	// See `Document.language` field for more details.
+	Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+	// The sentiment for all the sentences in the document.
+	Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences" json:"sentences,omitempty"`
+}
+
+func (m *AnalyzeSentimentResponse) Reset()                    { *m = AnalyzeSentimentResponse{} }
+func (m *AnalyzeSentimentResponse) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSentimentResponse) ProtoMessage()               {}
+func (*AnalyzeSentimentResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{10} }
+
+func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment {
+	if m != nil {
+		return m.DocumentSentiment
+	}
+	return nil
+}
+
+func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence {
+	if m != nil {
+		return m.Sentences
+	}
+	return nil
+}
+
+// The entity analysis request message.
+type AnalyzeEntitiesRequest struct {
+	// Input document.
+	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The encoding type used by the API to calculate offsets.
+	EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeEntitiesRequest) Reset()                    { *m = AnalyzeEntitiesRequest{} }
+func (m *AnalyzeEntitiesRequest) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeEntitiesRequest) ProtoMessage()               {}
+func (*AnalyzeEntitiesRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{11} }
+
+func (m *AnalyzeEntitiesRequest) GetDocument() *Document {
+	if m != nil {
+		return m.Document
+	}
+	return nil
+}
+
+// The entity analysis response message.
+type AnalyzeEntitiesResponse struct {
+	// The recognized entities in the input document.
+	Entities []*Entity `protobuf:"bytes,1,rep,name=entities" json:"entities,omitempty"`
+	// The language of the text, which will be the same as the language specified
+	// in the request or, if not specified, the automatically-detected language.
+	// See `Document.language` field for more details.
+	Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeEntitiesResponse) Reset()                    { *m = AnalyzeEntitiesResponse{} }
+func (m *AnalyzeEntitiesResponse) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeEntitiesResponse) ProtoMessage()               {}
+func (*AnalyzeEntitiesResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{12} }
+
+func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity {
+	if m != nil {
+		return m.Entities
+	}
+	return nil
+}
+
+// The syntax analysis request message.
+type AnalyzeSyntaxRequest struct {
+	// Input document.
+	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The encoding type used by the API to calculate offsets.
+	EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeSyntaxRequest) Reset()                    { *m = AnalyzeSyntaxRequest{} }
+func (m *AnalyzeSyntaxRequest) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxRequest) ProtoMessage()               {}
+func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
+
+func (m *AnalyzeSyntaxRequest) GetDocument() *Document {
+	if m != nil {
+		return m.Document
+	}
+	return nil
+}
+
+// The syntax analysis response message.
+type AnalyzeSyntaxResponse struct {
+	// Sentences in the input document.
+	Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences" json:"sentences,omitempty"`
+	// Tokens, along with their syntactic information, in the input document.
+	Tokens []*Token `protobuf:"bytes,2,rep,name=tokens" json:"tokens,omitempty"`
+	// The language of the text, which will be the same as the language specified
+	// in the request or, if not specified, the automatically-detected language.
+	// See `Document.language` field for more details.
+	Language string `protobuf:"bytes,3,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeSyntaxResponse) Reset()                    { *m = AnalyzeSyntaxResponse{} }
+func (m *AnalyzeSyntaxResponse) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxResponse) ProtoMessage()               {}
+func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
+
+func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence {
+	if m != nil {
+		return m.Sentences
+	}
+	return nil
+}
+
+func (m *AnalyzeSyntaxResponse) GetTokens() []*Token {
+	if m != nil {
+		return m.Tokens
+	}
+	return nil
+}
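+
+// Illustrative sketch, not part of the generated code: walking the tokens of
+// an AnalyzeSyntaxResponse named resp (assumed to have been returned by the
+// service) and collecting the lemma of every verb.
+//
+//	var verbs []string
+//	for _, tok := range resp.GetTokens() {
+//		if pos := tok.GetPartOfSpeech(); pos != nil && pos.Tag == PartOfSpeech_VERB {
+//			verbs = append(verbs, tok.Lemma)
+//		}
+//	}
+//	_ = verbs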
+
+// The request message for the text annotation API, which can perform multiple
+// analysis types (sentiment, entities, and syntax) in one call.
+type AnnotateTextRequest struct {
+	// Input document.
+	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The enabled features.
+	Features *AnnotateTextRequest_Features `protobuf:"bytes,2,opt,name=features" json:"features,omitempty"`
+	// The encoding type used by the API to calculate offsets.
+	EncodingType EncodingType `protobuf:"varint,3,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnnotateTextRequest) Reset()                    { *m = AnnotateTextRequest{} }
+func (m *AnnotateTextRequest) String() string            { return proto.CompactTextString(m) }
+func (*AnnotateTextRequest) ProtoMessage()               {}
+func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
+
+func (m *AnnotateTextRequest) GetDocument() *Document {
+	if m != nil {
+		return m.Document
+	}
+	return nil
+}
+
+func (m *AnnotateTextRequest) GetFeatures() *AnnotateTextRequest_Features {
+	if m != nil {
+		return m.Features
+	}
+	return nil
+}
+
+// All available features for sentiment, syntax, and semantic analysis.
+// Setting each one to true will enable that specific analysis for the input.
+type AnnotateTextRequest_Features struct {
+	// Extract syntax information.
+	ExtractSyntax bool `protobuf:"varint,1,opt,name=extract_syntax,json=extractSyntax" json:"extract_syntax,omitempty"`
+	// Extract entities.
+	ExtractEntities bool `protobuf:"varint,2,opt,name=extract_entities,json=extractEntities" json:"extract_entities,omitempty"`
+	// Extract document-level sentiment.
+	ExtractDocumentSentiment bool `protobuf:"varint,3,opt,name=extract_document_sentiment,json=extractDocumentSentiment" json:"extract_document_sentiment,omitempty"`
+}
+
+func (m *AnnotateTextRequest_Features) Reset()         { *m = AnnotateTextRequest_Features{} }
+func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) }
+func (*AnnotateTextRequest_Features) ProtoMessage()    {}
+func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{15, 0}
+}
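+
+// Illustrative sketch, not part of the generated code: requesting syntax and
+// entity analysis, but not document sentiment, in a single annotateText call;
+// the sample sentence is arbitrary.
+//
+//	req := &AnnotateTextRequest{
+//		Document: &Document{
+//			Type:   Document_PLAIN_TEXT,
+//			Source: &Document_Content{Content: "Google was founded in 1998."},
+//		},
+//		Features: &AnnotateTextRequest_Features{
+//			ExtractSyntax:   true,
+//			ExtractEntities: true,
+//			// ExtractDocumentSentiment stays false, so DocumentSentiment is not populated.
+//		},
+//		EncodingType: EncodingType_UTF8,
+//	}
+//	_ = req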
+
+// The text annotations response message.
+type AnnotateTextResponse struct {
+	// Sentences in the input document. Populated if the user enables
+	// [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax].
+	Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences" json:"sentences,omitempty"`
+	// Tokens, along with their syntactic information, in the input document.
+	// Populated if the user enables
+	// [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax].
+	Tokens []*Token `protobuf:"bytes,2,rep,name=tokens" json:"tokens,omitempty"`
+	// Entities, along with their semantic information, in the input document.
+	// Populated if the user enables
+	// [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities].
+	Entities []*Entity `protobuf:"bytes,3,rep,name=entities" json:"entities,omitempty"`
+	// The overall sentiment for the document. Populated if the user enables
+	// [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment].
+	DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment" json:"document_sentiment,omitempty"`
+	// The language of the text, which will be the same as the language specified
+	// in the request or, if not specified, the automatically-detected language.
+	// See `Document.language` field for more details.
+	Language string `protobuf:"bytes,5,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnnotateTextResponse) Reset()                    { *m = AnnotateTextResponse{} }
+func (m *AnnotateTextResponse) String() string            { return proto.CompactTextString(m) }
+func (*AnnotateTextResponse) ProtoMessage()               {}
+func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
+
+func (m *AnnotateTextResponse) GetSentences() []*Sentence {
+	if m != nil {
+		return m.Sentences
+	}
+	return nil
+}
+
+func (m *AnnotateTextResponse) GetTokens() []*Token {
+	if m != nil {
+		return m.Tokens
+	}
+	return nil
+}
+
+func (m *AnnotateTextResponse) GetEntities() []*Entity {
+	if m != nil {
+		return m.Entities
+	}
+	return nil
+}
+
+func (m *AnnotateTextResponse) GetDocumentSentiment() *Sentiment {
+	if m != nil {
+		return m.DocumentSentiment
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*Document)(nil), "google.cloud.language.v1.Document")
+	proto.RegisterType((*Sentence)(nil), "google.cloud.language.v1.Sentence")
+	proto.RegisterType((*Entity)(nil), "google.cloud.language.v1.Entity")
+	proto.RegisterType((*Token)(nil), "google.cloud.language.v1.Token")
+	proto.RegisterType((*Sentiment)(nil), "google.cloud.language.v1.Sentiment")
+	proto.RegisterType((*PartOfSpeech)(nil), "google.cloud.language.v1.PartOfSpeech")
+	proto.RegisterType((*DependencyEdge)(nil), "google.cloud.language.v1.DependencyEdge")
+	proto.RegisterType((*EntityMention)(nil), "google.cloud.language.v1.EntityMention")
+	proto.RegisterType((*TextSpan)(nil), "google.cloud.language.v1.TextSpan")
+	proto.RegisterType((*AnalyzeSentimentRequest)(nil), "google.cloud.language.v1.AnalyzeSentimentRequest")
+	proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1.AnalyzeSentimentResponse")
+	proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1.AnalyzeEntitiesRequest")
+	proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1.AnalyzeEntitiesResponse")
+	proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1.AnalyzeSyntaxRequest")
+	proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1.AnalyzeSyntaxResponse")
+	proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1.AnnotateTextRequest")
+	proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1.AnnotateTextRequest.Features")
+	proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1.AnnotateTextResponse")
+	proto.RegisterEnum("google.cloud.language.v1.EncodingType", EncodingType_name, EncodingType_value)
+	proto.RegisterEnum("google.cloud.language.v1.Document_Type", Document_Type_name, Document_Type_value)
+	proto.RegisterEnum("google.cloud.language.v1.Entity_Type", Entity_Type_name, Entity_Type_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value)
+	proto.RegisterEnum("google.cloud.language.v1.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value)
+	proto.RegisterEnum("google.cloud.language.v1.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value)
+	proto.RegisterEnum("google.cloud.language.v1.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value)
+}
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ context.Context
+var _ grpc.ClientConn
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+const _ = grpc.SupportPackageIsVersion4
+
+// Client API for LanguageService service
+
+type LanguageServiceClient interface {
+	// Analyzes the sentiment of the provided text.
+	AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error)
+	// Finds named entities (currently finds proper names) in the text,
+	// entity types, salience, mentions for each entity, and other properties.
+	AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error)
+	// Analyzes the syntax of the text and provides sentence boundaries and
+	// tokenization along with part of speech tags, dependency trees, and other
+	// properties.
+	AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error)
+	// A convenience method that provides all the features that analyzeSentiment,
+	// analyzeEntities, and analyzeSyntax provide in one call.
+	AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error)
+}
+
+type languageServiceClient struct {
+	cc *grpc.ClientConn
+}
+
+func NewLanguageServiceClient(cc *grpc.ClientConn) LanguageServiceClient {
+	return &languageServiceClient{cc}
+}
+
+func (c *languageServiceClient) AnalyzeSentiment(ctx context.Context, in *AnalyzeSentimentRequest, opts ...grpc.CallOption) (*AnalyzeSentimentResponse, error) {
+	out := new(AnalyzeSentimentResponse)
+	err := grpc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeSentiment", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error) {
+	out := new(AnalyzeEntitiesResponse)
+	err := grpc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeEntities", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) {
+	out := new(AnalyzeSyntaxResponse)
+	err := grpc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnalyzeSyntax", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
+func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) {
+	out := new(AnnotateTextResponse)
+	err := grpc.Invoke(ctx, "/google.cloud.language.v1.LanguageService/AnnotateText", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
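
For orientation, a minimal caller of the generated client above might look like the sketch below. It is only a sketch: the endpoint, the insecure dial option, and the Document_Content oneof wrapper are assumptions based on standard protoc-gen-go output; a production client would use TLS transport credentials and OAuth2 per-RPC credentials.

package main

import (
	"fmt"
	"log"

	"golang.org/x/net/context"
	"google.golang.org/grpc"

	language "google.golang.org/genproto/googleapis/cloud/language/v1"
)

func main() {
	// Dial the service. The endpoint and the insecure option are placeholders;
	// real callers use TLS transport credentials and OAuth2 per-RPC credentials.
	conn, err := grpc.Dial("language.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := language.NewLanguageServiceClient(conn)

	// Analyze the sentiment of a short inline document.
	resp, err := client.AnalyzeSentiment(context.Background(), &language.AnalyzeSentimentRequest{
		Document: &language.Document{
			Type:   language.Document_PLAIN_TEXT,
			Source: &language.Document_Content{Content: "The weather today is wonderful."},
		},
		EncodingType: language.EncodingType_UTF8,
	})
	if err != nil {
		log.Fatalf("AnalyzeSentiment: %v", err)
	}
	if s := resp.GetDocumentSentiment(); s != nil {
		fmt.Printf("score=%v magnitude=%v\n", s.Score, s.Magnitude)
	}
}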
+
+// Server API for LanguageService service
+
+type LanguageServiceServer interface {
+	// Analyzes the sentiment of the provided text.
+	AnalyzeSentiment(context.Context, *AnalyzeSentimentRequest) (*AnalyzeSentimentResponse, error)
+	// Finds named entities (currently finds proper names) in the text,
+	// entity types, salience, mentions for each entity, and other properties.
+	AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
+	// Analyzes the syntax of the text and provides sentence boundaries and
+	// tokenization along with part of speech tags, dependency trees, and other
+	// properties.
+	AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
+	// A convenience method that provides all the features that analyzeSentiment,
+	// analyzeEntities, and analyzeSyntax provide in one call.
+	AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error)
+}
+
+func RegisterLanguageServiceServer(s *grpc.Server, srv LanguageServiceServer) {
+	s.RegisterService(&_LanguageService_serviceDesc, srv)
+}
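
For completeness, RegisterLanguageServiceServer is typically wired up as in the sketch below. The stub implementation and the port are hypothetical and only return canned responses so that the registration compiles; it is not a real backend.

package main

import (
	"log"
	"net"

	"golang.org/x/net/context"
	"google.golang.org/grpc"

	language "google.golang.org/genproto/googleapis/cloud/language/v1"
)

// stubServer satisfies LanguageServiceServer with canned responses.
type stubServer struct{}

func (stubServer) AnalyzeSentiment(ctx context.Context, req *language.AnalyzeSentimentRequest) (*language.AnalyzeSentimentResponse, error) {
	return &language.AnalyzeSentimentResponse{Language: "en"}, nil
}

func (stubServer) AnalyzeEntities(ctx context.Context, req *language.AnalyzeEntitiesRequest) (*language.AnalyzeEntitiesResponse, error) {
	return &language.AnalyzeEntitiesResponse{Language: "en"}, nil
}

func (stubServer) AnalyzeSyntax(ctx context.Context, req *language.AnalyzeSyntaxRequest) (*language.AnalyzeSyntaxResponse, error) {
	return &language.AnalyzeSyntaxResponse{Language: "en"}, nil
}

func (stubServer) AnnotateText(ctx context.Context, req *language.AnnotateTextRequest) (*language.AnnotateTextResponse, error) {
	return &language.AnnotateTextResponse{Language: "en"}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":8080") // port chosen arbitrarily for the sketch
	if err != nil {
		log.Fatalf("listen: %v", err)
	}
	s := grpc.NewServer()
	language.RegisterLanguageServiceServer(s, stubServer{})
	log.Fatal(s.Serve(lis))
}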
+
+func _LanguageService_AnalyzeSentiment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(AnalyzeSentimentRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeSentiment",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(LanguageServiceServer).AnalyzeSentiment(ctx, req.(*AnalyzeSentimentRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(AnalyzeEntitiesRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(LanguageServiceServer).AnalyzeEntities(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeEntities",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(LanguageServiceServer).AnalyzeEntities(ctx, req.(*AnalyzeEntitiesRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(AnalyzeSyntaxRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/google.cloud.language.v1.LanguageService/AnalyzeSyntax",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(AnnotateTextRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(LanguageServiceServer).AnnotateText(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/google.cloud.language.v1.LanguageService/AnnotateText",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(LanguageServiceServer).AnnotateText(ctx, req.(*AnnotateTextRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+var _LanguageService_serviceDesc = grpc.ServiceDesc{
+	ServiceName: "google.cloud.language.v1.LanguageService",
+	HandlerType: (*LanguageServiceServer)(nil),
+	Methods: []grpc.MethodDesc{
+		{
+			MethodName: "AnalyzeSentiment",
+			Handler:    _LanguageService_AnalyzeSentiment_Handler,
+		},
+		{
+			MethodName: "AnalyzeEntities",
+			Handler:    _LanguageService_AnalyzeEntities_Handler,
+		},
+		{
+			MethodName: "AnalyzeSyntax",
+			Handler:    _LanguageService_AnalyzeSyntax_Handler,
+		},
+		{
+			MethodName: "AnnotateText",
+			Handler:    _LanguageService_AnnotateText_Handler,
+		},
+	},
+	Streams:  []grpc.StreamDesc{},
+	Metadata: "google.golang.org/genproto/googleapis/cloud/language/v1/language_service.proto",
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/genproto/googleapis/cloud/language/v1/language_service.proto", fileDescriptor0)
+}
+
+var fileDescriptor0 = []byte{
+	// 2730 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xcc, 0x59, 0xcd, 0x6f, 0xdc, 0xc6,
+	0x15, 0x37, 0xf7, 0x4b, 0xbb, 0xb3, 0x92, 0x3c, 0x66, 0x9c, 0x64, 0xab, 0x7c, 0x39, 0x4c, 0xed,
+	0x28, 0x4e, 0xb2, 0x8a, 0x94, 0xd6, 0x71, 0x1c, 0x37, 0x31, 0x45, 0xce, 0xae, 0x28, 0x73, 0x87,
+	0xcc, 0x90, 0x5c, 0x2b, 0xb9, 0x2c, 0xe8, 0xdd, 0xd1, 0x66, 0x11, 0x89, 0xdc, 0x2e, 0x29, 0xc3,
+	0xea, 0xa5, 0x40, 0x81, 0x1e, 0x73, 0x4a, 0x0f, 0x45, 0x4f, 0x05, 0xfa, 0x71, 0x6c, 0xff, 0x80,
+	0x16, 0xe8, 0x3f, 0xd0, 0x5b, 0xff, 0x85, 0xde, 0x5a, 0xa0, 0xb7, 0xa2, 0x97, 0x02, 0xc5, 0x9b,
+	0x21, 0xf7, 0x43, 0x91, 0x6c, 0xc9, 0x08, 0xd0, 0xdc, 0x66, 0xde, 0xbe, 0xdf, 0xfb, 0x9e, 0xf7,
+	0x86, 0xb3, 0x88, 0x0e, 0xe3, 0x78, 0x78, 0xc0, 0x9b, 0xc3, 0xf8, 0x20, 0x8c, 0x86, 0xcd, 0x78,
+	0x32, 0xdc, 0x18, 0xf2, 0x68, 0x3c, 0x89, 0xd3, 0x78, 0x43, 0xfe, 0x14, 0x8e, 0x47, 0xc9, 0x46,
+	0xff, 0x20, 0x3e, 0x1a, 0x6c, 0x00, 0xcb, 0x51, 0x38, 0xe4, 0x1b, 0x8f, 0x36, 0xa7, 0xeb, 0x5e,
+	0xc2, 0x27, 0x8f, 0x46, 0x7d, 0xde, 0x14, 0x18, 0xb5, 0x91, 0xc9, 0x13, 0x80, 0x66, 0xce, 0xd4,
+	0x7c, 0xb4, 0xb9, 0x66, 0x9d, 0x4f, 0x53, 0x38, 0x1e, 0x6d, 0x64, 0x22, 0xfb, 0x71, 0xb4, 0x3f,
+	0x1a, 0x6e, 0x84, 0x51, 0x14, 0xa7, 0x61, 0x3a, 0x8a, 0xa3, 0x44, 0x2a, 0xd1, 0xfe, 0xa1, 0xa0,
+	0xaa, 0x19, 0xf7, 0x8f, 0x0e, 0x79, 0x94, 0xaa, 0x1f, 0xa1, 0x52, 0x7a, 0x3c, 0xe6, 0x0d, 0xe5,
+	0x9a, 0xb2, 0xbe, 0xba, 0xf5, 0x66, 0xf3, 0x2c, 0x03, 0x9a, 0x39, 0xa2, 0xe9, 0x1f, 0x8f, 0x39,
+	0x13, 0x20, 0x75, 0x0d, 0x2d, 0xf5, 0xe3, 0x28, 0xe5, 0x51, 0xda, 0x28, 0x5c, 0x53, 0xd6, 0x6b,
+	0x3b, 0x97, 0x58, 0x4e, 0x50, 0xd7, 0xd1, 0xe5, 0x61, 0x3f, 0xe9, 0x65, 0xdb, 0xde, 0xd1, 0x64,
+	0xd4, 0x28, 0x66, 0x3c, 0x2b, 0xc3, 0x7e, 0x62, 0x48, 0x7a, 0x30, 0x19, 0xa9, 0x6b, 0xa8, 0x9a,
+	0x2b, 0x6a, 0x94, 0x80, 0x85, 0x4d, 0xf7, 0xda, 0x2d, 0x54, 0x02, 0x7d, 0xea, 0x55, 0x84, 0xfd,
+	0xcf, 0x5c, 0xd2, 0x0b, 0xa8, 0xe7, 0x12, 0xc3, 0x6a, 0x59, 0xc4, 0xc4, 0x97, 0xd4, 0x55, 0x84,
+	0x5c, 0x5b, 0xb7, 0x68, 0xcf, 0x27, 0x7b, 0x3e, 0x56, 0xd4, 0x2a, 0x2a, 0xed, 0xf8, 0x1d, 0x1b,
+	0x17, 0xb6, 0xab, 0xa8, 0x92, 0xc4, 0x47, 0x93, 0x3e, 0xd7, 0x7e, 0xae, 0xa0, 0xaa, 0xc7, 0x41,
+	0x59, 0x9f, 0xab, 0xb7, 0x50, 0x29, 0xe5, 0x8f, 0x53, 0xe1, 0x6d, 0x7d, 0x4b, 0x3b, 0xdb, 0x5b,
+	0x9f, 0x3f, 0x4e, 0xbd, 0x71, 0x18, 0x31, 0xc1, 0xaf, 0xea, 0xa8, 0x96, 0xf0, 0x28, 0x1d, 0x1d,
+	0xe6, 0xae, 0xd6, 0xb7, 0xde, 0x38, 0x1b, 0xec, 0xe5, 0xac, 0x6c, 0x86, 0xd2, 0xfe, 0x5c, 0x44,
+	0x15, 0x12, 0xa5, 0xa3, 0xf4, 0x58, 0x55, 0x51, 0x29, 0x0a, 0x0f, 0x65, 0xcc, 0x6b, 0x4c, 0xac,
+	0xd5, 0x0f, 0xb3, 0x3c, 0x14, 0x44, 0x1e, 0xae, 0x9f, 0x2d, 0x5c, 0xca, 0x98, 0xcf, 0xc2, 0x2e,
+	0xaa, 0x1e, 0xf2, 0x34, 0x1c, 0x84, 0x69, 0xd8, 0x28, 0x5e, 0x2b, 0xae, 0xd7, 0xb7, 0x9a, 0x4f,
+	0x85, 0x77, 0x32, 0x00, 0x89, 0xd2, 0xc9, 0x31, 0x9b, 0xe2, 0x21, 0x17, 0x49, 0x78, 0x30, 0x82,
+	0x60, 0x89, 0x5c, 0x14, 0xd8, 0x74, 0xaf, 0x1a, 0xa0, 0x27, 0x12, 0x95, 0xd4, 0x28, 0x0b, 0x3d,
+	0x6f, 0x3e, 0x4d, 0x4f, 0x47, 0xf2, 0xb3, 0x29, 0x70, 0xed, 0x23, 0xb4, 0xb2, 0xa0, 0x5b, 0xc5,
+	0xa8, 0xf8, 0x25, 0x3f, 0xce, 0x62, 0x01, 0x4b, 0xf5, 0x2a, 0x2a, 0x3f, 0x0a, 0x0f, 0x8e, 0x64,
+	0x2c, 0x6a, 0x4c, 0x6e, 0xee, 0x14, 0x6e, 0x2b, 0xda, 0x71, 0x56, 0x0d, 0x75, 0xb4, 0x14, 0xd0,
+	0xfb, 0xd4, 0x79, 0x40, 0xf1, 0x25, 0x15, 0xa1, 0x8a, 0x4b, 0x98, 0xe7, 0x50, 0xac, 0xa8, 0xcb,
+	0xa8, 0x6a, 0x3b, 0x86, 0xee, 0x5b, 0x0e, 0xc5, 0x05, 0x15, 0xa3, 0x65, 0x87, 0xb5, 0x75, 0x6a,
+	0x7d, 0x2e, 0x29, 0x45, 0xb5, 0x86, 0xca, 0xa4, 0x4b, 0xa8, 0x8f, 0x4b, 0xea, 0x65, 0x54, 0x7f,
+	0xe0, 0xb0, 0xfb, 0x3d, 0xa7, 0xd5, 0xd3, 0x99, 0x8f, 0xcb, 0xea, 0x15, 0xb4, 0x62, 0x38, 0xd4,
+	0x0b, 0x3a, 0x84, 0xf5, 0xda, 0x8e, 0x63, 0xe2, 0x0a, 0xb0, 0x3b, 0xfe, 0x0e, 0x61, 0x78, 0x49,
+	0xfb, 0xb7, 0x82, 0xca, 0x7e, 0xfc, 0x25, 0x8f, 0x9e, 0xb9, 0x86, 0x6c, 0xb4, 0x3a, 0x0e, 0x27,
+	0x69, 0x2f, 0xde, 0xef, 0x25, 0x63, 0xce, 0xfb, 0x5f, 0x64, 0x85, 0x74, 0xe3, 0x6c, 0x09, 0x6e,
+	0x38, 0x49, 0x9d, 0x7d, 0x4f, 0x70, 0xb3, 0xe5, 0xf1, 0xdc, 0x4e, 0xfd, 0x14, 0x5d, 0x1e, 0xf0,
+	0x31, 0x8f, 0x06, 0x3c, 0xea, 0x1f, 0xf7, 0xf8, 0x60, 0xc8, 0xc5, 0xf1, 0xaa, 0x6f, 0xad, 0x3f,
+	0xe1, 0x08, 0x4f, 0x01, 0x64, 0x30, 0xe4, 0x6c, 0x75, 0xb0, 0xb0, 0x87, 0xb8, 0x1f, 0xf0, 0xc3,
+	0xc3, 0x30, 0x3b, 0x84, 0x72, 0xa3, 0x7d, 0x82, 0x6a, 0xd3, 0x7a, 0x56, 0x5f, 0x46, 0xb5, 0xc3,
+	0x70, 0x18, 0x8d, 0xd2, 0xa3, 0x81, 0x4c, 0x4f, 0x81, 0xcd, 0x08, 0x20, 0x20, 0xe9, 0xc7, 0x13,
+	0x69, 0x49, 0x81, 0xc9, 0x8d, 0xf6, 0x5f, 0x8c, 0x96, 0xe7, 0x1d, 0x51, 0xef, 0xa2, 0x62, 0x1a,
+	0x0e, 0xb3, 0x8e, 0x73, 0xf3, 0x7c, 0xde, 0x37, 0xfd, 0x70, 0xc8, 0x00, 0xa6, 0x12, 0x54, 0x09,
+	0x93, 0x31, 0xef, 0xa7, 0xd9, 0x51, 0x79, 0xf7, 0x9c, 0x02, 0x74, 0x01, 0x62, 0x19, 0x58, 0xfd,
+	0x04, 0x95, 0xfa, 0x61, 0x22, 0x4d, 0x5d, 0xdd, 0x7a, 0xfb, 0x9c, 0x42, 0x8c, 0x30, 0xe1, 0x4c,
+	0x00, 0x41, 0xc0, 0x7e, 0x3c, 0x39, 0x14, 0xc1, 0x3a, 0xbf, 0x80, 0x56, 0x3c, 0x39, 0x64, 0x02,
+	0x08, 0x8e, 0x0c, 0x21, 0xfc, 0x93, 0x46, 0xf9, 0x42, 0x8e, 0xb4, 0x05, 0x88, 0x65, 0x60, 0xb0,
+	0xe3, 0x30, 0x8e, 0x07, 0x8d, 0xca, 0x85, 0xec, 0xe8, 0xc4, 0xf1, 0x80, 0x09, 0x20, 0xd8, 0x11,
+	0x1d, 0x1d, 0x3e, 0xe4, 0x93, 0xc6, 0xd2, 0x85, 0xec, 0xa0, 0x02, 0xc4, 0x32, 0x30, 0x88, 0x19,
+	0xf3, 0x49, 0x12, 0x47, 0x8d, 0xea, 0x85, 0xc4, 0xb8, 0x02, 0xc4, 0x32, 0xb0, 0x10, 0x33, 0x89,
+	0xc7, 0x7c, 0xd2, 0xa8, 0x5d, 0x4c, 0x8c, 0x00, 0xb1, 0x0c, 0xac, 0xfa, 0xa8, 0x3e, 0xe1, 0xfd,
+	0xd1, 0x78, 0x12, 0xf7, 0x47, 0xe9, 0x71, 0x03, 0x09, 0x59, 0x5b, 0xe7, 0x94, 0xc5, 0x66, 0x48,
+	0x36, 0x2f, 0x46, 0xdd, 0x46, 0xe5, 0x94, 0x47, 0x09, 0x6f, 0xd4, 0x85, 0xbc, 0x77, 0xce, 0x5b,
+	0xbb, 0x80, 0x61, 0x12, 0x0a, 0x32, 0x1e, 0xc5, 0xa3, 0x3e, 0x6f, 0x2c, 0x5f, 0x48, 0x46, 0x17,
+	0x30, 0x4c, 0x42, 0xb5, 0xaf, 0x14, 0x54, 0xf4, 0xc3, 0xe1, 0x62, 0x1f, 0x5c, 0x42, 0x45, 0xdd,
+	0xdc, 0xc5, 0x8a, 0x5c, 0xb8, 0xb8, 0x20, 0x17, 0x5d, 0x5c, 0x84, 0xb9, 0x68, 0x38, 0x74, 0x17,
+	0x97, 0x80, 0x64, 0x12, 0xe8, 0x76, 0x55, 0x54, 0xa2, 0x4e, 0x40, 0x71, 0x05, 0x48, 0x34, 0xe8,
+	0xe0, 0x25, 0x20, 0xb9, 0xcc, 0xa1, 0xb8, 0x0a, 0x24, 0x97, 0xf9, 0xb8, 0x06, 0x0d, 0xd0, 0x0d,
+	0xa8, 0xe1, 0x63, 0x04, 0xbf, 0x76, 0x09, 0xdb, 0xc6, 0x75, 0xb5, 0x8c, 0x94, 0x3d, 0xbc, 0x0c,
+	0xbf, 0xe9, 0xad, 0x96, 0xb5, 0x87, 0x57, 0x34, 0x07, 0x55, 0xe4, 0xf1, 0x52, 0x55, 0xb4, 0xaa,
+	0xc3, 0x84, 0xf6, 0x7b, 0x33, 0xc3, 0x60, 0x4a, 0x13, 0xd6, 0x22, 0x86, 0x6f, 0x75, 0x09, 0x56,
+	0xa0, 0x2d, 0x5b, 0x9d, 0x39, 0x4a, 0x01, 0x7a, 0xb1, 0xcb, 0x9c, 0x36, 0x23, 0x9e, 0x07, 0x84,
+	0xa2, 0xf6, 0x1f, 0x05, 0x95, 0xe0, 0xac, 0x01, 0xaf, 0xa1, 0x7b, 0x64, 0x51, 0x9a, 0x6e, 0x18,
+	0x81, 0xa7, 0x67, 0xd2, 0x56, 0x50, 0x4d, 0x37, 0xc1, 0x32, 0x4b, 0xb7, 0x71, 0x41, 0x76, 0xf1,
+	0x8e, 0x6b, 0x93, 0x0e, 0xa1, 0x82, 0xa3, 0x08, 0x03, 0xc2, 0x94, 0xdc, 0x25, 0x18, 0x10, 0x6d,
+	0x42, 0x2d, 0xb1, 0x2b, 0x0b, 0x4b, 0xa8, 0xe7, 0xb3, 0x00, 0x98, 0x75, 0x1b, 0x57, 0x66, 0x03,
+	0xa4, 0x4b, 0xf0, 0x12, 0xe8, 0xa2, 0x4e, 0xc7, 0xa2, 0x72, 0x5f, 0x85, 0x78, 0x3b, 0xdb, 0xb6,
+	0xf5, 0x69, 0x40, 0x70, 0x0d, 0x14, 0xbb, 0x3a, 0xf3, 0xa5, 0x2c, 0x04, 0x8a, 0x5d, 0x46, 0x5c,
+	0xc7, 0xb3, 0x60, 0xd6, 0xe8, 0x36, 0xae, 0x43, 0x30, 0x18, 0x69, 0xd9, 0x64, 0xcf, 0xea, 0x92,
+	0x1e, 0xb8, 0x81, 0x97, 0x81, 0x8d, 0x11, 0x5b, 0x08, 0x94, 0xa4, 0x15, 0xd0, 0xd9, 0xcd, 0x75,
+	0xae, 0x6a, 0x7f, 0x54, 0x50, 0x09, 0xba, 0x04, 0x18, 0xd7, 0x72, 0x58, 0x67, 0xce, 0xf5, 0x65,
+	0x54, 0xd5, 0x4d, 0x30, 0x48, 0xb7, 0x33, 0xc7, 0x83, 0x3d, 0xcb, 0xb6, 0x74, 0xf6, 0x19, 0x2e,
+	0x80, 0xb2, 0x39, 0xc7, 0x3f, 0x27, 0x0c, 0x17, 0x85, 0x08, 0x8b, 0xea, 0x76, 0x8f, 0x50, 0xd3,
+	0xa2, 0x6d, 0x5c, 0x82, 0x58, 0xb4, 0x09, 0x0b, 0xa8, 0x89, 0xcb, 0xb0, 0x66, 0x44, 0xb7, 0x2d,
+	0x4f, 0xfa, 0x6d, 0xb1, 0x6c, 0xb7, 0x04, 0xa9, 0xf5, 0x76, 0x1c, 0xe6, 0xe3, 0x2a, 0xa4, 0xdd,
+	0x76, 0x68, 0x5b, 0xd6, 0x82, 0xc3, 0x4c, 0xc2, 0x30, 0x02, 0xee, 0xec, 0x1a, 0x66, 0xe0, 0xba,
+	0x46, 0x50, 0x45, 0xf6, 0x24, 0xb0, 0xa1, 0x4d, 0xa8, 0x49, 0xd8, 0xa2, 0xd1, 0x2d, 0xd2, 0xb1,
+	0xa8, 0x45, 0xb3, 0x6c, 0x75, 0x74, 0xcf, 0x08, 0x6c, 0xd8, 0x16, 0xc0, 0x04, 0x4a, 0x02, 0x1f,
+	0x8c, 0xd5, 0x7e, 0x8a, 0x4a, 0xd0, 0x95, 0xc0, 0xe8, 0x8e, 0xe3, 0x98, 0x73, 0x22, 0xae, 0x22,
+	0x6c, 0x38, 0xd4, 0xcc, 0x02, 0xdb, 0x83, 0x5f, 0xb1, 0x02, 0xc9, 0x11, 0x65, 0xa4, 0x67, 0x45,
+	0x04, 0x7b, 0x6a, 0x5a, 0x59, 0x20, 0x8b, 0x10, 0x69, 0x8b, 0xfa, 0x84, 0x31, 0xa7, 0x9d, 0x67,
+	0xbf, 0x8e, 0x96, 0x76, 0x03, 0x59, 0x63, 0x65, 0x28, 0x3a, 0x2f, 0xd8, 0xde, 0x85, 0xf2, 0x06,
+	0x42, 0x45, 0xbb, 0x87, 0x2a, 0xb2, 0xa7, 0x81, 0x1f, 0x34, 0xe8, 0x6c, 0x9f, 0xf4, 0xc3, 0xb3,
+	0x68, 0x3b, 0xb0, 0x75, 0x86, 0x15, 0x71, 0xe9, 0xb0, 0x03, 0x26, 0x4a, 0xae, 0x8a, 0x4a, 0x66,
+	0xa0, 0xdb, 0xb8, 0xa8, 0xf9, 0xa8, 0x22, 0xdb, 0x19, 0x48, 0x90, 0x97, 0x92, 0x39, 0x09, 0x35,
+	0x54, 0x6e, 0x59, 0xcc, 0xf3, 0x25, 0xdc, 0x23, 0xe0, 0x13, 0x2e, 0x00, 0xd9, 0xdf, 0xb1, 0x98,
+	0x89, 0x8b, 0xe0, 0xe8, 0xac, 0x60, 0xb2, 0x4b, 0x4d, 0x49, 0xbb, 0x8d, 0x2a, 0xb2, 0xbb, 0x09,
+	0xa9, 0xcc, 0x71, 0x17, 0xec, 0x02, 0x4b, 0x04, 0x4d, 0x86, 0x84, 0x3a, 0x7e, 0x2f, 0xdb, 0x17,
+	0xb4, 0x5d, 0x54, 0x9f, 0xeb, 0x65, 0xea, 0x8b, 0xe8, 0x39, 0x46, 0x0c, 0xcb, 0x65, 0x8e, 0x61,
+	0xf9, 0x9f, 0x2d, 0x9e, 0xa9, 0xfc, 0x07, 0x51, 0x5a, 0xe0, 0xbf, 0x43, 0x7b, 0x73, 0xb4, 0x82,
+	0x96, 0xa0, 0xb2, 0xe8, 0x63, 0x10, 0x57, 0x9f, 0xd0, 0x85, 0x33, 0xf9, 0x3c, 0xba, 0x32, 0x9f,
+	0x20, 0xf1, 0xb3, 0xf4, 0xb2, 0x15, 0xf8, 0x01, 0x23, 0x32, 0x48, 0xae, 0xee, 0xf9, 0xb8, 0x08,
+	0x49, 0x70, 0x19, 0xf1, 0xe4, 0x2d, 0x6c, 0x05, 0xd5, 0xa6, 0xbd, 0x00, 0x97, 0xe5, 0x85, 0x3e,
+	0xc8, 0xf7, 0x15, 0x6d, 0x1b, 0x95, 0x45, 0xe3, 0x03, 0xa5, 0x5d, 0xc7, 0x32, 0xc8, 0xa2, 0xe3,
+	0xba, 0x31, 0x6b, 0x02, 0x86, 0x9e, 0xf7, 0x84, 0x82, 0x50, 0xa1, 0xe7, 0xbd, 0xe4, 0x5f, 0x4b,
+	0x68, 0x75, 0xf1, 0xe6, 0xa3, 0xae, 0x23, 0xfc, 0x05, 0x0f, 0x07, 0xbd, 0x14, 0x2e, 0x74, 0xbd,
+	0x51, 0x34, 0xe0, 0x8f, 0xc5, 0x75, 0xa4, 0xcc, 0x56, 0x81, 0x2e, 0xee, 0x79, 0x16, 0x50, 0x55,
+	0x13, 0x95, 0x0f, 0xc2, 0x87, 0xfc, 0x20, 0xbb, 0x6c, 0x34, 0xcf, 0x7b, 0xb9, 0x6a, 0xda, 0x80,
+	0x62, 0x12, 0xac, 0xfd, 0xb3, 0x82, 0xca, 0x82, 0xf0, 0x8d, 0x9b, 0xab, 0xbe, 0xbd, 0xcd, 0x48,
+	0x17, 0x2b, 0xa2, 0x9b, 0xc2, 0xf9, 0x95, 0x05, 0xa1, 0x9b, 0x5d, 0xc3, 0x96, 0xad, 0x4b, 0x37,
+	0xbb, 0x1d, 0xc7, 0xc4, 0x25, 0x88, 0xa0, 0x0e, 0xab, 0xb2, 0x60, 0x70, 0x5d, 0x07, 0xce, 0x2d,
+	0x10, 0x7d, 0x9f, 0xe1, 0x25, 0xd1, 0xec, 0x83, 0x3d, 0xd9, 0xa4, 0xf4, 0x60, 0x0f, 0xfc, 0xc7,
+	0x35, 0xb5, 0x82, 0x0a, 0x86, 0x81, 0x11, 0x40, 0x0c, 0x21, 0xbe, 0x3e, 0x1d, 0x06, 0xa2, 0x83,
+	0x1b, 0x70, 0x04, 0xf0, 0x8a, 0x08, 0x20, 0x2c, 0x05, 0x6c, 0x55, 0x8e, 0x09, 0x17, 0x5f, 0xce,
+	0xe7, 0x05, 0x06, 0x06, 0xd3, 0xf2, 0x0c, 0x27, 0x60, 0x1e, 0xc1, 0x57, 0x44, 0xcd, 0x3b, 0xdb,
+	0xbb, 0x58, 0x85, 0x15, 0xd9, 0x73, 0x6d, 0xfc, 0x9c, 0xe8, 0xad, 0x0e, 0xf1, 0x1e, 0x58, 0xfe,
+	0x0e, 0xbe, 0x0a, 0x74, 0x0b, 0x38, 0x9e, 0x87, 0x55, 0x47, 0x67, 0xf7, 0xf1, 0x0b, 0x20, 0xad,
+	0xf3, 0x80, 0xe0, 0x17, 0xe5, 0xa2, 0x8b, 0x1b, 0x62, 0xf8, 0x90, 0x36, 0xfe, 0x1e, 0x18, 0x4a,
+	0x29, 0x5e, 0x03, 0x21, 0xd4, 0xcd, 0x7c, 0x7e, 0x09, 0x2c, 0xa4, 0xc2, 0xc2, 0x97, 0xc1, 0x00,
+	0x3a, 0xb5, 0xf0, 0x95, 0x7c, 0x6a, 0xbd, 0x2a, 0x5a, 0x88, 0x38, 0xab, 0xf8, 0x35, 0x98, 0x4c,
+	0x2e, 0xbe, 0x96, 0x75, 0x66, 0xdd, 0xd7, 0xf7, 0x2c, 0x0f, 0xbf, 0x2e, 0xab, 0x81, 0xf9, 0x20,
+	0x51, 0x13, 0x13, 0x4d, 0x04, 0xe2, 0x0d, 0x51, 0x92, 0x60, 0xe1, 0xf7, 0xe5, 0xca, 0xf3, 0xf0,
+	0x75, 0xc1, 0xeb, 0x78, 0x3e, 0xd8, 0x74, 0x23, 0xab, 0x54, 0xc1, 0xfd, 0xe6, 0x74, 0x43, 0x77,
+	0xf1, 0xba, 0x3c, 0x74, 0x04, 0x22, 0xf3, 0x96, 0x1c, 0x9b, 0xa4, 0x85, 0x6f, 0x66, 0x2b, 0x17,
+	0xbf, 0x2d, 0xb4, 0x30, 0x87, 0xda, 0xf8, 0x9d, 0x7c, 0x96, 0xbe, 0x0b, 0x1e, 0xba, 0x1e, 0x6e,
+	0x82, 0x87, 0x9f, 0x06, 0x3a, 0x15, 0xf6, 0x6c, 0x00, 0x27, 0x33, 0x60, 0xf9, 0x1e, 0xfc, 0x20,
+	0x96, 0x8c, 0xd8, 0x78, 0x53, 0xfc, 0x60, 0x32, 0xc7, 0xc5, 0x5b, 0x20, 0x02, 0x14, 0xbc, 0x0f,
+	0x36, 0x30, 0xd2, 0xa1, 0x3a, 0xf5, 0xf1, 0x0f, 0xe4, 0xa1, 0x05, 0x3f, 0xa9, 0x19, 0x74, 0xf0,
+	0x0f, 0x41, 0x3b, 0x73, 0x1c, 0x1f, 0xdf, 0x82, 0x95, 0x07, 0xc1, 0xf9, 0x40, 0xac, 0x82, 0x56,
+	0x0b, 0xdf, 0x86, 0x95, 0xd0, 0xf8, 0xa1, 0xe8, 0x37, 0x8e, 0x6b, 0x19, 0xf8, 0x8e, 0x98, 0xe9,
+	0x40, 0xfc, 0x68, 0x61, 0x06, 0xdd, 0x05, 0x96, 0x3d, 0xe1, 0xf6, 0x8f, 0x44, 0xa7, 0x0a, 0xc4,
+	0x98, 0xff, 0x58, 0x20, 0x2d, 0xdf, 0x26, 0xf8, 0x13, 0x39, 0x8a, 0xba, 0xee, 0x0e, 0xa0, 0xef,
+	0x65, 0x25, 0x07, 0x27, 0x10, 0xeb, 0xa2, 0x3a, 0x83, 0xbd, 0x6e, 0x17, 0x6f, 0xc3, 0xd2, 0x14,
+	0x5a, 0x0d, 0x60, 0x69, 0x39, 0x8c, 0x58, 0x6d, 0x8a, 0x4d, 0x08, 0xc5, 0xfd, 0x07, 0x98, 0x88,
+	0xe1, 0x62, 0x79, 0x3e, 0x6e, 0xc9, 0xeb, 0x48, 0xc7, 0xc0, 0x6d, 0x51, 0x00, 0x4e, 0x47, 0xd6,
+	0xe5, 0x0e, 0x0c, 0x83, 0x7c, 0x27, 0x12, 0x6f, 0x09, 0xce, 0xa0, 0x63, 0xe0, 0x5d, 0x08, 0x8b,
+	0xe1, 0xb8, 0xf8, 0x3e, 0x44, 0xc2, 0xb4, 0x3c, 0x31, 0xb7, 0x89, 0x89, 0x6d, 0xed, 0x4f, 0x0a,
+	0x5a, 0x59, 0xf8, 0xfe, 0x7c, 0xe6, 0x6f, 0xb6, 0x7b, 0x0b, 0x5f, 0xe5, 0xef, 0x9c, 0xf3, 0x73,
+	0x77, 0xee, 0xe3, 0x5c, 0x7b, 0x2f, 0xfb, 0x64, 0xc5, 0x68, 0x39, 0x7b, 0xc0, 0x38, 0xad, 0x71,
+	0x23, 0x54, 0x31, 0x9c, 0x4e, 0x07, 0xbe, 0x5a, 0xb5, 0x36, 0xaa, 0xe6, 0x56, 0xa8, 0x8d, 0xd9,
+	0x03, 0x8b, 0xfc, 0x40, 0x9e, 0x3e, 0xaf, 0xbc, 0x8e, 0x96, 0x1f, 0xf2, 0xe1, 0x28, 0xea, 0xc5,
+	0xfb, 0xfb, 0x09, 0x97, 0x1f, 0x43, 0x65, 0x56, 0x17, 0x34, 0x47, 0x90, 0xb4, 0xdf, 0x2b, 0xe8,
+	0x45, 0x3d, 0x0a, 0x0f, 0x8e, 0x7f, 0xc2, 0x67, 0x2f, 0x12, 0xfc, 0xc7, 0x47, 0x3c, 0x49, 0xd5,
+	0x8f, 0x51, 0x75, 0x90, 0x3d, 0xe8, 0x3c, 0x3d, 0x28, 0xf9, 0xd3, 0x0f, 0x9b, 0x62, 0xd4, 0xfb,
+	0x68, 0x85, 0x47, 0xfd, 0x78, 0x30, 0x8a, 0x86, 0xbd, 0xb9, 0x08, 0xdd, 0x78, 0x52, 0x84, 0x24,
+	0xbb, 0x88, 0xcd, 0x32, 0x9f, 0xdb, 0x69, 0x7f, 0x55, 0x50, 0xe3, 0x9b, 0x86, 0x26, 0xe3, 0x18,
+	0xc6, 0x0d, 0x43, 0x6a, 0xae, 0xb5, 0x37, 0x7b, 0x83, 0x51, 0xce, 0xff, 0x06, 0x73, 0x25, 0x87,
+	0xcf, 0x3e, 0x63, 0xe7, 0x5f, 0x9c, 0x0a, 0x8b, 0x2f, 0x4e, 0xea, 0x3d, 0xf9, 0xd4, 0xc3, 0xa3,
+	0x3e, 0x4f, 0xb2, 0xe7, 0x14, 0xed, 0xc9, 0x6a, 0x80, 0x95, 0xcd, 0x40, 0xda, 0xef, 0x14, 0xf4,
+	0x42, 0xe6, 0x8e, 0x28, 0x8b, 0x11, 0x4f, 0xbe, 0x93, 0x61, 0x4f, 0xa6, 0xe5, 0x31, 0x33, 0x33,
+	0x0b, 0xfa, 0x5d, 0x54, 0xe5, 0x19, 0xad, 0xa1, 0x88, 0x18, 0x5c, 0x7b, 0x5a, 0xed, 0xb3, 0x29,
+	0xe2, 0x49, 0xe1, 0xd5, 0x7e, 0xa3, 0xa0, 0xab, 0x79, 0xae, 0x8f, 0xa3, 0x34, 0x7c, 0xfc, 0x9d,
+	0x0c, 0xcd, 0x1f, 0x14, 0xf4, 0xfc, 0x09, 0x2b, 0xb3, 0xc8, 0x2c, 0x94, 0x87, 0xf2, 0x0c, 0xe5,
+	0xa1, 0x7e, 0x80, 0x2a, 0xe2, 0xde, 0x91, 0x34, 0x0a, 0x02, 0xfe, 0xda, 0x13, 0xba, 0x11, 0xf0,
+	0xb1, 0x8c, 0x7d, 0x21, 0xac, 0xc5, 0x13, 0x61, 0xfd, 0xba, 0x88, 0x9e, 0xd3, 0xe5, 0x4b, 0x2f,
+	0x87, 0xee, 0xf1, 0x6d, 0x45, 0x95, 0xa1, 0xea, 0x3e, 0x0f, 0xd3, 0xa3, 0x09, 0x4f, 0xb2, 0xe7,
+	0xaa, 0x5b, 0x67, 0xe3, 0x4f, 0x31, 0xa0, 0xd9, 0xca, 0xd0, 0x6c, 0x2a, 0xe7, 0x9b, 0x99, 0x2a,
+	0x3e, 0x7b, 0xa6, 0xd6, 0x7e, 0xa5, 0xa0, 0x6a, 0xae, 0x43, 0xbd, 0x8e, 0x56, 0xf9, 0xe3, 0x74,
+	0x12, 0xf6, 0xd3, 0x5e, 0x22, 0xd2, 0x26, 0x7c, 0xae, 0xb2, 0x95, 0x8c, 0x2a, 0x73, 0xa9, 0xbe,
+	0x85, 0x70, 0xce, 0x36, 0xad, 0xf2, 0x82, 0x60, 0xbc, 0x9c, 0xd1, 0xf3, 0x03, 0xa1, 0xde, 0x45,
+	0x6b, 0x39, 0xeb, 0x29, 0x5d, 0xa8, 0x28, 0x40, 0x8d, 0x8c, 0xc3, 0x3c, 0xd9, 0x67, 0xb4, 0xbf,
+	0x14, 0xa0, 0xd8, 0xe7, 0x83, 0xf2, 0xff, 0xaf, 0xa2, 0xf9, 0xa3, 0x5d, 0xbc, 0xf0, 0xd1, 0x3e,
+	0xbd, 0x1b, 0x97, 0xbe, 0xb5, 0x6e, 0x5c, 0x5e, 0xac, 0xeb, 0x9b, 0xb7, 0xd1, 0xf2, 0x7c, 0xf2,
+	0xe5, 0x3d, 0x81, 0x12, 0x7c, 0x09, 0x56, 0x81, 0xdf, 0xba, 0x2d, 0xaf, 0xce, 0x81, 0xdf, 0xda,
+	0xbc, 0x25, 0xaf, 0xce, 0x81, 0xdf, 0x7a, 0x7f, 0x0b, 0x17, 0xb7, 0xbe, 0x2a, 0xa3, 0xcb, 0x76,
+	0x26, 0xc6, 0x93, 0xff, 0x88, 0xa8, 0xbf, 0x55, 0x10, 0x3e, 0x39, 0x68, 0xd4, 0xcd, 0x27, 0x15,
+	0xf4, 0xa9, 0xd3, 0x73, 0x6d, 0xeb, 0x22, 0x10, 0x99, 0x72, 0xed, 0xad, 0x9f, 0xfd, 0xed, 0xef,
+	0x5f, 0x17, 0xde, 0xd0, 0x5e, 0xdd, 0x78, 0xb4, 0xb9, 0x91, 0x07, 0x21, 0xb9, 0x13, 0x9e, 0xe0,
+	0xbf, 0xa3, 0xdc, 0x54, 0x7f, 0xad, 0xa0, 0xcb, 0x27, 0x3a, 0xb3, 0xfa, 0xde, 0x53, 0x55, 0x9e,
+	0x98, 0x35, 0x6b, 0x9b, 0x17, 0x40, 0x64, 0x36, 0xae, 0x0b, 0x1b, 0x35, 0xed, 0x95, 0x53, 0x6d,
+	0xcc, 0xd9, 0xc1, 0xc4, 0x5f, 0x2a, 0x68, 0x65, 0xa1, 0x41, 0xaa, 0xcd, 0xa7, 0xc7, 0x64, 0xbe,
+	0xdf, 0xaf, 0x6d, 0x9c, 0x9b, 0x3f, 0x33, 0xee, 0x86, 0x30, 0xee, 0x9a, 0xf6, 0xd2, 0xe9, 0x01,
+	0x14, 0xcc, 0x60, 0xda, 0x2f, 0x14, 0xb4, 0x3c, 0x7f, 0xe8, 0xd4, 0x77, 0x2f, 0xd4, 0xb1, 0xd6,
+	0x9a, 0xe7, 0x65, 0xcf, 0xec, 0xba, 0x2e, 0xec, 0x7a, 0x4d, 0x5b, 0x3b, 0x69, 0xd7, 0x8c, 0xf7,
+	0x8e, 0x72, 0x73, 0xfb, 0x16, 0x7a, 0xb9, 0x1f, 0x1f, 0x9e, 0x29, 0x7b, 0xfb, 0xea, 0x89, 0x62,
+	0x75, 0x27, 0x71, 0x1a, 0xbb, 0xca, 0xc3, 0x8a, 0xf8, 0xd3, 0xee, 0xfd, 0xff, 0x05, 0x00, 0x00,
+	0xff, 0xff, 0xde, 0x74, 0x83, 0x80, 0x6b, 0x1c, 0x00, 0x00,
+}
diff --git a/googleapis/cloud/language/v1/language_service.proto b/googleapis/cloud/language/v1/language_service.proto
new file mode 100644
index 0000000000000000000000000000000000000000..d83aaf4b9300307c54df1891e7d2f379bb5e9f4b
--- /dev/null
+++ b/googleapis/cloud/language/v1/language_service.proto
@@ -0,0 +1,950 @@
+// Copyright 2016 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.cloud.language.v1;
+
+import "google.golang.org/genproto/googleapis/api/serviceconfig/annotations.proto"; // from google/api/annotations.proto
+
+option java_multiple_files = true;
+option java_outer_classname = "LanguageServiceProto";
+option java_package = "com.google.cloud.language.v1";
+
+
+// Provides text analysis operations such as sentiment analysis and entity
+// recognition.
+service LanguageService {
+  // Analyzes the sentiment of the provided text.
+  rpc AnalyzeSentiment(AnalyzeSentimentRequest) returns (AnalyzeSentimentResponse) {
+    option (google.api.http) = { post: "/v1/documents:analyzeSentiment" body: "*" };
+  }
+
+  // Finds named entities (currently finds proper names) in the text,
+  // entity types, salience, mentions for each entity, and other properties.
+  rpc AnalyzeEntities(AnalyzeEntitiesRequest) returns (AnalyzeEntitiesResponse) {
+    option (google.api.http) = { post: "/v1/documents:analyzeEntities" body: "*" };
+  }
+
+  // Analyzes the syntax of the text and provides sentence boundaries and
+  // tokenization along with part of speech tags, dependency trees, and other
+  // properties.
+  rpc AnalyzeSyntax(AnalyzeSyntaxRequest) returns (AnalyzeSyntaxResponse) {
+    option (google.api.http) = { post: "/v1/documents:analyzeSyntax" body: "*" };
+  }
+
+  // A convenience method that provides all the features that analyzeSentiment,
+  // analyzeEntities, and analyzeSyntax provide in one call.
+  rpc AnnotateText(AnnotateTextRequest) returns (AnnotateTextResponse) {
+    option (google.api.http) = { post: "/v1/documents:annotateText" body: "*" };
+  }
+}
+
+// ################################################################ #
+//
+// Represents the input to API methods.
+message Document {
+  // The document types enum.
+  enum Type {
+    // The content type is not specified.
+    TYPE_UNSPECIFIED = 0;
+
+    // Plain text
+    PLAIN_TEXT = 1;
+
+    // HTML
+    HTML = 2;
+  }
+
+  // Required. If the type is not set or is `TYPE_UNSPECIFIED`,
+  // returns an `INVALID_ARGUMENT` error.
+  Type type = 1;
+
+  // The source of the document: a string containing the content or a
+  // Google Cloud Storage URI.
+  oneof source {
+    // The content of the input in string format.
+    string content = 2;
+
+    // The Google Cloud Storage URI where the file content is located.
+    // This URI must be of the form: gs://bucket_name/object_name. For more
+    // details, see https://cloud.google.com/storage/docs/reference-uris.
+    // NOTE: Cloud Storage object versioning is not supported.
+    string gcs_content_uri = 3;
+  }
+
+  // The language of the document (if not specified, the language is
+  // automatically detected). Both ISO and BCP-47 language codes are
+  // accepted.<br>
+  // **Current Language Restrictions:**
+  //
+  //  * Only English, Spanish, and Japanese textual content
+  //    are supported, with the following additional restriction:
+  //    * `analyzeSentiment` only supports English text.
+  // If the language (either specified by the caller or automatically detected)
+  // is not supported by the called API method, an `INVALID_ARGUMENT` error
+  // is returned.
+  string language = 4;
+}
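
Because `content` and `gcs_content_uri` live in the `source` oneof, a Document carries exactly one of them. A small Go sketch, assuming the standard protoc-gen-go wrapper types Document_Content and Document_GcsContentUri (the bucket and object names are made up):

package main

import (
	"fmt"

	proto "github.com/golang/protobuf/proto"

	language "google.golang.org/genproto/googleapis/cloud/language/v1"
)

func main() {
	// Inline text content.
	inline := &language.Document{
		Type:   language.Document_PLAIN_TEXT,
		Source: &language.Document_Content{Content: "Hello, world."},
	}

	// The same document sourced from Cloud Storage instead.
	fromGCS := &language.Document{
		Type:   language.Document_PLAIN_TEXT,
		Source: &language.Document_GcsContentUri{GcsContentUri: "gs://my-bucket/my-object.txt"},
	}

	fmt.Println(proto.CompactTextString(inline))
	fmt.Println(proto.CompactTextString(fromGCS))
}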
+
+// Represents a sentence in the input document.
+message Sentence {
+  // The sentence text.
+  TextSpan text = 1;
+
+  // For calls to [AnalyzeSentiment][] or if
+  // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment] is set to
+  // true, this field will contain the sentiment for the sentence.
+  Sentiment sentiment = 2;
+}
+
+// Represents a phrase in the text that is a known entity, such as
+// a person, an organization, or location. The API associates information, such
+// as salience and mentions, with entities.
+message Entity {
+  // The type of the entity.
+  enum Type {
+    // Unknown
+    UNKNOWN = 0;
+
+    // Person
+    PERSON = 1;
+
+    // Location
+    LOCATION = 2;
+
+    // Organization
+    ORGANIZATION = 3;
+
+    // Event
+    EVENT = 4;
+
+    // Work of art
+    WORK_OF_ART = 5;
+
+    // Consumer goods
+    CONSUMER_GOOD = 6;
+
+    // Other types
+    OTHER = 7;
+  }
+
+  // The representative name for the entity.
+  string name = 1;
+
+  // The entity type.
+  Type type = 2;
+
+  // Metadata associated with the entity.
+  //
+  // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
+  // available. The associated keys are "wikipedia_url" and "mid", respectively.
+  map<string, string> metadata = 3;
+
+  // The salience score associated with the entity in the [0, 1.0] range.
+  //
+  // The salience score for an entity provides information about the
+  // importance or centrality of that entity to the entire document text.
+  // Scores closer to 0 are less salient, while scores closer to 1.0 are highly
+  // salient.
+  float salience = 4;
+
+  // The mentions of this entity in the input document. The API currently
+  // supports proper noun mentions.
+  repeated EntityMention mentions = 5;
+}
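
To illustrate how salience, metadata, and mentions are usually consumed, here is a hypothetical helper that walks an AnalyzeEntitiesResponse; the function name and output format are ours, not part of the API:

package example

import (
	"fmt"

	language "google.golang.org/genproto/googleapis/cloud/language/v1"
)

// printEntities walks an entity analysis result and prints the fields
// described above: name, type, salience, metadata, and mentions.
func printEntities(resp *language.AnalyzeEntitiesResponse) {
	for _, e := range resp.Entities {
		// Salience is in [0, 1.0]; higher means more central to the document.
		fmt.Printf("%s (%s) salience=%.2f\n", e.Name, e.Type, e.Salience)
		// Metadata currently carries "wikipedia_url" and "mid" when available.
		if url, ok := e.Metadata["wikipedia_url"]; ok {
			fmt.Println("  ", url)
		}
		for _, m := range e.Mentions {
			if t := m.GetText(); t != nil {
				fmt.Printf("  mention %q at offset %d\n", t.Content, t.BeginOffset)
			}
		}
	}
}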
+
+// Represents the smallest syntactic building block of the text.
+message Token {
+  // The token text.
+  TextSpan text = 1;
+
+  // Parts of speech tag for this token.
+  PartOfSpeech part_of_speech = 2;
+
+  // Dependency tree parse for this token.
+  DependencyEdge dependency_edge = 3;
+
+  // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
+  string lemma = 4;
+}
+
+// Represents the feeling associated with the entire text or entities in
+// the text.
+message Sentiment {
+  // A non-negative number in the [0, +inf) range, which represents
+  // the absolute magnitude of sentiment regardless of score (positive or
+  // negative).
+  float magnitude = 2;
+
+  // Sentiment score between -1.0 (negative sentiment) and 1.0
+  // (positive sentiment).
+  float score = 3;
+}
+
+// Represents part of speech information for a token. Parts of speech
+// are as defined in
+// http://www.lrec-conf.org/proceedings/lrec2012/pdf/274_Paper.pdf
+message PartOfSpeech {
+  // The part of speech tags enum.
+  enum Tag {
+    // Unknown
+    UNKNOWN = 0;
+
+    // Adjective
+    ADJ = 1;
+
+    // Adposition (preposition and postposition)
+    ADP = 2;
+
+    // Adverb
+    ADV = 3;
+
+    // Conjunction
+    CONJ = 4;
+
+    // Determiner
+    DET = 5;
+
+    // Noun (common and proper)
+    NOUN = 6;
+
+    // Cardinal number
+    NUM = 7;
+
+    // Pronoun
+    PRON = 8;
+
+    // Particle or other function word
+    PRT = 9;
+
+    // Punctuation
+    PUNCT = 10;
+
+    // Verb (all tenses and modes)
+    VERB = 11;
+
+    // Other: foreign words, typos, abbreviations
+    X = 12;
+
+    // Affix
+    AFFIX = 13;
+  }
+
+  // The characteristic of a verb that expresses time flow during an event.
+  enum Aspect {
+    // Aspect is not applicable in the analyzed language or is not predicted.
+    ASPECT_UNKNOWN = 0;
+
+    // Perfective
+    PERFECTIVE = 1;
+
+    // Imperfective
+    IMPERFECTIVE = 2;
+
+    // Progressive
+    PROGRESSIVE = 3;
+  }
+
+  // The grammatical function performed by a noun or pronoun in a phrase,
+  // clause, or sentence. In some languages, other parts of speech, such as
+  // adjective and determiner, take case inflection in agreement with the noun.
+  enum Case {
+    // Case is not applicable in the analyzed language or is not predicted.
+    CASE_UNKNOWN = 0;
+
+    // Accusative
+    ACCUSATIVE = 1;
+
+    // Adverbial
+    ADVERBIAL = 2;
+
+    // Complementive
+    COMPLEMENTIVE = 3;
+
+    // Dative
+    DATIVE = 4;
+
+    // Genitive
+    GENITIVE = 5;
+
+    // Instrumental
+    INSTRUMENTAL = 6;
+
+    // Locative
+    LOCATIVE = 7;
+
+    // Nominative
+    NOMINATIVE = 8;
+
+    // Oblique
+    OBLIQUE = 9;
+
+    // Partitive
+    PARTITIVE = 10;
+
+    // Prepositional
+    PREPOSITIONAL = 11;
+
+    // Reflexive
+    REFLEXIVE_CASE = 12;
+
+    // Relative
+    RELATIVE_CASE = 13;
+
+    // Vocative
+    VOCATIVE = 14;
+  }
+
+  // Depending on the language, Form can categorize different forms of verbs,
+  // adjectives, adverbs, and so on. For example, it can categorize inflected
+  // endings of verbs and adjectives, or distinguish between short and long
+  // forms of adjectives and participles.
+  enum Form {
+    // Form is not applicable in the analyzed language or is not predicted.
+    FORM_UNKNOWN = 0;
+
+    // Adnomial
+    ADNOMIAL = 1;
+
+    // Auxiliary
+    AUXILIARY = 2;
+
+    // Complementizer
+    COMPLEMENTIZER = 3;
+
+    // Final ending
+    FINAL_ENDING = 4;
+
+    // Gerund
+    GERUND = 5;
+
+    // Realis
+    REALIS = 6;
+
+    // Irrealis
+    IRREALIS = 7;
+
+    // Short form
+    SHORT = 8;
+
+    // Long form
+    LONG = 9;
+
+    // Order form
+    ORDER = 10;
+
+    // Specific form
+    SPECIFIC = 11;
+  }
+
+  // Gender classes of nouns reflected in the behaviour of associated words.
+  enum Gender {
+    // Gender is not applicable in the analyzed language or is not predicted.
+    GENDER_UNKNOWN = 0;
+
+    // Feminine
+    FEMININE = 1;
+
+    // Masculine
+    MASCULINE = 2;
+
+    // Neuter
+    NEUTER = 3;
+  }
+
+  // The grammatical feature of verbs, used for showing modality and attitude.
+  enum Mood {
+    // Mood is not applicable in the analyzed language or is not predicted.
+    MOOD_UNKNOWN = 0;
+
+    // Conditional
+    CONDITIONAL_MOOD = 1;
+
+    // Imperative
+    IMPERATIVE = 2;
+
+    // Indicative
+    INDICATIVE = 3;
+
+    // Interrogative
+    INTERROGATIVE = 4;
+
+    // Jussive
+    JUSSIVE = 5;
+
+    // Subjunctive
+    SUBJUNCTIVE = 6;
+  }
+
+  // Count distinctions.
+  enum Number {
+    // Number is not applicable in the analyzed language or is not predicted.
+    NUMBER_UNKNOWN = 0;
+
+    // Singular
+    SINGULAR = 1;
+
+    // Plural
+    PLURAL = 2;
+
+    // Dual
+    DUAL = 3;
+  }
+
+  // The distinction between the speaker, second person, third person, etc.
+  enum Person {
+    // Person is not applicable in the analyzed language or is not predicted.
+    PERSON_UNKNOWN = 0;
+
+    // First
+    FIRST = 1;
+
+    // Second
+    SECOND = 2;
+
+    // Third
+    THIRD = 3;
+
+    // Reflexive
+    REFLEXIVE_PERSON = 4;
+  }
+
+  // This category shows if the token is part of a proper name.
+  enum Proper {
+    // Proper is not applicable in the analyzed language or is not predicted.
+    PROPER_UNKNOWN = 0;
+
+    // Proper
+    PROPER = 1;
+
+    // Not proper
+    NOT_PROPER = 2;
+  }
+
+  // Reciprocal features of a pronoun.
+  enum Reciprocity {
+    // Reciprocity is not applicable in the analyzed language or is not
+    // predicted.
+    RECIPROCITY_UNKNOWN = 0;
+
+    // Reciprocal
+    RECIPROCAL = 1;
+
+    // Non-reciprocal
+    NON_RECIPROCAL = 2;
+  }
+
+  // Time reference.
+  enum Tense {
+    // Tense is not applicable in the analyzed language or is not predicted.
+    TENSE_UNKNOWN = 0;
+
+    // Conditional
+    CONDITIONAL_TENSE = 1;
+
+    // Future
+    FUTURE = 2;
+
+    // Past
+    PAST = 3;
+
+    // Present
+    PRESENT = 4;
+
+    // Imperfect
+    IMPERFECT = 5;
+
+    // Pluperfect
+    PLUPERFECT = 6;
+  }
+
+  // The relationship between the action that a verb expresses and the
+  // participants identified by its arguments.
+  enum Voice {
+    // Voice is not applicable in the analyzed language or is not predicted.
+    VOICE_UNKNOWN = 0;
+
+    // Active
+    ACTIVE = 1;
+
+    // Causative
+    CAUSATIVE = 2;
+
+    // Passive
+    PASSIVE = 3;
+  }
+
+  // The part of speech tag.
+  Tag tag = 1;
+
+  // The grammatical aspect.
+  Aspect aspect = 2;
+
+  // The grammatical case.
+  Case case = 3;
+
+  // The grammatical form.
+  Form form = 4;
+
+  // The grammatical gender.
+  Gender gender = 5;
+
+  // The grammatical mood.
+  Mood mood = 6;
+
+  // The grammatical number.
+  Number number = 7;
+
+  // The grammatical person.
+  Person person = 8;
+
+  // The grammatical properness.
+  Proper proper = 9;
+
+  // The grammatical reciprocity.
+  Reciprocity reciprocity = 10;
+
+  // The grammatical tense.
+  Tense tense = 11;
+
+  // The grammatical voice.
+  Voice voice = 12;
+}
+
+// Represents dependency parse tree information for a token. (For more
+// information on dependency labels, see
+// http://www.aclweb.org/anthology/P13-2017)
+message DependencyEdge {
+  // The parse label enum for the token.
+  enum Label {
+    // Unknown
+    UNKNOWN = 0;
+
+    // Abbreviation modifier
+    ABBREV = 1;
+
+    // Adjectival complement
+    ACOMP = 2;
+
+    // Adverbial clause modifier
+    ADVCL = 3;
+
+    // Adverbial modifier
+    ADVMOD = 4;
+
+    // Adjectival modifier of an NP
+    AMOD = 5;
+
+    // Appositional modifier of an NP
+    APPOS = 6;
+
+    // Attribute dependent of a copular verb
+    ATTR = 7;
+
+    // Auxiliary (non-main) verb
+    AUX = 8;
+
+    // Passive auxiliary
+    AUXPASS = 9;
+
+    // Coordinating conjunction
+    CC = 10;
+
+    // Clausal complement of a verb or adjective
+    CCOMP = 11;
+
+    // Conjunct
+    CONJ = 12;
+
+    // Clausal subject
+    CSUBJ = 13;
+
+    // Clausal passive subject
+    CSUBJPASS = 14;
+
+    // Dependency (unable to determine)
+    DEP = 15;
+
+    // Determiner
+    DET = 16;
+
+    // Discourse
+    DISCOURSE = 17;
+
+    // Direct object
+    DOBJ = 18;
+
+    // Expletive
+    EXPL = 19;
+
+    // Goes with (part of a word in a text not well edited)
+    GOESWITH = 20;
+
+    // Indirect object
+    IOBJ = 21;
+
+    // Marker (word introducing a subordinate clause)
+    MARK = 22;
+
+    // Multi-word expression
+    MWE = 23;
+
+    // Multi-word verbal expression
+    MWV = 24;
+
+    // Negation modifier
+    NEG = 25;
+
+    // Noun compound modifier
+    NN = 26;
+
+    // Noun phrase used as an adverbial modifier
+    NPADVMOD = 27;
+
+    // Nominal subject
+    NSUBJ = 28;
+
+    // Passive nominal subject
+    NSUBJPASS = 29;
+
+    // Numeric modifier of a noun
+    NUM = 30;
+
+    // Element of compound number
+    NUMBER = 31;
+
+    // Punctuation mark
+    P = 32;
+
+    // Parataxis relation
+    PARATAXIS = 33;
+
+    // Participial modifier
+    PARTMOD = 34;
+
+    // The complement of a preposition is a clause
+    PCOMP = 35;
+
+    // Object of a preposition
+    POBJ = 36;
+
+    // Possession modifier
+    POSS = 37;
+
+    // Postverbal negative particle
+    POSTNEG = 38;
+
+    // Predicate complement
+    PRECOMP = 39;
+
+    // Preconjunct
+    PRECONJ = 40;
+
+    // Predeterminer
+    PREDET = 41;
+
+    // Prefix
+    PREF = 42;
+
+    // Prepositional modifier
+    PREP = 43;
+
+    // The relationship between a verb and verbal morpheme
+    PRONL = 44;
+
+    // Particle
+    PRT = 45;
+
+    // Associative or possessive marker
+    PS = 46;
+
+    // Quantifier phrase modifier
+    QUANTMOD = 47;
+
+    // Relative clause modifier
+    RCMOD = 48;
+
+    // Complementizer in relative clause
+    RCMODREL = 49;
+
+    // Ellipsis without a preceding predicate
+    RDROP = 50;
+
+    // Referent
+    REF = 51;
+
+    // Remnant
+    REMNANT = 52;
+
+    // Reparandum
+    REPARANDUM = 53;
+
+    // Root
+    ROOT = 54;
+
+    // Suffix specifying a unit of number
+    SNUM = 55;
+
+    // Suffix
+    SUFF = 56;
+
+    // Temporal modifier
+    TMOD = 57;
+
+    // Topic marker
+    TOPIC = 58;
+
+    // Clause headed by an infinite form of the verb that modifies a noun
+    VMOD = 59;
+
+    // Vocative
+    VOCATIVE = 60;
+
+    // Open clausal complement
+    XCOMP = 61;
+
+    // Name suffix
+    SUFFIX = 62;
+
+    // Name title
+    TITLE = 63;
+
+    // Adverbial phrase modifier
+    ADVPHMOD = 64;
+
+    // Causative auxiliary
+    AUXCAUS = 65;
+
+    // Helper auxiliary
+    AUXVV = 66;
+
+    // Rentaishi (Prenominal modifier)
+    DTMOD = 67;
+
+    // Foreign words
+    FOREIGN = 68;
+
+    // Keyword
+    KW = 69;
+
+    // List for chains of comparable items
+    LIST = 70;
+
+    // Nominalized clause
+    NOMC = 71;
+
+    // Nominalized clausal subject
+    NOMCSUBJ = 72;
+
+    // Nominalized clausal passive
+    NOMCSUBJPASS = 73;
+
+    // Compound of numeric modifier
+    NUMC = 74;
+
+    // Copula
+    COP = 75;
+
+    // Dislocated relation (for fronted/topicalized elements)
+    DISLOCATED = 76;
+  }
+
+  // Represents the head of this token in the dependency tree.
+  // This is the index of the token which has an arc going to this token.
+  // The index is the position of the token in the array of tokens returned
+  // by the API method. If this token is a root token, then the
+  // `head_token_index` is its own index.
+  int32 head_token_index = 1;
+
+  // The parse label for the token.
+  Label label = 2;
+}
+
+// Represents a mention for an entity in the text. Currently, proper noun
+// mentions are supported.
+message EntityMention {
+  // The supported types of mentions.
+  enum Type {
+    // Unknown
+    TYPE_UNKNOWN = 0;
+
+    // Proper name
+    PROPER = 1;
+
+    // Common noun (or noun compound)
+    COMMON = 2;
+  }
+
+  // The mention text.
+  TextSpan text = 1;
+
+  // The type of the entity mention.
+  Type type = 2;
+}
+
+// Represents an output piece of text.
+message TextSpan {
+  // The content of the output text.
+  string content = 1;
+
+  // The API calculates the beginning offset of the content in the original
+  // document according to the [EncodingType][google.cloud.language.v1.EncodingType] specified in the API request.
+  int32 begin_offset = 2;
+}
+
+// The sentiment analysis request message.
+message AnalyzeSentimentRequest {
+  // Input document. Currently, `analyzeSentiment` only supports English text
+  // ([Document.language][google.cloud.language.v1.Document.language]="EN").
+  Document document = 1;
+
+  // The encoding type used by the API to calculate sentence offsets.
+  EncodingType encoding_type = 2;
+}
+
+// The sentiment analysis response message.
+message AnalyzeSentimentResponse {
+  // The overall sentiment of the input document.
+  Sentiment document_sentiment = 1;
+
+  // The language of the text, which will be the same as the language specified
+  // in the request or, if not specified, the automatically-detected language.
+  // See `Document.language` field for more details.
+  string language = 2;
+
+  // The sentiment for all the sentences in the document.
+  repeated Sentence sentences = 3;
+}
+
+// The entity analysis request message.
+message AnalyzeEntitiesRequest {
+  // Input document.
+  Document document = 1;
+
+  // The encoding type used by the API to calculate offsets.
+  EncodingType encoding_type = 2;
+}
+
+// The entity analysis response message.
+message AnalyzeEntitiesResponse {
+  // The recognized entities in the input document.
+  repeated Entity entities = 1;
+
+  // The language of the text, which will be the same as the language specified
+  // in the request or, if not specified, the automatically-detected language.
+  // See `Document.language` field for more details.
+  string language = 2;
+}
+
+// The syntax analysis request message.
+message AnalyzeSyntaxRequest {
+  // Input document.
+  Document document = 1;
+
+  // The encoding type used by the API to calculate offsets.
+  EncodingType encoding_type = 2;
+}
+
+// The syntax analysis response message.
+message AnalyzeSyntaxResponse {
+  // Sentences in the input document.
+  repeated Sentence sentences = 1;
+
+  // Tokens, along with their syntactic information, in the input document.
+  repeated Token tokens = 2;
+
+  // The language of the text, which will be the same as the language specified
+  // in the request or, if not specified, the automatically-detected language.
+  // See `Document.language` field for more details.
+  string language = 3;
+}
+
+// The request message for the text annotation API, which can perform multiple
+// analysis types (sentiment, entities, and syntax) in one call.
+message AnnotateTextRequest {
+  // All available features for sentiment, syntax, and semantic analysis.
+  // Setting each one to true will enable that specific analysis for the input.
+  message Features {
+    // Extract syntax information.
+    bool extract_syntax = 1;
+
+    // Extract entities.
+    bool extract_entities = 2;
+
+    // Extract document-level sentiment.
+    bool extract_document_sentiment = 3;
+  }
+
+  // Input document.
+  Document document = 1;
+
+  // The enabled features.
+  Features features = 2;
+
+  // The encoding type used by the API to calculate offsets.
+  EncodingType encoding_type = 3;
+}
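
Each Features flag toggles one analysis independently, so a single AnnotateText request can ask for any combination. A minimal sketch that enables all three and just prints the request in proto text form, so it runs without a network call:

package main

import (
	"fmt"

	proto "github.com/golang/protobuf/proto"

	language "google.golang.org/genproto/googleapis/cloud/language/v1"
)

func main() {
	req := &language.AnnotateTextRequest{
		Document: &language.Document{
			Type:   language.Document_PLAIN_TEXT,
			Source: &language.Document_Content{Content: "Go is expressive, concise, clean, and efficient."},
		},
		// Turn on all three analyses for this one request.
		Features: &language.AnnotateTextRequest_Features{
			ExtractSyntax:            true,
			ExtractEntities:          true,
			ExtractDocumentSentiment: true,
		},
		EncodingType: language.EncodingType_UTF8,
	}
	// Serialize to the proto text format instead of sending it anywhere.
	fmt.Println(proto.CompactTextString(req))
}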
+
+// The text annotations response message.
+message AnnotateTextResponse {
+  // Sentences in the input document. Populated if the user enables
+  // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax].
+  repeated Sentence sentences = 1;
+
+  // Tokens, along with their syntactic information, in the input document.
+  // Populated if the user enables
+  // [AnnotateTextRequest.Features.extract_syntax][google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax].
+  repeated Token tokens = 2;
+
+  // Entities, along with their semantic information, in the input document.
+  // Populated if the user enables
+  // [AnnotateTextRequest.Features.extract_entities][google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities].
+  repeated Entity entities = 3;
+
+  // The overall sentiment for the document. Populated if the user enables
+  // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment].
+  Sentiment document_sentiment = 4;
+
+  // The language of the text, which will be the same as the language specified
+  // in the request or, if not specified, the automatically-detected language.
+  // See `Document.language` field for more details.
+  string language = 5;
+}
+
+// Represents the text encoding that the caller uses to process the output.
+// Providing an `EncodingType` is recommended because the API provides the
+// beginning offsets for various outputs, such as tokens and mentions, and
+// languages that natively use different text encodings may access offsets
+// differently.
+enum EncodingType {
+  // If `EncodingType` is not specified, encoding-dependent information (such as
+  // `begin_offset`) will be set at `-1`.
+  NONE = 0;
+
+  // Encoding-dependent information (such as `begin_offset`) is calculated based
+  // on the UTF-8 encoding of the input. C++ and Go are examples of languages
+  // that use this encoding natively.
+  UTF8 = 1;
+
+  // Encoding-dependent information (such as `begin_offset`) is calculated based
+  // on the UTF-16 encoding of the input. Java and Javascript are examples of
+  // languages that use this encoding natively.
+  UTF16 = 2;
+
+  // Encoding-dependent information (such as `begin_offset`) is calculated based
+  // on the UTF-32 encoding of the input. Python is an example of a language
+  // that uses this encoding natively.
+  UTF32 = 3;
+}
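
Since `begin_offset` is expressed in code units of the requested EncodingType, the same span starts at a different numeric offset depending on that choice. A small Go illustration (the sample string and offsets are ours, not API output):

package main

import (
	"fmt"
	"strings"
	"unicode/utf16"
)

func main() {
	// "é" is 2 bytes in UTF-8 but a single UTF-16 code unit and a single code point.
	text := "héllo world"
	byteIdx := strings.Index(text, "world")
	prefix := text[:byteIdx]

	fmt.Println("UTF8 offset: ", byteIdx)                           // counts bytes
	fmt.Println("UTF16 offset:", len(utf16.Encode([]rune(prefix)))) // counts UTF-16 code units
	fmt.Println("UTF32 offset:", len([]rune(prefix)))               // counts code points
}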
diff --git a/googleapis/cloud/language/v1beta1/language_service.pb.go b/googleapis/cloud/language/v1beta1/language_service.pb.go
index 65361d4706f8efe2464308291929c3f58b4923fc..62a3c88f17d641d3053f5e18daec93e8f180e03b 100644
--- a/googleapis/cloud/language/v1beta1/language_service.pb.go
+++ b/googleapis/cloud/language/v1beta1/language_service.pb.go
@@ -3,7 +3,7 @@
 // DO NOT EDIT!
 
 /*
-Package v1beta1 is a generated protocol buffer package.
+Package google_cloud_language_v1beta1 is a generated protocol buffer package.
 
 It is generated from these files:
 	google.golang.org/genproto/googleapis/cloud/language/v1beta1/language_service.proto
@@ -22,10 +22,12 @@ It has these top-level messages:
 	AnalyzeSentimentResponse
 	AnalyzeEntitiesRequest
 	AnalyzeEntitiesResponse
+	AnalyzeSyntaxRequest
+	AnalyzeSyntaxResponse
 	AnnotateTextRequest
 	AnnotateTextResponse
 */
-package v1beta1 // import "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
+package google_cloud_language_v1beta1 // import "google.golang.org/genproto/googleapis/cloud/language/v1beta1"
 
 import proto "github.com/golang/protobuf/proto"
 import fmt "fmt"
@@ -239,6 +241,461 @@ func (x PartOfSpeech_Tag) String() string {
 }
 func (PartOfSpeech_Tag) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 0} }
 
+// The characteristic of a verb that expresses time flow during an event.
+type PartOfSpeech_Aspect int32
+
+const (
+	// Aspect is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_ASPECT_UNKNOWN PartOfSpeech_Aspect = 0
+	// Perfective
+	PartOfSpeech_PERFECTIVE PartOfSpeech_Aspect = 1
+	// Imperfective
+	PartOfSpeech_IMPERFECTIVE PartOfSpeech_Aspect = 2
+	// Progressive
+	PartOfSpeech_PROGRESSIVE PartOfSpeech_Aspect = 3
+)
+
+var PartOfSpeech_Aspect_name = map[int32]string{
+	0: "ASPECT_UNKNOWN",
+	1: "PERFECTIVE",
+	2: "IMPERFECTIVE",
+	3: "PROGRESSIVE",
+}
+var PartOfSpeech_Aspect_value = map[string]int32{
+	"ASPECT_UNKNOWN": 0,
+	"PERFECTIVE":     1,
+	"IMPERFECTIVE":   2,
+	"PROGRESSIVE":    3,
+}
+
+func (x PartOfSpeech_Aspect) String() string {
+	return proto.EnumName(PartOfSpeech_Aspect_name, int32(x))
+}
+func (PartOfSpeech_Aspect) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 1} }
+
+// The grammatical function performed by a noun or pronoun in a phrase,
+// clause, or sentence. In some languages, other parts of speech, such as
+// adjective and determiner, take case inflection in agreement with the noun.
+type PartOfSpeech_Case int32
+
+const (
+	// Case is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_CASE_UNKNOWN PartOfSpeech_Case = 0
+	// Accusative
+	PartOfSpeech_ACCUSATIVE PartOfSpeech_Case = 1
+	// Adverbial
+	PartOfSpeech_ADVERBIAL PartOfSpeech_Case = 2
+	// Complementive
+	PartOfSpeech_COMPLEMENTIVE PartOfSpeech_Case = 3
+	// Dative
+	PartOfSpeech_DATIVE PartOfSpeech_Case = 4
+	// Genitive
+	PartOfSpeech_GENITIVE PartOfSpeech_Case = 5
+	// Instrumental
+	PartOfSpeech_INSTRUMENTAL PartOfSpeech_Case = 6
+	// Locative
+	PartOfSpeech_LOCATIVE PartOfSpeech_Case = 7
+	// Nominative
+	PartOfSpeech_NOMINATIVE PartOfSpeech_Case = 8
+	// Oblique
+	PartOfSpeech_OBLIQUE PartOfSpeech_Case = 9
+	// Partitive
+	PartOfSpeech_PARTITIVE PartOfSpeech_Case = 10
+	// Prepositional
+	PartOfSpeech_PREPOSITIONAL PartOfSpeech_Case = 11
+	// Reflexive
+	PartOfSpeech_REFLEXIVE_CASE PartOfSpeech_Case = 12
+	// Relative
+	PartOfSpeech_RELATIVE_CASE PartOfSpeech_Case = 13
+	// Vocative
+	PartOfSpeech_VOCATIVE PartOfSpeech_Case = 14
+)
+
+var PartOfSpeech_Case_name = map[int32]string{
+	0:  "CASE_UNKNOWN",
+	1:  "ACCUSATIVE",
+	2:  "ADVERBIAL",
+	3:  "COMPLEMENTIVE",
+	4:  "DATIVE",
+	5:  "GENITIVE",
+	6:  "INSTRUMENTAL",
+	7:  "LOCATIVE",
+	8:  "NOMINATIVE",
+	9:  "OBLIQUE",
+	10: "PARTITIVE",
+	11: "PREPOSITIONAL",
+	12: "REFLEXIVE_CASE",
+	13: "RELATIVE_CASE",
+	14: "VOCATIVE",
+}
+var PartOfSpeech_Case_value = map[string]int32{
+	"CASE_UNKNOWN":   0,
+	"ACCUSATIVE":     1,
+	"ADVERBIAL":      2,
+	"COMPLEMENTIVE":  3,
+	"DATIVE":         4,
+	"GENITIVE":       5,
+	"INSTRUMENTAL":   6,
+	"LOCATIVE":       7,
+	"NOMINATIVE":     8,
+	"OBLIQUE":        9,
+	"PARTITIVE":      10,
+	"PREPOSITIONAL":  11,
+	"REFLEXIVE_CASE": 12,
+	"RELATIVE_CASE":  13,
+	"VOCATIVE":       14,
+}
+
+func (x PartOfSpeech_Case) String() string {
+	return proto.EnumName(PartOfSpeech_Case_name, int32(x))
+}
+func (PartOfSpeech_Case) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 2} }
+
+// Depending on the language, Form can categorize different forms of verbs,
+// adjectives, adverbs, and so on. For example, it can categorize inflected
+// endings of verbs and adjectives, or distinguish between short and long
+// forms of adjectives and participles.
+type PartOfSpeech_Form int32
+
+const (
+	// Form is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_FORM_UNKNOWN PartOfSpeech_Form = 0
+	// Adnomial
+	PartOfSpeech_ADNOMIAL PartOfSpeech_Form = 1
+	// Auxiliary
+	PartOfSpeech_AUXILIARY PartOfSpeech_Form = 2
+	// Complementizer
+	PartOfSpeech_COMPLEMENTIZER PartOfSpeech_Form = 3
+	// Final ending
+	PartOfSpeech_FINAL_ENDING PartOfSpeech_Form = 4
+	// Gerund
+	PartOfSpeech_GERUND PartOfSpeech_Form = 5
+	// Realis
+	PartOfSpeech_REALIS PartOfSpeech_Form = 6
+	// Irrealis
+	PartOfSpeech_IRREALIS PartOfSpeech_Form = 7
+	// Short form
+	PartOfSpeech_SHORT PartOfSpeech_Form = 8
+	// Long form
+	PartOfSpeech_LONG PartOfSpeech_Form = 9
+	// Order form
+	PartOfSpeech_ORDER PartOfSpeech_Form = 10
+	// Specific form
+	PartOfSpeech_SPECIFIC PartOfSpeech_Form = 11
+)
+
+var PartOfSpeech_Form_name = map[int32]string{
+	0:  "FORM_UNKNOWN",
+	1:  "ADNOMIAL",
+	2:  "AUXILIARY",
+	3:  "COMPLEMENTIZER",
+	4:  "FINAL_ENDING",
+	5:  "GERUND",
+	6:  "REALIS",
+	7:  "IRREALIS",
+	8:  "SHORT",
+	9:  "LONG",
+	10: "ORDER",
+	11: "SPECIFIC",
+}
+var PartOfSpeech_Form_value = map[string]int32{
+	"FORM_UNKNOWN":   0,
+	"ADNOMIAL":       1,
+	"AUXILIARY":      2,
+	"COMPLEMENTIZER": 3,
+	"FINAL_ENDING":   4,
+	"GERUND":         5,
+	"REALIS":         6,
+	"IRREALIS":       7,
+	"SHORT":          8,
+	"LONG":           9,
+	"ORDER":          10,
+	"SPECIFIC":       11,
+}
+
+func (x PartOfSpeech_Form) String() string {
+	return proto.EnumName(PartOfSpeech_Form_name, int32(x))
+}
+func (PartOfSpeech_Form) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 3} }
+
+// Gender classes of nouns reflected in the behaviour of associated words.
+type PartOfSpeech_Gender int32
+
+const (
+	// Gender is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_GENDER_UNKNOWN PartOfSpeech_Gender = 0
+	// Feminine
+	PartOfSpeech_FEMININE PartOfSpeech_Gender = 1
+	// Masculine
+	PartOfSpeech_MASCULINE PartOfSpeech_Gender = 2
+	// Neuter
+	PartOfSpeech_NEUTER PartOfSpeech_Gender = 3
+)
+
+var PartOfSpeech_Gender_name = map[int32]string{
+	0: "GENDER_UNKNOWN",
+	1: "FEMININE",
+	2: "MASCULINE",
+	3: "NEUTER",
+}
+var PartOfSpeech_Gender_value = map[string]int32{
+	"GENDER_UNKNOWN": 0,
+	"FEMININE":       1,
+	"MASCULINE":      2,
+	"NEUTER":         3,
+}
+
+func (x PartOfSpeech_Gender) String() string {
+	return proto.EnumName(PartOfSpeech_Gender_name, int32(x))
+}
+func (PartOfSpeech_Gender) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 4} }
+
+// The grammatical feature of verbs, used for showing modality and attitude.
+type PartOfSpeech_Mood int32
+
+const (
+	// Mood is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_MOOD_UNKNOWN PartOfSpeech_Mood = 0
+	// Conditional
+	PartOfSpeech_CONDITIONAL_MOOD PartOfSpeech_Mood = 1
+	// Imperative
+	PartOfSpeech_IMPERATIVE PartOfSpeech_Mood = 2
+	// Indicative
+	PartOfSpeech_INDICATIVE PartOfSpeech_Mood = 3
+	// Interrogative
+	PartOfSpeech_INTERROGATIVE PartOfSpeech_Mood = 4
+	// Jussive
+	PartOfSpeech_JUSSIVE PartOfSpeech_Mood = 5
+	// Subjunctive
+	PartOfSpeech_SUBJUNCTIVE PartOfSpeech_Mood = 6
+)
+
+var PartOfSpeech_Mood_name = map[int32]string{
+	0: "MOOD_UNKNOWN",
+	1: "CONDITIONAL_MOOD",
+	2: "IMPERATIVE",
+	3: "INDICATIVE",
+	4: "INTERROGATIVE",
+	5: "JUSSIVE",
+	6: "SUBJUNCTIVE",
+}
+var PartOfSpeech_Mood_value = map[string]int32{
+	"MOOD_UNKNOWN":     0,
+	"CONDITIONAL_MOOD": 1,
+	"IMPERATIVE":       2,
+	"INDICATIVE":       3,
+	"INTERROGATIVE":    4,
+	"JUSSIVE":          5,
+	"SUBJUNCTIVE":      6,
+}
+
+func (x PartOfSpeech_Mood) String() string {
+	return proto.EnumName(PartOfSpeech_Mood_name, int32(x))
+}
+func (PartOfSpeech_Mood) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 5} }
+
+// Count distinctions.
+type PartOfSpeech_Number int32
+
+const (
+	// Number is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_NUMBER_UNKNOWN PartOfSpeech_Number = 0
+	// Singular
+	PartOfSpeech_SINGULAR PartOfSpeech_Number = 1
+	// Plural
+	PartOfSpeech_PLURAL PartOfSpeech_Number = 2
+	// Dual
+	PartOfSpeech_DUAL PartOfSpeech_Number = 3
+)
+
+var PartOfSpeech_Number_name = map[int32]string{
+	0: "NUMBER_UNKNOWN",
+	1: "SINGULAR",
+	2: "PLURAL",
+	3: "DUAL",
+}
+var PartOfSpeech_Number_value = map[string]int32{
+	"NUMBER_UNKNOWN": 0,
+	"SINGULAR":       1,
+	"PLURAL":         2,
+	"DUAL":           3,
+}
+
+func (x PartOfSpeech_Number) String() string {
+	return proto.EnumName(PartOfSpeech_Number_name, int32(x))
+}
+func (PartOfSpeech_Number) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 6} }
+
+// The distinction between the speaker, second person, third person, etc.
+type PartOfSpeech_Person int32
+
+const (
+	// Person is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_PERSON_UNKNOWN PartOfSpeech_Person = 0
+	// First
+	PartOfSpeech_FIRST PartOfSpeech_Person = 1
+	// Second
+	PartOfSpeech_SECOND PartOfSpeech_Person = 2
+	// Third
+	PartOfSpeech_THIRD PartOfSpeech_Person = 3
+	// Reflexive
+	PartOfSpeech_REFLEXIVE_PERSON PartOfSpeech_Person = 4
+)
+
+var PartOfSpeech_Person_name = map[int32]string{
+	0: "PERSON_UNKNOWN",
+	1: "FIRST",
+	2: "SECOND",
+	3: "THIRD",
+	4: "REFLEXIVE_PERSON",
+}
+var PartOfSpeech_Person_value = map[string]int32{
+	"PERSON_UNKNOWN":   0,
+	"FIRST":            1,
+	"SECOND":           2,
+	"THIRD":            3,
+	"REFLEXIVE_PERSON": 4,
+}
+
+func (x PartOfSpeech_Person) String() string {
+	return proto.EnumName(PartOfSpeech_Person_name, int32(x))
+}
+func (PartOfSpeech_Person) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 7} }
+
+// This category shows if the token is part of a proper name.
+type PartOfSpeech_Proper int32
+
+const (
+	// Proper is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_PROPER_UNKNOWN PartOfSpeech_Proper = 0
+	// Proper
+	PartOfSpeech_PROPER PartOfSpeech_Proper = 1
+	// Not proper
+	PartOfSpeech_NOT_PROPER PartOfSpeech_Proper = 2
+)
+
+var PartOfSpeech_Proper_name = map[int32]string{
+	0: "PROPER_UNKNOWN",
+	1: "PROPER",
+	2: "NOT_PROPER",
+}
+var PartOfSpeech_Proper_value = map[string]int32{
+	"PROPER_UNKNOWN": 0,
+	"PROPER":         1,
+	"NOT_PROPER":     2,
+}
+
+func (x PartOfSpeech_Proper) String() string {
+	return proto.EnumName(PartOfSpeech_Proper_name, int32(x))
+}
+func (PartOfSpeech_Proper) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 8} }
+
+// Reciprocal features of a pronoun.
+type PartOfSpeech_Reciprocity int32
+
+const (
+	// Reciprocity is not applicable in the analyzed language or is not
+	// predicted.
+	PartOfSpeech_RECIPROCITY_UNKNOWN PartOfSpeech_Reciprocity = 0
+	// Reciprocal
+	PartOfSpeech_RECIPROCAL PartOfSpeech_Reciprocity = 1
+	// Non-reciprocal
+	PartOfSpeech_NON_RECIPROCAL PartOfSpeech_Reciprocity = 2
+)
+
+var PartOfSpeech_Reciprocity_name = map[int32]string{
+	0: "RECIPROCITY_UNKNOWN",
+	1: "RECIPROCAL",
+	2: "NON_RECIPROCAL",
+}
+var PartOfSpeech_Reciprocity_value = map[string]int32{
+	"RECIPROCITY_UNKNOWN": 0,
+	"RECIPROCAL":          1,
+	"NON_RECIPROCAL":      2,
+}
+
+func (x PartOfSpeech_Reciprocity) String() string {
+	return proto.EnumName(PartOfSpeech_Reciprocity_name, int32(x))
+}
+func (PartOfSpeech_Reciprocity) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 9} }
+
+// Time reference.
+type PartOfSpeech_Tense int32
+
+const (
+	// Tense is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_TENSE_UNKNOWN PartOfSpeech_Tense = 0
+	// Conditional
+	PartOfSpeech_CONDITIONAL_TENSE PartOfSpeech_Tense = 1
+	// Future
+	PartOfSpeech_FUTURE PartOfSpeech_Tense = 2
+	// Past
+	PartOfSpeech_PAST PartOfSpeech_Tense = 3
+	// Present
+	PartOfSpeech_PRESENT PartOfSpeech_Tense = 4
+	// Imperfect
+	PartOfSpeech_IMPERFECT PartOfSpeech_Tense = 5
+	// Pluperfect
+	PartOfSpeech_PLUPERFECT PartOfSpeech_Tense = 6
+)
+
+var PartOfSpeech_Tense_name = map[int32]string{
+	0: "TENSE_UNKNOWN",
+	1: "CONDITIONAL_TENSE",
+	2: "FUTURE",
+	3: "PAST",
+	4: "PRESENT",
+	5: "IMPERFECT",
+	6: "PLUPERFECT",
+}
+var PartOfSpeech_Tense_value = map[string]int32{
+	"TENSE_UNKNOWN":     0,
+	"CONDITIONAL_TENSE": 1,
+	"FUTURE":            2,
+	"PAST":              3,
+	"PRESENT":           4,
+	"IMPERFECT":         5,
+	"PLUPERFECT":        6,
+}
+
+func (x PartOfSpeech_Tense) String() string {
+	return proto.EnumName(PartOfSpeech_Tense_name, int32(x))
+}
+func (PartOfSpeech_Tense) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 10} }
+
+// The relationship between the action that a verb expresses and the
+// participants identified by its arguments.
+type PartOfSpeech_Voice int32
+
+const (
+	// Voice is not applicable in the analyzed language or is not predicted.
+	PartOfSpeech_VOICE_UNKNOWN PartOfSpeech_Voice = 0
+	// Active
+	PartOfSpeech_ACTIVE PartOfSpeech_Voice = 1
+	// Causative
+	PartOfSpeech_CAUSATIVE PartOfSpeech_Voice = 2
+	// Passive
+	PartOfSpeech_PASSIVE PartOfSpeech_Voice = 3
+)
+
+var PartOfSpeech_Voice_name = map[int32]string{
+	0: "VOICE_UNKNOWN",
+	1: "ACTIVE",
+	2: "CAUSATIVE",
+	3: "PASSIVE",
+}
+var PartOfSpeech_Voice_value = map[string]int32{
+	"VOICE_UNKNOWN": 0,
+	"ACTIVE":        1,
+	"CAUSATIVE":     2,
+	"PASSIVE":       3,
+}
+
+func (x PartOfSpeech_Voice) String() string {
+	return proto.EnumName(PartOfSpeech_Voice_name, int32(x))
+}
+func (PartOfSpeech_Voice) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{5, 11} }
+
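+// The paired *_name / *_value maps generated above support simple round-tripping
+// between enum values and their protobuf names via ordinary map lookups; an
+// illustrative snippet (not part of the generated API):
+//
+//	name := PartOfSpeech_Case_name[int32(PartOfSpeech_NOMINATIVE)] // "NOMINATIVE"
+//	c := PartOfSpeech_Case(PartOfSpeech_Case_value["DATIVE"])      // PartOfSpeech_DATIVE
+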
 // The parse label enum for the token.
 type DependencyEdge_Label int32
 
@@ -563,6 +1020,34 @@ func (x DependencyEdge_Label) String() string {
 }
 func (DependencyEdge_Label) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{6, 0} }
 
+// The supported types of mentions.
+type EntityMention_Type int32
+
+const (
+	// Unknown
+	EntityMention_TYPE_UNKNOWN EntityMention_Type = 0
+	// Proper name
+	EntityMention_PROPER EntityMention_Type = 1
+	// Common noun (or noun compound)
+	EntityMention_COMMON EntityMention_Type = 2
+)
+
+var EntityMention_Type_name = map[int32]string{
+	0: "TYPE_UNKNOWN",
+	1: "PROPER",
+	2: "COMMON",
+}
+var EntityMention_Type_value = map[string]int32{
+	"TYPE_UNKNOWN": 0,
+	"PROPER":       1,
+	"COMMON":       2,
+}
+
+func (x EntityMention_Type) String() string {
+	return proto.EnumName(EntityMention_Type_name, int32(x))
+}
+func (EntityMention_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{7, 0} }
+
 // ################################################################ #
 //
 // Represents the input to API methods.
@@ -701,6 +1186,10 @@ func _Document_OneofSizer(msg proto.Message) (n int) {
 type Sentence struct {
 	// The sentence text.
 	Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+	// For calls to [AnalyzeSentiment][] or if
+	// [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment] is set to
+	// true, this field will contain the sentiment for the sentence.
+	Sentiment *Sentiment `protobuf:"bytes,2,opt,name=sentiment" json:"sentiment,omitempty"`
 }
 
 func (m *Sentence) Reset()                    { *m = Sentence{} }
@@ -715,6 +1204,13 @@ func (m *Sentence) GetText() *TextSpan {
 	return nil
 }
 
+func (m *Sentence) GetSentiment() *Sentiment {
+	if m != nil {
+		return m.Sentiment
+	}
+	return nil
+}
+
 // Represents a phrase in the text that is a known entity, such as
 // a person, an organization, or location. The API associates information, such
 // as salience and mentions, with entities.
@@ -725,8 +1221,8 @@ type Entity struct {
 	Type Entity_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1beta1.Entity_Type" json:"type,omitempty"`
 	// Metadata associated with the entity.
 	//
-	// Currently, only Wikipedia URLs are provided, if available.
-	// The associated key is "wikipedia_url".
+	// Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
+	// available. The associated keys are "wikipedia_url" and "mid", respectively.
 	Metadata map[string]string `protobuf:"bytes,3,rep,name=metadata" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
 	// The salience score associated with the entity in the [0, 1.0] range.
 	//
@@ -767,8 +1263,7 @@ type Token struct {
 	PartOfSpeech *PartOfSpeech `protobuf:"bytes,2,opt,name=part_of_speech,json=partOfSpeech" json:"part_of_speech,omitempty"`
 	// Dependency tree parse for this token.
 	DependencyEdge *DependencyEdge `protobuf:"bytes,3,opt,name=dependency_edge,json=dependencyEdge" json:"dependency_edge,omitempty"`
-	// [Lemma](https://en.wikipedia.org/wiki/Lemma_(morphology))
-	// of the token.
+	// [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
 	Lemma string `protobuf:"bytes,4,opt,name=lemma" json:"lemma,omitempty"`
 }
 
@@ -801,13 +1296,17 @@ func (m *Token) GetDependencyEdge() *DependencyEdge {
 // Represents the feeling associated with the entire text or entities in
 // the text.
 type Sentiment struct {
-	// Polarity of the sentiment in the [-1.0, 1.0] range. Larger numbers
-	// represent more positive sentiments.
+	// DEPRECATED FIELD - This field is being deprecated in
+	// favor of score. Please refer to our documentation at
+	// https://cloud.google.com/natural-language/docs for more information.
 	Polarity float32 `protobuf:"fixed32,1,opt,name=polarity" json:"polarity,omitempty"`
 	// A non-negative number in the [0, +inf) range, which represents
-	// the absolute magnitude of sentiment regardless of polarity (positive or
+	// the absolute magnitude of sentiment regardless of score (positive or
 	// negative).
 	Magnitude float32 `protobuf:"fixed32,2,opt,name=magnitude" json:"magnitude,omitempty"`
+	// Sentiment score between -1.0 (negative sentiment) and 1.0
+	// (positive sentiment).
+	Score float32 `protobuf:"fixed32,3,opt,name=score" json:"score,omitempty"`
 }
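+
+// A small illustrative helper (not part of the generated API) showing one way to
+// combine the new Score field with Magnitude; the 0.25 cutoffs are arbitrary
+// example thresholds, not values defined by the service.
+//
+//	func describeSentiment(s *Sentiment) string {
+//		switch {
+//		case s.Score > 0.25:
+//			return fmt.Sprintf("positive (score=%.2f, magnitude=%.2f)", s.Score, s.Magnitude)
+//		case s.Score < -0.25:
+//			return fmt.Sprintf("negative (score=%.2f, magnitude=%.2f)", s.Score, s.Magnitude)
+//		default:
+//			return fmt.Sprintf("neutral or mixed (score=%.2f, magnitude=%.2f)", s.Score, s.Magnitude)
+//		}
+//	}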
 
 func (m *Sentiment) Reset()                    { *m = Sentiment{} }
@@ -819,6 +1318,28 @@ func (*Sentiment) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4
 type PartOfSpeech struct {
 	// The part of speech tag.
 	Tag PartOfSpeech_Tag `protobuf:"varint,1,opt,name=tag,enum=google.cloud.language.v1beta1.PartOfSpeech_Tag" json:"tag,omitempty"`
+	// The grammatical aspect.
+	Aspect PartOfSpeech_Aspect `protobuf:"varint,2,opt,name=aspect,enum=google.cloud.language.v1beta1.PartOfSpeech_Aspect" json:"aspect,omitempty"`
+	// The grammatical case.
+	Case PartOfSpeech_Case `protobuf:"varint,3,opt,name=case,enum=google.cloud.language.v1beta1.PartOfSpeech_Case" json:"case,omitempty"`
+	// The grammatical form.
+	Form PartOfSpeech_Form `protobuf:"varint,4,opt,name=form,enum=google.cloud.language.v1beta1.PartOfSpeech_Form" json:"form,omitempty"`
+	// The grammatical gender.
+	Gender PartOfSpeech_Gender `protobuf:"varint,5,opt,name=gender,enum=google.cloud.language.v1beta1.PartOfSpeech_Gender" json:"gender,omitempty"`
+	// The grammatical mood.
+	Mood PartOfSpeech_Mood `protobuf:"varint,6,opt,name=mood,enum=google.cloud.language.v1beta1.PartOfSpeech_Mood" json:"mood,omitempty"`
+	// The grammatical number.
+	Number PartOfSpeech_Number `protobuf:"varint,7,opt,name=number,enum=google.cloud.language.v1beta1.PartOfSpeech_Number" json:"number,omitempty"`
+	// The grammatical person.
+	Person PartOfSpeech_Person `protobuf:"varint,8,opt,name=person,enum=google.cloud.language.v1beta1.PartOfSpeech_Person" json:"person,omitempty"`
+	// The grammatical properness.
+	Proper PartOfSpeech_Proper `protobuf:"varint,9,opt,name=proper,enum=google.cloud.language.v1beta1.PartOfSpeech_Proper" json:"proper,omitempty"`
+	// The grammatical reciprocity.
+	Reciprocity PartOfSpeech_Reciprocity `protobuf:"varint,10,opt,name=reciprocity,enum=google.cloud.language.v1beta1.PartOfSpeech_Reciprocity" json:"reciprocity,omitempty"`
+	// The grammatical tense.
+	Tense PartOfSpeech_Tense `protobuf:"varint,11,opt,name=tense,enum=google.cloud.language.v1beta1.PartOfSpeech_Tense" json:"tense,omitempty"`
+	// The grammatical voice.
+	Voice PartOfSpeech_Voice `protobuf:"varint,12,opt,name=voice,enum=google.cloud.language.v1beta1.PartOfSpeech_Voice" json:"voice,omitempty"`
 }
 
 func (m *PartOfSpeech) Reset()                    { *m = PartOfSpeech{} }
@@ -848,6 +1369,8 @@ func (*DependencyEdge) Descriptor() ([]byte, []int) { return fileDescriptor0, []
 type EntityMention struct {
 	// The mention text.
 	Text *TextSpan `protobuf:"bytes,1,opt,name=text" json:"text,omitempty"`
+	// The type of the entity mention.
+	Type EntityMention_Type `protobuf:"varint,2,opt,name=type,enum=google.cloud.language.v1beta1.EntityMention_Type" json:"type,omitempty"`
 }
 
 func (m *EntityMention) Reset()                    { *m = EntityMention{} }
@@ -881,6 +1404,9 @@ type AnalyzeSentimentRequest struct {
 	// Input document. Currently, `analyzeSentiment` only supports English text
 	// ([Document.language][google.cloud.language.v1beta1.Document.language]="EN").
 	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The encoding type used by the API to calculate sentence offsets for the
+	// sentence sentiment.
+	EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"`
 }
 
 func (m *AnalyzeSentimentRequest) Reset()                    { *m = AnalyzeSentimentRequest{} }
@@ -902,6 +1428,8 @@ type AnalyzeSentimentResponse struct {
 	// The language of the text, which will be the same as the language specified
 	// in the request or, if not specified, the automatically-detected language.
 	Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
+	// The sentiment for all the sentences in the document.
+	Sentences []*Sentence `protobuf:"bytes,3,rep,name=sentences" json:"sentences,omitempty"`
 }
 
 func (m *AnalyzeSentimentResponse) Reset()                    { *m = AnalyzeSentimentResponse{} }
@@ -916,6 +1444,13 @@ func (m *AnalyzeSentimentResponse) GetDocumentSentiment() *Sentiment {
 	return nil
 }
 
+func (m *AnalyzeSentimentResponse) GetSentences() []*Sentence {
+	if m != nil {
+		return m.Sentences
+	}
+	return nil
+}
+
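+// An illustrative loop (not generated code) over the new per-sentence results;
+// it assumes resp is an *AnalyzeSentimentResponse from a successful
+// AnalyzeSentiment call and relies only on getters generated in this file.
+//
+//	for _, s := range resp.GetSentences() {
+//		if sent := s.GetSentiment(); sent != nil {
+//			fmt.Printf("%q -> score %.2f\n", s.GetText().GetContent(), sent.Score)
+//		}
+//	}
+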
 // The entity analysis request message.
 type AnalyzeEntitiesRequest struct {
 	// Input document.
@@ -942,6 +1477,7 @@ type AnalyzeEntitiesResponse struct {
 	Entities []*Entity `protobuf:"bytes,1,rep,name=entities" json:"entities,omitempty"`
 	// The language of the text, which will be the same as the language specified
 	// in the request or, if not specified, the automatically-detected language.
+	// See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
 	Language string `protobuf:"bytes,2,opt,name=language" json:"language,omitempty"`
 }
 
@@ -957,8 +1493,59 @@ func (m *AnalyzeEntitiesResponse) GetEntities() []*Entity {
 	return nil
 }
 
-// The request message for the advanced text annotation API, which performs all
-// the above plus syntactic analysis.
+// The syntax analysis request message.
+type AnalyzeSyntaxRequest struct {
+	// Input document.
+	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
+	// The encoding type used by the API to calculate offsets.
+	EncodingType EncodingType `protobuf:"varint,2,opt,name=encoding_type,json=encodingType,enum=google.cloud.language.v1beta1.EncodingType" json:"encoding_type,omitempty"`
+}
+
+func (m *AnalyzeSyntaxRequest) Reset()                    { *m = AnalyzeSyntaxRequest{} }
+func (m *AnalyzeSyntaxRequest) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxRequest) ProtoMessage()               {}
+func (*AnalyzeSyntaxRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
+
+func (m *AnalyzeSyntaxRequest) GetDocument() *Document {
+	if m != nil {
+		return m.Document
+	}
+	return nil
+}
+
+// The syntax analysis response message.
+type AnalyzeSyntaxResponse struct {
+	// Sentences in the input document.
+	Sentences []*Sentence `protobuf:"bytes,1,rep,name=sentences" json:"sentences,omitempty"`
+	// Tokens, along with their syntactic information, in the input document.
+	Tokens []*Token `protobuf:"bytes,2,rep,name=tokens" json:"tokens,omitempty"`
+	// The language of the text, which will be the same as the language specified
+	// in the request or, if not specified, the automatically-detected language.
+	// See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
+	Language string `protobuf:"bytes,3,opt,name=language" json:"language,omitempty"`
+}
+
+func (m *AnalyzeSyntaxResponse) Reset()                    { *m = AnalyzeSyntaxResponse{} }
+func (m *AnalyzeSyntaxResponse) String() string            { return proto.CompactTextString(m) }
+func (*AnalyzeSyntaxResponse) ProtoMessage()               {}
+func (*AnalyzeSyntaxResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
+
+func (m *AnalyzeSyntaxResponse) GetSentences() []*Sentence {
+	if m != nil {
+		return m.Sentences
+	}
+	return nil
+}
+
+func (m *AnalyzeSyntaxResponse) GetTokens() []*Token {
+	if m != nil {
+		return m.Tokens
+	}
+	return nil
+}
+
+// The request message for the text annotation API, which can perform multiple
+// analysis types (sentiment, entities, and syntax) in one call.
 type AnnotateTextRequest struct {
 	// Input document.
 	Document *Document `protobuf:"bytes,1,opt,name=document" json:"document,omitempty"`
@@ -971,7 +1558,7 @@ type AnnotateTextRequest struct {
 func (m *AnnotateTextRequest) Reset()                    { *m = AnnotateTextRequest{} }
 func (m *AnnotateTextRequest) String() string            { return proto.CompactTextString(m) }
 func (*AnnotateTextRequest) ProtoMessage()               {}
-func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{13} }
+func (*AnnotateTextRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{15} }
 
 func (m *AnnotateTextRequest) GetDocument() *Document {
 	if m != nil {
@@ -1002,7 +1589,7 @@ func (m *AnnotateTextRequest_Features) Reset()         { *m = AnnotateTextReques
 func (m *AnnotateTextRequest_Features) String() string { return proto.CompactTextString(m) }
 func (*AnnotateTextRequest_Features) ProtoMessage()    {}
 func (*AnnotateTextRequest_Features) Descriptor() ([]byte, []int) {
-	return fileDescriptor0, []int{13, 0}
+	return fileDescriptor0, []int{15, 0}
 }
 
 // The text annotations response message.
@@ -1023,13 +1610,14 @@ type AnnotateTextResponse struct {
 	DocumentSentiment *Sentiment `protobuf:"bytes,4,opt,name=document_sentiment,json=documentSentiment" json:"document_sentiment,omitempty"`
 	// The language of the text, which will be the same as the language specified
 	// in the request or, if not specified, the automatically-detected language.
+	// See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
 	Language string `protobuf:"bytes,5,opt,name=language" json:"language,omitempty"`
 }
 
 func (m *AnnotateTextResponse) Reset()                    { *m = AnnotateTextResponse{} }
 func (m *AnnotateTextResponse) String() string            { return proto.CompactTextString(m) }
 func (*AnnotateTextResponse) ProtoMessage()               {}
-func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{14} }
+func (*AnnotateTextResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{16} }
 
 func (m *AnnotateTextResponse) GetSentences() []*Sentence {
 	if m != nil {
@@ -1073,6 +1661,8 @@ func init() {
 	proto.RegisterType((*AnalyzeSentimentResponse)(nil), "google.cloud.language.v1beta1.AnalyzeSentimentResponse")
 	proto.RegisterType((*AnalyzeEntitiesRequest)(nil), "google.cloud.language.v1beta1.AnalyzeEntitiesRequest")
 	proto.RegisterType((*AnalyzeEntitiesResponse)(nil), "google.cloud.language.v1beta1.AnalyzeEntitiesResponse")
+	proto.RegisterType((*AnalyzeSyntaxRequest)(nil), "google.cloud.language.v1beta1.AnalyzeSyntaxRequest")
+	proto.RegisterType((*AnalyzeSyntaxResponse)(nil), "google.cloud.language.v1beta1.AnalyzeSyntaxResponse")
 	proto.RegisterType((*AnnotateTextRequest)(nil), "google.cloud.language.v1beta1.AnnotateTextRequest")
 	proto.RegisterType((*AnnotateTextRequest_Features)(nil), "google.cloud.language.v1beta1.AnnotateTextRequest.Features")
 	proto.RegisterType((*AnnotateTextResponse)(nil), "google.cloud.language.v1beta1.AnnotateTextResponse")
@@ -1080,7 +1670,19 @@ func init() {
 	proto.RegisterEnum("google.cloud.language.v1beta1.Document_Type", Document_Type_name, Document_Type_value)
 	proto.RegisterEnum("google.cloud.language.v1beta1.Entity_Type", Entity_Type_name, Entity_Type_value)
 	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Tag", PartOfSpeech_Tag_name, PartOfSpeech_Tag_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Aspect", PartOfSpeech_Aspect_name, PartOfSpeech_Aspect_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Case", PartOfSpeech_Case_name, PartOfSpeech_Case_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Form", PartOfSpeech_Form_name, PartOfSpeech_Form_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Gender", PartOfSpeech_Gender_name, PartOfSpeech_Gender_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Mood", PartOfSpeech_Mood_name, PartOfSpeech_Mood_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Number", PartOfSpeech_Number_name, PartOfSpeech_Number_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Person", PartOfSpeech_Person_name, PartOfSpeech_Person_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Proper", PartOfSpeech_Proper_name, PartOfSpeech_Proper_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Reciprocity", PartOfSpeech_Reciprocity_name, PartOfSpeech_Reciprocity_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Tense", PartOfSpeech_Tense_name, PartOfSpeech_Tense_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.PartOfSpeech_Voice", PartOfSpeech_Voice_name, PartOfSpeech_Voice_value)
 	proto.RegisterEnum("google.cloud.language.v1beta1.DependencyEdge_Label", DependencyEdge_Label_name, DependencyEdge_Label_value)
+	proto.RegisterEnum("google.cloud.language.v1beta1.EntityMention_Type", EntityMention_Type_name, EntityMention_Type_value)
 }
 
 // Reference imports to suppress errors if they are not otherwise used.
@@ -1099,10 +1701,12 @@ type LanguageServiceClient interface {
 	// Finds named entities (currently finds proper names) in the text,
 	// entity types, salience, mentions for each entity, and other properties.
 	AnalyzeEntities(ctx context.Context, in *AnalyzeEntitiesRequest, opts ...grpc.CallOption) (*AnalyzeEntitiesResponse, error)
-	// Advanced API that analyzes the document and provides a full set of text
-	// annotations, including semantic, syntactic, and sentiment information. This
-	// API is intended for users who are familiar with machine learning and need
-	// in-depth text features to build upon.
+	// Analyzes the syntax of the text and provides sentence boundaries and
+	// tokenization along with part of speech tags, dependency trees, and other
+	// properties.
+	AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error)
+	// A convenience method that provides all the features that analyzeSentiment,
+	// analyzeEntities, and analyzeSyntax provide in one call.
 	AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error)
 }
 
@@ -1132,6 +1736,15 @@ func (c *languageServiceClient) AnalyzeEntities(ctx context.Context, in *Analyze
 	return out, nil
 }
 
+func (c *languageServiceClient) AnalyzeSyntax(ctx context.Context, in *AnalyzeSyntaxRequest, opts ...grpc.CallOption) (*AnalyzeSyntaxResponse, error) {
+	out := new(AnalyzeSyntaxResponse)
+	err := grpc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnalyzeSyntax", in, out, c.cc, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return out, nil
+}
+
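+// A minimal usage sketch (illustrative, not generated code) for the new
+// AnalyzeSyntax RPC. It assumes the generated NewLanguageServiceClient
+// constructor, the Document_PLAIN_TEXT enum value, and the Document_Content
+// oneof wrapper, all defined elsewhere in this file; the dial target and
+// insecure transport are placeholders, and a real client would configure TLS
+// and authentication.
+//
+//	conn, err := grpc.Dial("example.invalid:443", grpc.WithInsecure())
+//	if err != nil {
+//		// handle dial error
+//	}
+//	defer conn.Close()
+//	client := NewLanguageServiceClient(conn)
+//	resp, err := client.AnalyzeSyntax(context.Background(), &AnalyzeSyntaxRequest{
+//		Document: &Document{
+//			Type:   Document_PLAIN_TEXT,
+//			Source: &Document_Content{Content: "Hello, world."},
+//		},
+//		EncodingType: EncodingType_UTF8,
+//	})
+//	if err != nil {
+//		// handle RPC error
+//	}
+//	for _, tok := range resp.GetTokens() {
+//		_ = tok.GetPartOfSpeech() // also GetDependencyEdge, Lemma, ...
+//	}
+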
 func (c *languageServiceClient) AnnotateText(ctx context.Context, in *AnnotateTextRequest, opts ...grpc.CallOption) (*AnnotateTextResponse, error) {
 	out := new(AnnotateTextResponse)
 	err := grpc.Invoke(ctx, "/google.cloud.language.v1beta1.LanguageService/AnnotateText", in, out, c.cc, opts...)
@@ -1149,10 +1762,12 @@ type LanguageServiceServer interface {
 	// Finds named entities (currently finds proper names) in the text,
 	// entity types, salience, mentions for each entity, and other properties.
 	AnalyzeEntities(context.Context, *AnalyzeEntitiesRequest) (*AnalyzeEntitiesResponse, error)
-	// Advanced API that analyzes the document and provides a full set of text
-	// annotations, including semantic, syntactic, and sentiment information. This
-	// API is intended for users who are familiar with machine learning and need
-	// in-depth text features to build upon.
+	// Analyzes the syntax of the text and provides sentence boundaries and
+	// tokenization along with part of speech tags, dependency trees, and other
+	// properties.
+	AnalyzeSyntax(context.Context, *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error)
+	// A convenience method that provides all the features that analyzeSentiment,
+	// analyzeEntities, and analyzeSyntax provide in one call.
 	AnnotateText(context.Context, *AnnotateTextRequest) (*AnnotateTextResponse, error)
 }
 
@@ -1196,6 +1811,24 @@ func _LanguageService_AnalyzeEntities_Handler(srv interface{}, ctx context.Conte
 	return interceptor(ctx, in, info, handler)
 }
 
+func _LanguageService_AnalyzeSyntax_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(AnalyzeSyntaxRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/google.cloud.language.v1beta1.LanguageService/AnalyzeSyntax",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(LanguageServiceServer).AnalyzeSyntax(ctx, req.(*AnalyzeSyntaxRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
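+// A server-side sketch (illustrative only): a type with its own AnalyzeSyntax
+// method and an embedded LanguageServiceServer satisfies the interface, so it
+// can be registered with RegisterLanguageServiceServer, which is assumed to be
+// generated elsewhere in this file as in other protoc-gen-go outputs; listener
+// setup, the remaining methods, and error handling are elided.
+//
+//	type stubServer struct{ LanguageServiceServer }
+//
+//	func (stubServer) AnalyzeSyntax(ctx context.Context, req *AnalyzeSyntaxRequest) (*AnalyzeSyntaxResponse, error) {
+//		return &AnalyzeSyntaxResponse{Language: "en"}, nil
+//	}
+//
+//	s := grpc.NewServer()
+//	RegisterLanguageServiceServer(s, stubServer{})
+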
 func _LanguageService_AnnotateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
 	in := new(AnnotateTextRequest)
 	if err := dec(in); err != nil {
@@ -1226,6 +1859,10 @@ var _LanguageService_serviceDesc = grpc.ServiceDesc{
 			MethodName: "AnalyzeEntities",
 			Handler:    _LanguageService_AnalyzeEntities_Handler,
 		},
+		{
+			MethodName: "AnalyzeSyntax",
+			Handler:    _LanguageService_AnalyzeSyntax_Handler,
+		},
 		{
 			MethodName: "AnnotateText",
 			Handler:    _LanguageService_AnnotateText_Handler,
@@ -1240,123 +1877,177 @@ func init() {
 }
 
 var fileDescriptor0 = []byte{
-	// 1873 bytes of a gzipped FileDescriptorProto
-	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xac, 0x58, 0xcd, 0x72, 0x1b, 0xc7,
-	0x11, 0xd6, 0xe2, 0x8f, 0xc0, 0xf0, 0xaf, 0x35, 0x66, 0x6c, 0x04, 0xb1, 0x63, 0x69, 0x2d, 0x45,
-	0x34, 0x25, 0x81, 0x11, 0x19, 0xd3, 0x0a, 0xa5, 0x38, 0x5e, 0x2c, 0x16, 0xe4, 0x92, 0xc0, 0xee,
-	0x7a, 0x76, 0x17, 0x44, 0x7c, 0x08, 0x6a, 0x05, 0x0c, 0x61, 0x94, 0xc1, 0x5d, 0x04, 0x58, 0xaa,
-	0x88, 0x1c, 0x53, 0x95, 0x63, 0x2a, 0x87, 0xdc, 0x72, 0x49, 0x55, 0xf2, 0x00, 0x3e, 0xe4, 0x31,
-	0x72, 0xd4, 0x13, 0xa4, 0x2a, 0xc7, 0x1c, 0x72, 0xc8, 0x03, 0xa4, 0x7a, 0xf6, 0x87, 0x3f, 0x62,
-	0x44, 0xd2, 0xd2, 0xad, 0xa7, 0xb7, 0xbf, 0xee, 0x9e, 0xee, 0x6f, 0x66, 0x1a, 0x20, 0xf6, 0x20,
-	0x08, 0x06, 0x23, 0x5e, 0x1d, 0x04, 0x23, 0xcf, 0x1f, 0x54, 0x83, 0xc9, 0x60, 0x7d, 0xc0, 0xfd,
-	0xf1, 0x24, 0x08, 0x83, 0xf5, 0xe8, 0x93, 0x37, 0x1e, 0x4e, 0xd7, 0x7b, 0xa3, 0xe0, 0xb8, 0xbf,
-	0x8e, 0x26, 0xc7, 0xde, 0x80, 0xaf, 0xbf, 0x7c, 0xf2, 0x82, 0x87, 0xde, 0x93, 0x54, 0xd1, 0x9d,
-	0xf2, 0xc9, 0xcb, 0x61, 0x8f, 0x57, 0x05, 0x90, 0x7e, 0x14, 0x3b, 0x15, 0xa8, 0x6a, 0x62, 0x54,
-	0x8d, 0x51, 0x15, 0xfd, 0x7a, 0x31, 0xbd, 0xf1, 0x70, 0x3d, 0xf6, 0xdb, 0x0b, 0xfc, 0xc3, 0xe1,
-	0x60, 0xdd, 0xf3, 0xfd, 0x20, 0xf4, 0xc2, 0x61, 0xe0, 0x4f, 0xa3, 0x48, 0xf2, 0x7f, 0x24, 0x52,
-	0xac, 0x07, 0xbd, 0xe3, 0x23, 0xee, 0x87, 0xf4, 0x4b, 0x92, 0x0b, 0x67, 0x63, 0x5e, 0x96, 0xee,
-	0x48, 0xab, 0x4b, 0x1b, 0x8f, 0xaa, 0x6f, 0xcc, 0xa2, 0x9a, 0xc0, 0xaa, 0xce, 0x6c, 0xcc, 0x99,
-	0x40, 0xd2, 0x0a, 0x99, 0xeb, 0x05, 0x7e, 0xc8, 0xfd, 0xb0, 0x9c, 0xb9, 0x23, 0xad, 0x96, 0x76,
-	0x6f, 0xb1, 0x44, 0x41, 0x57, 0xc9, 0xf2, 0xa0, 0x37, 0xed, 0xc6, 0xcb, 0xee, 0xf1, 0x64, 0x58,
-	0xce, 0xc6, 0x36, 0x8b, 0x83, 0xde, 0x54, 0x8d, 0xf4, 0xee, 0x64, 0x48, 0x2b, 0xa4, 0x98, 0x44,
-	0x2b, 0xe7, 0xd0, 0x84, 0xa5, 0x6b, 0x79, 0x8b, 0xe4, 0x30, 0x1e, 0x5d, 0x21, 0xe0, 0xfc, 0xca,
-	0xd2, 0xba, 0xae, 0x61, 0x5b, 0x9a, 0xaa, 0x37, 0x74, 0xad, 0x0e, 0xb7, 0xe8, 0x12, 0x21, 0x56,
-	0x53, 0xd1, 0x8d, 0xae, 0xa3, 0x75, 0x1c, 0x90, 0x68, 0x91, 0xe4, 0x76, 0x9d, 0x56, 0x13, 0x32,
-	0xb5, 0x22, 0x29, 0x4c, 0x83, 0xe3, 0x49, 0x8f, 0xcb, 0x3b, 0xa4, 0x68, 0x73, 0x8c, 0xd5, 0xe3,
-	0xf4, 0x19, 0xc9, 0x85, 0xfc, 0x24, 0x14, 0x3b, 0x9e, 0xdf, 0x78, 0x70, 0xc5, 0x8e, 0x1d, 0x7e,
-	0x12, 0xda, 0x63, 0xcf, 0x67, 0x02, 0x24, 0xff, 0x23, 0x4b, 0x0a, 0x9a, 0x1f, 0x0e, 0xc3, 0x19,
-	0xa5, 0x24, 0xe7, 0x7b, 0x47, 0x51, 0xe5, 0x4a, 0x4c, 0xc8, 0xf4, 0x8b, 0xb8, 0x9a, 0x19, 0x51,
-	0xcd, 0xb5, 0x2b, 0x7c, 0x47, 0x8e, 0xce, 0xd6, 0xd2, 0x24, 0xc5, 0x23, 0x1e, 0x7a, 0x7d, 0x2f,
-	0xf4, 0xca, 0xd9, 0x3b, 0xd9, 0xd5, 0xf9, 0x8d, 0xcd, 0xeb, 0xf9, 0x68, 0xc5, 0x28, 0xcd, 0x0f,
-	0x27, 0x33, 0x96, 0x3a, 0xc1, 0xb2, 0x4e, 0xbd, 0xd1, 0x10, 0x37, 0x2e, 0xca, 0x9a, 0x61, 0xe9,
-	0x9a, 0xee, 0x62, 0x30, 0x5f, 0x30, 0xa3, 0x9c, 0x17, 0xc1, 0x1e, 0x5d, 0x2b, 0x58, 0x2b, 0x02,
-	0xb1, 0x14, 0x5d, 0x79, 0x46, 0x16, 0xcf, 0x25, 0x40, 0x81, 0x64, 0xbf, 0xe5, 0xb3, 0xb8, 0x34,
-	0x28, 0xd2, 0x15, 0x92, 0x7f, 0xe9, 0x8d, 0x8e, 0xa3, 0xd2, 0x94, 0x58, 0xb4, 0xd8, 0xce, 0x3c,
-	0x95, 0xe4, 0x59, 0xdc, 0xdd, 0x79, 0x32, 0xe7, 0x1a, 0xfb, 0x86, 0x79, 0x60, 0xc0, 0x2d, 0x4a,
-	0x48, 0xc1, 0xd2, 0x98, 0x6d, 0x1a, 0x20, 0xd1, 0x05, 0x52, 0x6c, 0x9a, 0xaa, 0xe2, 0xe8, 0xa6,
-	0x01, 0x19, 0x0a, 0x64, 0xc1, 0x64, 0x3b, 0x8a, 0xa1, 0x7f, 0x1d, 0x69, 0xb2, 0xb4, 0x44, 0xf2,
-	0x5a, 0x5b, 0x33, 0x1c, 0xc8, 0xd1, 0x65, 0x32, 0x7f, 0x60, 0xb2, 0xfd, 0xae, 0xd9, 0xe8, 0x2a,
-	0xcc, 0x81, 0x3c, 0xbd, 0x4d, 0x16, 0x55, 0xd3, 0xb0, 0xdd, 0x96, 0xc6, 0xba, 0x3b, 0xa6, 0x59,
-	0x87, 0x02, 0x9a, 0x9b, 0xce, 0xae, 0xc6, 0x60, 0x4e, 0xfe, 0x7d, 0x86, 0xe4, 0x9d, 0xe0, 0x5b,
-	0xee, 0xbf, 0x15, 0x29, 0xe8, 0x57, 0x64, 0x69, 0xec, 0x4d, 0xc2, 0x6e, 0x70, 0xd8, 0x9d, 0x8e,
-	0x39, 0xef, 0x7d, 0x23, 0x36, 0x39, 0xbf, 0xf1, 0xf0, 0x0a, 0x37, 0x96, 0x37, 0x09, 0xcd, 0x43,
-	0x5b, 0x40, 0xd8, 0xc2, 0xf8, 0xcc, 0x8a, 0xb6, 0xc9, 0x72, 0x9f, 0x8f, 0xb9, 0xdf, 0xe7, 0x7e,
-	0x6f, 0xd6, 0xe5, 0xfd, 0x01, 0x17, 0x07, 0x67, 0x7e, 0xe3, 0xf1, 0x55, 0x27, 0x34, 0x45, 0x69,
-	0xfd, 0x01, 0x67, 0x4b, 0xfd, 0x73, 0x6b, 0x6c, 0xc3, 0x88, 0x1f, 0x1d, 0x79, 0xf1, 0x19, 0x8b,
-	0x16, 0xb2, 0x46, 0x4a, 0x78, 0x3c, 0x86, 0xe2, 0x46, 0xa8, 0x90, 0xe2, 0x38, 0x18, 0x79, 0x93,
-	0x61, 0x18, 0x35, 0x30, 0xc3, 0xd2, 0x35, 0xfd, 0x90, 0x94, 0x8e, 0xbc, 0x81, 0x3f, 0x0c, 0x8f,
-	0xfb, 0x51, 0x27, 0x33, 0xec, 0x54, 0x21, 0xff, 0x53, 0x22, 0x0b, 0x67, 0xf7, 0x44, 0x15, 0x92,
-	0x0d, 0xbd, 0x41, 0x7c, 0xb7, 0xac, 0xdf, 0xa0, 0x1a, 0x55, 0xc7, 0x1b, 0x30, 0xc4, 0xca, 0x7f,
-	0x90, 0x48, 0xd6, 0xf1, 0x06, 0xe7, 0xd9, 0x31, 0x47, 0xb2, 0x4a, 0x7d, 0x0f, 0xa4, 0x48, 0xb0,
-	0x20, 0x13, 0x09, 0x6d, 0xc8, 0xe2, 0xe9, 0x57, 0x4d, 0x63, 0x0f, 0x72, 0xa8, 0xaa, 0x6b, 0xc8,
-	0x81, 0x22, 0xc9, 0x19, 0xa6, 0x6b, 0x40, 0x01, 0x55, 0x86, 0xdb, 0x82, 0x39, 0x54, 0x59, 0xcc,
-	0x34, 0xa0, 0x88, 0x2a, 0x8b, 0x39, 0x50, 0x42, 0x5a, 0x58, 0xae, 0xa1, 0x3a, 0x40, 0xf0, 0x6b,
-	0x5b, 0x63, 0x35, 0x98, 0xa7, 0x79, 0x22, 0x75, 0x60, 0x01, 0xbf, 0x29, 0x8d, 0x86, 0xde, 0x81,
-	0x45, 0xf9, 0xbf, 0x73, 0x64, 0xe9, 0x7c, 0x8d, 0xe9, 0x2a, 0x81, 0x6f, 0xb8, 0xd7, 0xef, 0x86,
-	0xc8, 0xa4, 0xee, 0xd0, 0xef, 0xf3, 0x13, 0xb1, 0xe5, 0x3c, 0x5b, 0x42, 0xbd, 0x20, 0x98, 0x8e,
-	0x5a, 0xaa, 0x93, 0xfc, 0xc8, 0x7b, 0xc1, 0x47, 0xf1, 0xfd, 0xb0, 0x79, 0xa3, 0x5e, 0x56, 0x9b,
-	0x08, 0x65, 0x91, 0x07, 0xf9, 0xdf, 0x05, 0x92, 0x17, 0x8a, 0xd7, 0xce, 0x8d, 0x52, 0xab, 0x31,
-	0xad, 0x0d, 0x92, 0xc8, 0x5a, 0x35, 0x5b, 0x58, 0x1e, 0x14, 0xeb, 0x6d, 0xb5, 0x09, 0x59, 0x61,
-	0x51, 0x6f, 0xb7, 0xcc, 0x3a, 0xe4, 0x70, 0xa3, 0x0a, 0x4a, 0x79, 0x61, 0x60, 0x59, 0xa6, 0x0d,
-	0x05, 0xa1, 0x74, 0x1c, 0x06, 0x73, 0xa2, 0xa8, 0x6e, 0x07, 0x8a, 0x18, 0x42, 0x71, 0x3b, 0x96,
-	0x62, 0xdb, 0x50, 0xa2, 0x05, 0x92, 0x51, 0x55, 0x20, 0x08, 0x51, 0x85, 0xfb, 0xf9, 0xb4, 0xe8,
-	0xa2, 0x52, 0xaa, 0xed, 0xd6, 0xf6, 0x60, 0x91, 0x2e, 0x92, 0x92, 0x10, 0x05, 0x6c, 0x29, 0x6a,
-	0x87, 0x05, 0xcb, 0x49, 0x5f, 0x00, 0x0d, 0xea, 0xba, 0xad, 0x9a, 0x2e, 0xb3, 0x35, 0xb8, 0x8d,
-	0x4e, 0xea, 0x66, 0x6d, 0x0f, 0x28, 0x4a, 0x5a, 0xc7, 0x6a, 0xc2, 0x7b, 0x78, 0xf4, 0x77, 0x4c,
-	0xcd, 0x3e, 0xd0, 0x9d, 0x5d, 0x58, 0x41, 0xbd, 0x8e, 0x16, 0x3f, 0x40, 0xa9, 0xa5, 0xb0, 0x7d,
-	0x78, 0x1f, 0xbd, 0xb5, 0x0e, 0x34, 0xf8, 0x20, 0x12, 0xda, 0x50, 0x16, 0x4d, 0xd6, 0x76, 0xe0,
-	0x87, 0x98, 0xa8, 0x61, 0x40, 0x05, 0x9d, 0x18, 0x56, 0xbc, 0xe7, 0x1f, 0x61, 0x86, 0x86, 0xc8,
-	0xf0, 0x43, 0x4c, 0xc0, 0x48, 0x33, 0xfc, 0x28, 0x61, 0xc7, 0x8f, 0xb1, 0x44, 0x86, 0xdb, 0xaa,
-	0x69, 0x0c, 0x3e, 0x46, 0x06, 0x58, 0x70, 0x07, 0x4d, 0x2d, 0x85, 0x29, 0x8e, 0xd2, 0xd1, 0x6d,
-	0xb8, 0x8b, 0x05, 0xb1, 0x14, 0xe6, 0xa0, 0x47, 0x59, 0x30, 0x47, 0x14, 0xe2, 0x13, 0xc1, 0x2b,
-	0xcc, 0xf0, 0x5e, 0x24, 0xd9, 0x36, 0xdc, 0x17, 0xb6, 0xa6, 0xed, 0x60, 0x4e, 0x3f, 0x11, 0x0b,
-	0xa6, 0x09, 0xeb, 0x07, 0xe9, 0xc2, 0xd8, 0x83, 0x55, 0x71, 0xe3, 0x31, 0x0d, 0x2b, 0xf3, 0x69,
-	0x44, 0x4f, 0xad, 0x01, 0x6b, 0xb1, 0x64, 0xc1, 0x43, 0x11, 0x85, 0x99, 0x46, 0x13, 0x1e, 0x25,
-	0x9c, 0x7d, 0x8c, 0x3b, 0xb4, 0x6c, 0xa8, 0xe2, 0x0e, 0xbf, 0x72, 0x15, 0x43, 0xe4, 0xb3, 0x8e,
-	0x96, 0x4c, 0x45, 0xf1, 0xa7, 0xf8, 0x41, 0x88, 0x4c, 0x6b, 0xc2, 0x13, 0xf1, 0xa1, 0xce, 0x4c,
-	0x0b, 0x36, 0xd0, 0x05, 0x06, 0xd8, 0xc4, 0x1c, 0x98, 0xd6, 0x32, 0x14, 0xc3, 0x81, 0x9f, 0xe1,
-	0x53, 0xca, 0x34, 0xdc, 0xa7, 0x51, 0x77, 0x5b, 0xf0, 0x19, 0x46, 0x67, 0xa6, 0xe9, 0xc0, 0x16,
-	0x4a, 0x36, 0x16, 0xe7, 0x73, 0x21, 0xb9, 0x8d, 0x06, 0x3c, 0x45, 0x49, 0x44, 0xfc, 0x39, 0x3a,
-	0x76, 0x4c, 0x4b, 0x57, 0x61, 0x5b, 0x9c, 0x1d, 0x54, 0x3e, 0xc3, 0xd8, 0x6d, 0x71, 0x6d, 0xb7,
-	0x35, 0x78, 0x8e, 0x26, 0x1d, 0xb1, 0xed, 0x5f, 0xe0, 0x4e, 0xd1, 0x83, 0xde, 0x81, 0x2f, 0x04,
-	0x52, 0x77, 0x9a, 0x1a, 0xfc, 0x12, 0xed, 0x95, 0x7a, 0xdb, 0xda, 0x45, 0xf4, 0x97, 0x31, 0xe5,
-	0x54, 0xc5, 0xb5, 0x41, 0x11, 0xec, 0x74, 0x3b, 0xed, 0x36, 0xd4, 0x50, 0xac, 0x8b, 0xa8, 0x2a,
-	0x9a, 0x34, 0x4c, 0xa6, 0xe9, 0x3b, 0x06, 0xd4, 0xb1, 0x14, 0xfb, 0x07, 0xa0, 0x61, 0xfc, 0xa6,
-	0x6e, 0x3b, 0xd0, 0x88, 0x8e, 0x7d, 0x4b, 0x85, 0x1d, 0x41, 0x00, 0xb3, 0x15, 0xf1, 0x72, 0x17,
-	0x1f, 0x90, 0x64, 0x25, 0x1a, 0xaf, 0x0b, 0x4b, 0xb7, 0xa5, 0xc2, 0x1e, 0x96, 0x45, 0x35, 0x2d,
-	0xd8, 0xc7, 0x4a, 0xd4, 0x75, 0x5b, 0x3c, 0x3b, 0x5a, 0x1d, 0x9a, 0x72, 0x93, 0x2c, 0x9e, 0x7b,
-	0xfc, 0xde, 0x6e, 0x8a, 0xd8, 0x21, 0xc5, 0x44, 0x43, 0xcb, 0xa7, 0xe3, 0x53, 0xf4, 0x5c, 0xa6,
-	0xc3, 0xd3, 0x5d, 0xb2, 0xf0, 0x82, 0x0f, 0x86, 0x7e, 0x37, 0x38, 0x3c, 0x9c, 0xf2, 0x68, 0xba,
-	0xca, 0xb3, 0x79, 0xa1, 0x33, 0x85, 0x4a, 0xfe, 0x35, 0xf9, 0x40, 0xf1, 0xbd, 0xd1, 0xec, 0xb7,
-	0x3c, 0xbd, 0xbf, 0x19, 0xff, 0xcd, 0x31, 0x9f, 0x86, 0x54, 0x25, 0xc5, 0x7e, 0x3c, 0xad, 0x5d,
-	0x33, 0xc9, 0x64, 0xb8, 0x63, 0x29, 0x50, 0xfe, 0xa3, 0x44, 0xca, 0xaf, 0x07, 0x98, 0x8e, 0x03,
-	0x7f, 0xca, 0xe9, 0x01, 0xa1, 0x89, 0x61, 0x77, 0x9a, 0x7c, 0x8d, 0x63, 0xad, 0x5e, 0x11, 0xeb,
-	0xd4, 0xdb, 0xed, 0xc4, 0xc7, 0xb9, 0x17, 0x28, 0x9d, 0x05, 0x33, 0x17, 0x66, 0xc1, 0xef, 0x24,
-	0xf2, 0x7e, 0x9c, 0x91, 0x68, 0xc8, 0x90, 0x4f, 0xdf, 0xe5, 0x8e, 0xa9, 0x45, 0x16, 0xb9, 0xdf,
-	0x0b, 0xfa, 0x43, 0x7f, 0xd0, 0x3d, 0x33, 0xca, 0x3d, 0xbc, 0x72, 0x32, 0x8a, 0x30, 0x62, 0x96,
-	0x5b, 0xe0, 0x67, 0x56, 0xf2, 0x49, 0xda, 0xa3, 0xd3, 0x84, 0xe3, 0x0a, 0x2a, 0xa4, 0xc8, 0x63,
-	0x5d, 0x59, 0x12, 0x13, 0xd8, 0xfd, 0x6b, 0x4d, 0x60, 0x2c, 0x85, 0xbd, 0xb1, 0x56, 0x7f, 0xcd,
-	0x92, 0xf7, 0x94, 0x68, 0xfc, 0xe7, 0xc8, 0xb7, 0x77, 0x5a, 0xa8, 0x03, 0x52, 0x3c, 0xe4, 0x5e,
-	0x78, 0x3c, 0xe1, 0xd3, 0x78, 0xdc, 0x79, 0x76, 0x85, 0x93, 0x4b, 0x52, 0xa9, 0x36, 0x62, 0x17,
-	0x2c, 0x75, 0xf6, 0x7a, 0x07, 0xb2, 0x6f, 0xd9, 0x81, 0xca, 0x9f, 0x25, 0x52, 0x4c, 0x02, 0xd1,
-	0xfb, 0x64, 0x89, 0x9f, 0x84, 0x13, 0xaf, 0x17, 0x76, 0xa7, 0x33, 0x3f, 0xf4, 0xa2, 0xb7, 0xba,
-	0xc8, 0x16, 0x63, 0xad, 0x2d, 0x94, 0xf4, 0x53, 0x02, 0x89, 0x59, 0xda, 0xa2, 0x8c, 0x30, 0x5c,
-	0x8e, 0xf5, 0x49, 0x37, 0xe9, 0x73, 0x52, 0x49, 0x4c, 0x2f, 0x39, 0x0f, 0x59, 0x01, 0x2a, 0xc7,
-	0x16, 0xf5, 0x8b, 0x64, 0x97, 0x5f, 0x65, 0xc8, 0xca, 0xf9, 0xca, 0xc4, 0xe4, 0xd0, 0x48, 0x69,
-	0x1a, 0xff, 0x66, 0x49, 0xd8, 0xf1, 0xe0, 0x1a, 0xa7, 0x0a, 0xed, 0xd9, 0x29, 0x92, 0x3e, 0x27,
-	0x05, 0x31, 0x98, 0x60, 0xfa, 0xe8, 0xe3, 0xde, 0x55, 0x57, 0x15, 0x1a, 0xb3, 0x18, 0x73, 0x8e,
-	0xa1, 0xd9, 0xef, 0xc7, 0xd0, 0xcb, 0xaf, 0x89, 0xdc, 0xbb, 0xbd, 0x26, 0xf2, 0xe7, 0xa9, 0xbf,
-	0xf6, 0x94, 0x2c, 0x9c, 0x25, 0x44, 0xf4, 0x18, 0x18, 0x1a, 0xdc, 0x42, 0xc9, 0x75, 0x1a, 0x4f,
-	0xa3, 0xf9, 0xc8, 0x75, 0x1a, 0x4f, 0xb6, 0xa2, 0xf9, 0xc8, 0x75, 0x1a, 0x9b, 0x1b, 0x90, 0xdd,
-	0xf8, 0x4b, 0x8e, 0x2c, 0x37, 0x63, 0x37, 0x76, 0xf4, 0x4b, 0x9a, 0xfe, 0x5d, 0x22, 0x70, 0xf1,
-	0x1a, 0xa4, 0x5b, 0x57, 0xd2, 0xfd, 0xd2, 0x8b, 0xb9, 0xf2, 0xf9, 0x8d, 0x71, 0x11, 0x21, 0xe4,
-	0xea, 0xef, 0x5e, 0xfd, 0xeb, 0x4f, 0x99, 0x55, 0xf9, 0x93, 0xf4, 0xaf, 0x84, 0xa4, 0x26, 0xd3,
-	0x6d, 0xef, 0x02, 0x68, 0x5b, 0x5a, 0xa3, 0xdf, 0x49, 0x64, 0xf9, 0xc2, 0xcd, 0x43, 0x3f, 0xbb,
-	0x5e, 0xf0, 0x0b, 0x57, 0x6b, 0x65, 0xeb, 0xa6, 0xb0, 0x38, 0xe5, 0xc7, 0x22, 0xe5, 0x07, 0xb2,
-	0xfc, 0xff, 0x53, 0x4e, 0x30, 0x98, 0xf1, 0xdf, 0x24, 0xb2, 0x70, 0xf6, 0x2c, 0xd0, 0x8d, 0x9b,
-	0x5f, 0x29, 0x95, 0xcd, 0x1b, 0x61, 0xe2, 0x44, 0xd7, 0x44, 0xa2, 0xf7, 0xe4, 0x8f, 0x2f, 0x4d,
-	0xf4, 0x14, 0xb0, 0x2d, 0xad, 0xd5, 0x66, 0xe4, 0x6e, 0x2f, 0x38, 0x7a, 0x73, 0x94, 0xda, 0xca,
-	0x05, 0x0e, 0x59, 0x93, 0x20, 0x0c, 0x2c, 0xe9, 0xeb, 0xe7, 0x6f, 0xf3, 0xdf, 0xd1, 0x8b, 0x82,
-	0x30, 0xdc, 0xfc, 0x5f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xee, 0x69, 0x85, 0xb5, 0x82, 0x12, 0x00,
-	0x00,
+	// 2752 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xcc, 0x59, 0xcf, 0x73, 0xdb, 0xc6,
+	0xf5, 0x37, 0xf8, 0x4b, 0xe4, 0x92, 0x92, 0xd6, 0x88, 0x93, 0xf0, 0xab, 0x6f, 0xf2, 0x8d, 0x83,
+	0xc4, 0x5f, 0x2b, 0x76, 0x42, 0xc5, 0x52, 0xe2, 0xb8, 0x76, 0x9a, 0x06, 0x02, 0x96, 0x14, 0x64,
+	0x10, 0x40, 0x16, 0x00, 0x25, 0xa7, 0x07, 0x0e, 0x4c, 0xae, 0x18, 0x4e, 0x24, 0x80, 0x25, 0x21,
+	0x8f, 0xd5, 0x4b, 0x67, 0x3a, 0xd3, 0x63, 0xa7, 0x87, 0xde, 0x7a, 0x6c, 0x0f, 0x3d, 0x75, 0xd2,
+	0x99, 0x5e, 0xda, 0x3f, 0xa0, 0x87, 0x4e, 0x8f, 0x99, 0xe9, 0x5f, 0xd0, 0x63, 0x0f, 0x3d, 0xf4,
+	0xd0, 0x63, 0xe7, 0xed, 0x2e, 0xf8, 0x43, 0x71, 0x2d, 0x31, 0xc9, 0x21, 0xb7, 0xdd, 0xc7, 0xf7,
+	0x3e, 0xef, 0xe7, 0xbe, 0xb7, 0x58, 0x22, 0x7f, 0x90, 0x24, 0x83, 0x63, 0xd6, 0x18, 0x24, 0xc7,
+	0x51, 0x3c, 0x68, 0x24, 0xe3, 0xc1, 0xd6, 0x80, 0xc5, 0xa3, 0x71, 0x92, 0x26, 0x5b, 0xe2, 0xa7,
+	0x68, 0x34, 0x9c, 0x6c, 0xf5, 0x8e, 0x93, 0xd3, 0xfe, 0x16, 0xb0, 0x9c, 0x46, 0x03, 0xb6, 0xf5,
+	0xe4, 0xce, 0x63, 0x96, 0x46, 0x77, 0xa6, 0x84, 0xee, 0x84, 0x8d, 0x9f, 0x0c, 0x7b, 0xac, 0xc1,
+	0x05, 0xd5, 0x57, 0x25, 0x28, 0x97, 0x6a, 0x64, 0x4c, 0x0d, 0x29, 0xb5, 0x61, 0x5d, 0x4e, 0x67,
+	0x34, 0x1a, 0x6e, 0x49, 0xdc, 0x5e, 0x12, 0x1f, 0x0d, 0x07, 0x5b, 0x51, 0x1c, 0x27, 0x69, 0x94,
+	0x0e, 0x93, 0x78, 0x22, 0x34, 0x69, 0xff, 0x54, 0x50, 0xd9, 0x4c, 0x7a, 0xa7, 0x27, 0x2c, 0x4e,
+	0xd5, 0x8f, 0x51, 0x21, 0x3d, 0x1b, 0xb1, 0xba, 0x72, 0x5d, 0xd9, 0x5c, 0xdb, 0x7e, 0xbb, 0xf1,
+	0x5c, 0x2b, 0x1a, 0x99, 0x58, 0x23, 0x38, 0x1b, 0x31, 0xca, 0x25, 0xd5, 0x0d, 0xb4, 0xd2, 0x4b,
+	0xe2, 0x94, 0xc5, 0x69, 0x3d, 0x77, 0x5d, 0xd9, 0xac, 0xec, 0x5d, 0xa1, 0x19, 0x41, 0xdd, 0x44,
+	0xeb, 0x83, 0xde, 0xa4, 0x2b, 0xb7, 0xdd, 0xd3, 0xf1, 0xb0, 0x9e, 0x97, 0x3c, 0xab, 0x83, 0xde,
+	0xc4, 0x10, 0xf4, 0x70, 0x3c, 0x54, 0x37, 0x50, 0x39, 0xd3, 0x56, 0x2f, 0x00, 0x0b, 0x9d, 0xee,
+	0xb5, 0xbb, 0xa8, 0x00, 0xfa, 0xd4, 0x6b, 0x08, 0x07, 0x8f, 0x3c, 0xd2, 0x0d, 0x1d, 0xdf, 0x23,
+	0x86, 0xd5, 0xb4, 0x88, 0x89, 0xaf, 0xa8, 0x6b, 0x08, 0x79, 0xb6, 0x6e, 0x39, 0xdd, 0x80, 0x1c,
+	0x06, 0x58, 0x51, 0xcb, 0xa8, 0xb0, 0x17, 0xb4, 0x6d, 0x9c, 0xdb, 0x2d, 0xa3, 0xd2, 0x24, 0x39,
+	0x1d, 0xf7, 0x98, 0xf6, 0x0b, 0x05, 0x95, 0x7d, 0x06, 0xca, 0x7a, 0x4c, 0x7d, 0x80, 0x0a, 0x29,
+	0x7b, 0x9a, 0x72, 0x97, 0xab, 0xdb, 0x37, 0x2f, 0x70, 0x39, 0x60, 0x4f, 0x53, 0x7f, 0x14, 0xc5,
+	0x94, 0x0b, 0xa9, 0x4d, 0x54, 0x99, 0xb0, 0x38, 0x1d, 0x9e, 0x64, 0xfe, 0x56, 0xb7, 0x37, 0x2f,
+	0x40, 0xf0, 0x33, 0x7e, 0x3a, 0x13, 0xd5, 0xfe, 0x9a, 0x47, 0x25, 0x12, 0xa7, 0xc3, 0xf4, 0x4c,
+	0x55, 0x51, 0x21, 0x8e, 0x4e, 0x44, 0x0a, 0x2a, 0x94, 0xaf, 0xd5, 0x8f, 0x64, 0x5a, 0x72, 0x3c,
+	0x2d, 0xb7, 0x2e, 0xd0, 0x20, 0x80, 0xe6, 0x93, 0xe2, 0xa2, 0xf2, 0x09, 0x4b, 0xa3, 0x7e, 0x94,
+	0x46, 0xf5, 0xfc, 0xf5, 0xfc, 0x66, 0x75, 0x7b, 0xe7, 0x72, 0x18, 0x6d, 0x29, 0x45, 0xe2, 0x74,
+	0x7c, 0x46, 0xa7, 0x20, 0x90, 0x9f, 0x49, 0x74, 0x3c, 0x84, 0x00, 0xf2, 0xfc, 0xe4, 0xe8, 0x74,
+	0xaf, 0xee, 0x81, 0xb2, 0x98, 0x97, 0x58, 0xbd, 0xc8, 0x95, 0xbd, 0x7d, 0x29, 0x65, 0x6d, 0x21,
+	0x44, 0xa7, 0xd2, 0x1b, 0x0f, 0xd0, 0xea, 0x82, 0x01, 0x2a, 0x46, 0xf9, 0xcf, 0xd9, 0x99, 0x0c,
+	0x0d, 0x2c, 0xd5, 0x6b, 0xa8, 0xf8, 0x24, 0x3a, 0x3e, 0x15, 0xa1, 0xa9, 0x50, 0xb1, 0xb9, 0x9f,
+	0xbb, 0xa7, 0x68, 0x67, 0xb2, 0x4c, 0xaa, 0x68, 0x25, 0x74, 0x1e, 0x3a, 0xee, 0x81, 0x83, 0xaf,
+	0xa8, 0x08, 0x95, 0x3c, 0x42, 0x7d, 0xd7, 0xc1, 0x8a, 0x5a, 0x43, 0x65, 0xdb, 0x35, 0xf4, 0xc0,
+	0x72, 0x1d, 0x9c, 0x53, 0x31, 0xaa, 0xb9, 0xb4, 0xa5, 0x3b, 0xd6, 0xa7, 0x82, 0x92, 0x57, 0x2b,
+	0xa8, 0x48, 0x3a, 0xc4, 0x09, 0x70, 0x41, 0x5d, 0x47, 0xd5, 0x03, 0x97, 0x3e, 0xec, 0xba, 0xcd,
+	0xae, 0x4e, 0x03, 0x5c, 0x54, 0xaf, 0xa2, 0x55, 0xc3, 0x75, 0xfc, 0xb0, 0x4d, 0x68, 0xb7, 0xe5,
+	0xba, 0x26, 0x2e, 0x01, 0xbb, 0x1b, 0xec, 0x11, 0x8a, 0x57, 0xb4, 0x9f, 0xe5, 0x50, 0x31, 0x48,
+	0x3e, 0x67, 0xf1, 0x37, 0x2b, 0xae, 0x4f, 0xd0, 0xda, 0x28, 0x1a, 0xa7, 0xdd, 0xe4, 0xa8, 0x3b,
+	0x19, 0x31, 0xd6, 0xfb, 0x4c, 0x56, 0xd8, 0xed, 0x0b, 0x60, 0xbc, 0x68, 0x9c, 0xba, 0x47, 0x3e,
+	0x17, 0xa1, 0xb5, 0xd1, 0xdc, 0x4e, 0xed, 0xa0, 0xf5, 0x3e, 0x1b, 0xb1, 0xb8, 0xcf, 0xe2, 0xde,
+	0x59, 0x97, 0xf5, 0x07, 0x8c, 0x9f, 0xc0, 0xea, 0xf6, 0x3b, 0x17, 0x1d, 0xf5, 0xa9, 0x14, 0xe9,
+	0x0f, 0x18, 0x5d, 0xeb, 0x2f, 0xec, 0x21, 0x0d, 0xc7, 0xec, 0xe4, 0x24, 0x92, 0x87, 0x55, 0x6c,
+	0xb4, 0x1f, 0xa2, 0xca, 0xb4, 0xda, 0xa1, 0x64, 0x46, 0xc9, 0x71, 0x34, 0x1e, 0xa6, 0x22, 0x81,
+	0x39, 0x3a, 0xdd, 0xab, 0xaf, 0xa0, 0xca, 0x49, 0x34, 0x88, 0x87, 0xe9, 0x69, 0x5f, 0x64, 0x32,
+	0x47, 0x67, 0x04, 0x00, 0x9f, 0xf4, 0x92, 0xb1, 0x30, 0x35, 0x47, 0xc5, 0x46, 0xfb, 0xd3, 0x55,
+	0x54, 0x9b, 0xf7, 0x54, 0xd5, 0x51, 0x3e, 0x8d, 0x06, 0xb2, 0x75, 0x6d, 0x2d, 0x11, 0xa3, 0x46,
+	0x10, 0x0d, 0x28, 0xc8, 0xaa, 0xfb, 0xa8, 0x14, 0x4d, 0x46, 0xac, 0x97, 0xca, 0x93, 0xb6, 0xbd,
+	0x0c, 0x8a, 0xce, 0x25, 0xa9, 0x44, 0x50, 0x4d, 0x54, 0xe8, 0x45, 0x13, 0x61, 0xf4, 0xda, 0xf6,
+	0xbb, 0xcb, 0x20, 0x19, 0xd1, 0x84, 0x51, 0x2e, 0x0d, 0x28, 0x47, 0xc9, 0xf8, 0x84, 0xc7, 0x75,
+	0x49, 0x94, 0x66, 0x32, 0x3e, 0xa1, 0x5c, 0x1a, 0xfc, 0x1a, 0x40, 0xba, 0xc6, 0xf5, 0xe2, 0xf2,
+	0x7e, 0xb5, 0xb8, 0x24, 0x95, 0x08, 0x60, 0xd1, 0x49, 0x92, 0xf4, 0xeb, 0xa5, 0xe5, 0x2d, 0x6a,
+	0x27, 0x49, 0x9f, 0x72, 0x69, 0xb0, 0x28, 0x3e, 0x3d, 0x79, 0xcc, 0xc6, 0xf5, 0x95, 0xe5, 0x2d,
+	0x72, 0xb8, 0x24, 0x95, 0x08, 0x80, 0x35, 0x62, 0xe3, 0x49, 0x12, 0xd7, 0xcb, 0xcb, 0x63, 0x79,
+	0x5c, 0x92, 0x4a, 0x04, 0x8e, 0x35, 0x4e, 0x46, 0x6c, 0x5c, 0xaf, 0x7c, 0x0d, 0x2c, 0x2e, 0x49,
+	0x25, 0x82, 0xfa, 0x08, 0x55, 0xc7, 0xac, 0x37, 0x1c, 0x8d, 0x93, 0x1e, 0x14, 0x3d, 0xe2, 0x80,
+	0x1f, 0x2c, 0x03, 0x48, 0x67, 0xe2, 0x74, 0x1e, 0x4b, 0x6d, 0xa1, 0x62, 0xca, 0xe2, 0x09, 0xab,
+	0x57, 0x39, 0xe8, 0x9d, 0xa5, 0xaa, 0x1d, 0x04, 0xa9, 0x90, 0x07, 0xa0, 0x27, 0xc9, 0xb0, 0xc7,
+	0xea, 0xb5, 0xe5, 0x81, 0x3a, 0x20, 0x48, 0x85, 0xbc, 0xf6, 0x73, 0x05, 0xe5, 0x83, 0x68, 0xb0,
+	0xd8, 0x6e, 0x57, 0x50, 0x5e, 0x37, 0xf7, 0xb1, 0x22, 0x16, 0x1e, 0xce, 0x89, 0x45, 0x07, 0xe7,
+	0x61, 0x2e, 0x1b, 0xae, 0xb3, 0x8f, 0x0b, 0x40, 0x32, 0x09, 0x34, 0xd5, 0x32, 0x2a, 0x38, 0x6e,
+	0xe8, 0xe0, 0x12, 0x90, 0x9c, 0xb0, 0x8d, 0x57, 0x80, 0xe4, 0x51, 0xd7, 0xc1, 0x65, 0x20, 0x79,
+	0x34, 0xc0, 0x15, 0xe8, 0xb3, 0x5e, 0xe8, 0x18, 0x01, 0x46, 0xf0, 0x6b, 0x87, 0xd0, 0x5d, 0x5c,
+	0x55, 0x8b, 0x48, 0x39, 0xc4, 0x35, 0xf8, 0x4d, 0x6f, 0x36, 0xad, 0x43, 0xbc, 0xaa, 0xb9, 0xa8,
+	0x24, 0x0e, 0xa4, 0xaa, 0xa2, 0x35, 0x1d, 0x6e, 0x08, 0x41, 0x77, 0x66, 0x18, 0xdc, 0x12, 0x08,
+	0x6d, 0x12, 0x23, 0xb0, 0x3a, 0x04, 0x2b, 0xd0, 0xfd, 0xad, 0xf6, 0x1c, 0x25, 0x07, 0x2d, 0xdf,
+	0xa3, 0x6e, 0x8b, 0x12, 0xdf, 0x07, 0x42, 0x5e, 0xfb, 0xb7, 0x82, 0x0a, 0x70, 0x30, 0x81, 0xd7,
+	0xd0, 0x7d, 0xb2, 0x88, 0xa6, 0x1b, 0x46, 0xe8, 0xeb, 0x12, 0x6d, 0x15, 0x55, 0x74, 0x13, 0x2c,
+	0xb3, 0x74, 0x1b, 0xe7, 0xc4, 0xb0, 0x68, 0x7b, 0x36, 0x69, 0x13, 0x87, 0x73, 0xe4, 0x61, 0x0e,
+	0x99, 0x82, 0xbb, 0x00, 0x73, 0xa8, 0x45, 0x1c, 0x8b, 0xef, 0x8a, 0xdc, 0x12, 0xc7, 0x0f, 0x68,
+	0x08, 0xcc, 0xba, 0x8d, 0x4b, 0xb3, 0x39, 0xd5, 0x21, 0x78, 0x05, 0x74, 0x39, 0x6e, 0xdb, 0x72,
+	0xc4, 0xbe, 0x0c, 0xf1, 0x76, 0x77, 0x6d, 0xeb, 0x93, 0x90, 0xe0, 0x0a, 0x28, 0xf6, 0x74, 0x1a,
+	0x08, 0x2c, 0x04, 0x8a, 0x3d, 0x4a, 0x3c, 0xd7, 0xb7, 0x60, 0xa4, 0xe9, 0x36, 0xae, 0x42, 0x30,
+	0x28, 0x69, 0xda, 0xe4, 0xd0, 0xea, 0x90, 0x2e, 0xb8, 0x81, 0x6b, 0xc0, 0x46, 0x89, 0xcd, 0x01,
+	0x05, 0x69, 0x15, 0x74, 0x76, 0x32, 0x9d, 0x6b, 0xda, 0x17, 0x0a, 0x2a, 0x40, 0x37, 0x01, 0xe3,
+	0x9a, 0x2e, 0x6d, 0xcf, 0xb9, 0x5e, 0x43, 0x65, 0xdd, 0x04, 0x83, 0x74, 0x5b, 0x3a, 0x1e, 0x1e,
+	0x5a, 0xb6, 0xa5, 0xd3, 0x47, 0x38, 0x07, 0xca, 0xe6, 0x1c, 0xff, 0x94, 0x50, 0x9c, 0xe7, 0x10,
+	0x96, 0xa3, 0xdb, 0x5d, 0xe2, 0x98, 0x96, 0xd3, 0xc2, 0x05, 0x88, 0x45, 0x8b, 0xd0, 0xd0, 0x31,
+	0x71, 0x11, 0xd6, 0x94, 0xe8, 0xb6, 0xe5, 0x0b, 0xbf, 0x2d, 0x2a, 0x77, 0x2b, 0x90, 0x5a, 0x7f,
+	0xcf, 0xa5, 0x01, 0x2e, 0x43, 0xda, 0x6d, 0xd7, 0x69, 0x89, 0x5a, 0x70, 0xa9, 0x49, 0x28, 0x46,
+	0xc0, 0x2d, 0xaf, 0x81, 0x06, 0xae, 0x6a, 0x04, 0x95, 0x44, 0xdb, 0x02, 0x1b, 0x5a, 0xc4, 0x31,
+	0x09, 0x5d, 0x34, 0xba, 0x49, 0xda, 0x96, 0x63, 0x39, 0x32, 0x5b, 0x6d, 0xdd, 0x37, 0x42, 0x1b,
+	0xb6, 0x39, 0x30, 0xc1, 0x21, 0x61, 0x00, 0xc6, 0x6a, 0x3f, 0x41, 0x05, 0xe8, 0x59, 0x60, 0x74,
+	0xdb, 0x75, 0xcd, 0x39, 0x88, 0x6b, 0x08, 0x1b, 0xae, 0x63, 0xca, 0xc0, 0x76, 0xe1, 0x57, 0xac,
+	0x40, 0x72, 0x78, 0x19, 0xe9, 0xb2, 0x88, 0x60, 0xef, 0x98, 0x96, 0x0c, 0x64, 0x1e, 0x22, 0x6d,
+	0x39, 0x01, 0xa1, 0xd4, 0x6d, 0x65, 0xd9, 0xaf, 0xa2, 0x95, 0xfd, 0x50, 0xd4, 0x58, 0x11, 0x8a,
+	0xce, 0x0f, 0x77, 0xf7, 0xa1, 0xbc, 0x81, 0x50, 0xd2, 0x3e, 0x46, 0x25, 0xd1, 0xec, 0xc0, 0x0f,
+	0x27, 0x6c, 0xef, 0x9e, 0xf7, 0xc3, 0xb7, 0x9c, 0x56, 0x68, 0xeb, 0x14, 0x2b, 0xfc, 0x6e, 0x63,
+	0x87, 0x94, 0x97, 0x5c, 0x19, 0x15, 0xcc, 0x50, 0xb7, 0x71, 0x5e, 0x0b, 0x50, 0x49, 0xb4, 0x38,
+	0x40, 0x10, 0x77, 0x9f, 0x39, 0x84, 0x0a, 0x2a, 0x36, 0x2d, 0xea, 0x07, 0x42, 0xdc, 0x27, 0xe0,
+	0x13, 0xce, 0x01, 0x39, 0xd8, 0xb3, 0xa8, 0x89, 0xf3, 0xe0, 0xe8, 0xac, 0x60, 0xe4, 0xdd, 0xa9,
+	0xa0, 0xdd, 0x43, 0x25, 0xd1, 0xec, 0x38, 0x2a, 0x75, 0xbd, 0x05, 0xbb, 0xc0, 0x12, 0x4e, 0x13,
+	0x21, 0x71, 0xdc, 0xa0, 0x2b, 0xf7, 0x39, 0x6d, 0x1f, 0x55, 0xe7, 0xba, 0x9a, 0xfa, 0x32, 0x7a,
+	0x81, 0x12, 0xc3, 0xf2, 0xa8, 0x6b, 0x58, 0xc1, 0xa3, 0xc5, 0x33, 0x95, 0xfd, 0xc0, 0x4b, 0x0b,
+	0xfc, 0x77, 0x9d, 0xee, 0x1c, 0x2d, 0xa7, 0x4d, 0x50, 0x91, 0x37, 0x33, 0x88, 0x6b, 0x40, 0x9c,
+	0x85, 0x33, 0xf9, 0x22, 0xba, 0x3a, 0x9f, 0x20, 0xfe, 0xb3, 0xf0, 0xb2, 0x19, 0x06, 0x21, 0x25,
+	0x22, 0x48, 0x9e, 0xee, 0x07, 0x38, 0x0f, 0x49, 0xf0, 0x28, 0xf1, 0xc5, 0x65, 0x6f, 0x15, 0x55,
+	0xa6, 0xbd, 0x00, 0x17, 0xc5, 0x07, 0x45, 0x98, 0xed, 0x4b, 0xda, 0x2e, 0x2a, 0xf2, 0xc6, 0x07,
+	0x4a, 0x3b, 0xae, 0x65, 0x90, 0x45, 0xc7, 0x75, 0x63, 0xd6, 0x04, 0x0c, 0x3d, 0xeb, 0x09, 0x39,
+	0xae, 0x42, 0xcf, 0x7a, 0xc9, 0xbf, 0x56, 0xd0, 0xda, 0xe2, 0x8d, 0x4a, 0xdd, 0x44, 0xf8, 0x33,
+	0x16, 0xf5, 0xbb, 0x29, 0xdc, 0x1b, 0xbb, 0xc3, 0xb8, 0xcf, 0x9e, 0xf2, 0xab, 0x4c, 0x91, 0xae,
+	0x01, 0x9d, 0x5f, 0x27, 0x2d, 0xa0, 0xaa, 0x16, 0x2a, 0x1e, 0x47, 0x8f, 0xd9, 0xb1, 0xbc, 0xa3,
+	0xec, 0x2c, 0x75, 0x73, 0x6b, 0xd8, 0x20, 0x4a, 0x05, 0x82, 0xf6, 0x8f, 0x12, 0x2a, 0x72, 0xc2,
+	0x57, 0x6e, 0xc9, 0xfa, 0xee, 0x2e, 0x25, 0x1d, 0xac, 0xf0, 0x96, 0x0a, 0x87, 0x58, 0x54, 0x85,
+	0x6e, 0x76, 0x0c, 0x5b, 0xf4, 0x2f, 0xdd, 0xec, 0xb4, 0x5d, 0x13, 0x17, 0x20, 0x8c, 0x3a, 0xac,
+	0x8a, 0x9c, 0xc1, 0xf3, 0x5c, 0x38, 0xbc, 0x40, 0x0c, 0x02, 0x8a, 0x57, 0x78, 0xc7, 0x0f, 0x0f,
+	0x45, 0xa7, 0xd2, 0xc3, 0x43, 0x08, 0x02, 0xae, 0xa8, 0x25, 0x94, 0x33, 0x0c, 0x8c, 0x40, 0xc4,
+	0xe0, 0xf0, 0xd5, 0xe9, 0x44, 0xe0, 0x6d, 0xdc, 0x80, 0x73, 0x80, 0x57, 0x79, 0x14, 0x61, 0xc9,
+	0xc5, 0xd6, 0xc4, 0xac, 0xf0, 0xf0, 0x7a, 0x36, 0x34, 0x30, 0x30, 0x98, 0x96, 0x6f, 0xb8, 0x21,
+	0xf5, 0x09, 0xbe, 0xca, 0x0b, 0xdf, 0xdd, 0xdd, 0xc7, 0x2a, 0xac, 0xc8, 0xa1, 0x67, 0xe3, 0x17,
+	0x78, 0x83, 0x75, 0x89, 0x7f, 0x60, 0x05, 0x7b, 0xf8, 0x1a, 0xd0, 0x2d, 0xe0, 0x78, 0x11, 0x56,
+	0x6d, 0x9d, 0x3e, 0xc4, 0x2f, 0x01, 0x5a, 0xfb, 0x80, 0xe0, 0x97, 0xc5, 0xa2, 0x83, 0xeb, 0x7c,
+	0x02, 0x91, 0x16, 0xfe, 0x1f, 0x30, 0xd4, 0x71, 0xf0, 0x06, 0x80, 0x38, 0x9e, 0xf4, 0xf9, 0x7f,
+	0xc1, 0x42, 0x87, 0x5b, 0xf8, 0x0a, 0x18, 0xe0, 0x4c, 0x2d, 0x7c, 0x35, 0x1b, 0x5d, 0xff, 0xc7,
+	0xfb, 0x08, 0x3f, 0xb0, 0xf8, 0x35, 0x18, 0x4f, 0x1e, 0xbe, 0x2e, 0xdb, 0xb3, 0x1e, 0xe8, 0x87,
+	0x96, 0x8f, 0x5f, 0x17, 0x25, 0x41, 0x03, 0x40, 0xd4, 0xf8, 0x58, 0xe3, 0x81, 0x78, 0x83, 0xd7,
+	0x25, 0x58, 0xf8, 0xa6, 0x58, 0xf9, 0x3e, 0xbe, 0xc1, 0x79, 0x5d, 0x3f, 0x00, 0x9b, 0xfe, 0x5f,
+	0x96, 0x2b, 0xe7, 0xbe, 0x39, 0xdd, 0x38, 0xfb, 0x78, 0x53, 0x9c, 0x3c, 0x02, 0x91, 0x79, 0x4b,
+	0xcc, 0x4e, 0xd2, 0xc4, 0xb7, 0xe4, 0xca, 0xc3, 0xb7, 0xb9, 0x16, 0xea, 0x3a, 0x36, 0x7e, 0x3b,
+	0x1b, 0xa8, 0xef, 0x80, 0x87, 0x9e, 0x8f, 0x1b, 0xe0, 0xe1, 0x27, 0xa1, 0xee, 0x70, 0x7b, 0xb6,
+	0x80, 0x93, 0x1a, 0xb0, 0x7c, 0x17, 0x7e, 0xe0, 0x4b, 0x4a, 0x6c, 0x7c, 0x87, 0xff, 0x60, 0x52,
+	0xd7, 0xc3, 0xdb, 0x00, 0x01, 0x0a, 0x76, 0xc0, 0x06, 0x4a, 0xda, 0x8e, 0xee, 0x04, 0xf8, 0x3d,
+	0x71, 0x72, 0xc1, 0x4f, 0xc7, 0x0c, 0xdb, 0xf8, 0x7d, 0xd0, 0x4e, 0x5d, 0x37, 0xc0, 0x77, 0x61,
+	0xe5, 0x43, 0x70, 0x3e, 0xe0, 0xab, 0xb0, 0xd9, 0xc4, 0xf7, 0x60, 0xc5, 0x35, 0x7e, 0x8f, 0x37,
+	0x1d, 0xd7, 0xb3, 0x0c, 0x7c, 0x9f, 0x0f, 0x76, 0x20, 0x3e, 0x58, 0x18, 0x44, 0x1f, 0x02, 0xcb,
+	0x21, 0x77, 0xfb, 0xfb, 0xbc, 0x5d, 0x85, 0x7c, 0xd6, 0x7f, 0xc4, 0x25, 0xad, 0xc0, 0x26, 0xf8,
+	0x07, 0x62, 0x1e, 0x75, 0xbc, 0x3d, 0x90, 0xfe, 0x58, 0x96, 0x1c, 0x1c, 0x43, 0xac, 0xf3, 0xea,
+	0x0c, 0x0f, 0x3b, 0x1d, 0xbc, 0x0b, 0x4b, 0x93, 0x6b, 0x35, 0x80, 0xa5, 0xe9, 0x52, 0x62, 0xb5,
+	0x1c, 0x6c, 0x42, 0x28, 0x1e, 0x1e, 0x60, 0xc2, 0x27, 0x8c, 0xe5, 0x07, 0xb8, 0x29, 0xee, 0x24,
+	0x6d, 0x03, 0xb7, 0x78, 0x01, 0xb8, 0x6d, 0x51, 0x97, 0x7b, 0x30, 0x11, 0xb2, 0x1d, 0x4f, 0xbc,
+	0xc5, 0x39, 0xc3, 0xb6, 0x81, 0xf7, 0x21, 0x2c, 0x86, 0xeb, 0xe1, 0x87, 0x10, 0x09, 0xd3, 0xf2,
+	0xf9, 0xf0, 0x26, 0x26, 0xb6, 0xb5, 0x3f, 0x2b, 0x68, 0x75, 0xe1, 0x5b, 0xf7, 0x9b, 0x7d, 0x1f,
+	0x92, 0x85, 0x57, 0x81, 0x3b, 0xcb, 0x7c, 0x64, 0xcf, 0x3d, 0x0e, 0x68, 0xef, 0xca, 0x0f, 0x65,
+	0x8c, 0x6a, 0xf2, 0x3d, 0xe5, 0x59, 0x7d, 0x1c, 0xa1, 0x92, 0xe1, 0xb6, 0xdb, 0xf0, 0xad, 0xac,
+	0xb5, 0x50, 0x39, 0x33, 0x45, 0xad, 0xcf, 0xde, 0x7b, 0xc4, 0x67, 0xf9, 0xf4, 0xb5, 0xe7, 0x75,
+	0x54, 0x7b, 0xcc, 0x06, 0xc3, 0xb8, 0x9b, 0x1c, 0x1d, 0x4d, 0x98, 0xf8, 0xa4, 0x2a, 0xd2, 0x2a,
+	0xa7, 0xb9, 0x9c, 0xa4, 0xfd, 0x5e, 0x41, 0x2f, 0xeb, 0x71, 0x74, 0x7c, 0xf6, 0x63, 0x36, 0x7b,
+	0x16, 0x61, 0x3f, 0x3a, 0x65, 0x93, 0x54, 0x35, 0x50, 0xb9, 0x2f, 0xdf, 0x97, 0x2e, 0x19, 0x9e,
+	0xec, 0x39, 0x8a, 0x4e, 0x05, 0x55, 0x0f, 0xad, 0xb2, 0xb8, 0x97, 0xf4, 0x87, 0xf1, 0xa0, 0x3b,
+	0x17, 0xab, 0xdb, 0x17, 0xc6, 0x4a, 0xc8, 0xf0, 0x28, 0xd5, 0xd8, 0xdc, 0x4e, 0xfb, 0x9b, 0x82,
+	0xea, 0x5f, 0x35, 0x79, 0x32, 0x4a, 0x60, 0x0e, 0x1d, 0x20, 0x35, 0x53, 0xdd, 0x9d, 0xbd, 0x0b,
+	0x29, 0x4b, 0xbe, 0x0b, 0x5d, 0xcd, 0x30, 0x16, 0x3e, 0x9e, 0xa7, 0xef, 0x61, 0xb9, 0xc5, 0xf7,
+	0x30, 0x95, 0x88, 0x37, 0x28, 0x16, 0xf7, 0xd8, 0x44, 0xbe, 0xee, 0xdc, 0xbc, 0x84, 0x2e, 0xe0,
+	0xa7, 0x33, 0x49, 0xb8, 0xe4, 0xbd, 0x24, 0x1d, 0xe3, 0xa5, 0x32, 0x64, 0x93, 0xef, 0x78, 0x2a,
+	0x9e, 0x4e, 0x8b, 0x67, 0x66, 0xb0, 0x4c, 0x84, 0x8e, 0xca, 0x4c, 0xd2, 0xea, 0x0a, 0x0f, 0xc9,
+	0x8d, 0x4b, 0x1d, 0x0f, 0x3a, 0x15, 0x7b, 0x5e, 0xc8, 0xb5, 0xdf, 0x29, 0xe8, 0x5a, 0x56, 0x04,
+	0x67, 0x71, 0x1a, 0x3d, 0xfd, 0x8e, 0x47, 0xea, 0x8f, 0x0a, 0x7a, 0xf1, 0x9c, 0xbd, 0x32, 0x50,
+	0x0b, 0xc5, 0xa3, 0x7c, 0xdd, 0xe2, 0x51, 0x3f, 0x44, 0x25, 0x7e, 0x71, 0x99, 0xd4, 0x73, 0x1c,
+	0xe3, 0xcd, 0x8b, 0x3a, 0x19, 0x30, 0x53, 0x29, 0xb3, 0x10, 0xea, 0xfc, 0xb9, 0x50, 0xff, 0x3a,
+	0x8f, 0x5e, 0xd0, 0xc5, 0xa3, 0x35, 0x83, 0xa6, 0xf3, 0xad, 0x46, 0xfa, 0x00, 0x95, 0x8f, 0x58,
+	0x94, 0x9e, 0x8e, 0xd9, 0x44, 0xbe, 0xad, 0x3d, 0xb8, 0x00, 0xe4, 0x19, 0xa6, 0x34, 0x9a, 0x12,
+	0x82, 0x4e, 0xc1, 0xbe, 0x9a, 0xc2, 0xfc, 0x37, 0x4c, 0xe1, 0xc6, 0xaf, 0x14, 0x54, 0xce, 0x14,
+	0xa9, 0x37, 0xd0, 0x1a, 0x7b, 0x9a, 0x8e, 0xa3, 0x5e, 0xda, 0x9d, 0xf0, 0x7c, 0xf2, 0x10, 0x94,
+	0xe9, 0xaa, 0xa4, 0x8a, 0x24, 0xab, 0x6f, 0x21, 0x9c, 0xb1, 0x4d, 0x4f, 0x43, 0x8e, 0x33, 0xae,
+	0x4b, 0x7a, 0x76, 0x70, 0xd4, 0x0f, 0xd1, 0x46, 0xc6, 0xfa, 0x8c, 0x0e, 0x96, 0xe7, 0x42, 0x75,
+	0xc9, 0x61, 0x9e, 0x6f, 0x4f, 0xda, 0x97, 0x39, 0x38, 0x0f, 0xf3, 0x91, 0xf9, 0x2e, 0x95, 0xd7,
+	0x7c, 0x33, 0xc8, 0x7f, 0xbd, 0x66, 0xf0, 0xec, 0xc6, 0x5e, 0xf8, 0x76, 0x1b, 0x7b, 0x71, 0xb1,
+	0xf4, 0x6f, 0xdd, 0x43, 0xb5, 0xf9, 0x82, 0x10, 0x77, 0x11, 0x87, 0xe0, 0x2b, 0xb0, 0x0a, 0x83,
+	0xe6, 0x3d, 0x71, 0x3d, 0x0f, 0x83, 0xe6, 0x9d, 0xbb, 0xe2, 0x7a, 0x1e, 0x06, 0xcd, 0x9d, 0x6d,
+	0x9c, 0xdf, 0xfe, 0x4b, 0x11, 0xad, 0xdb, 0x12, 0xc6, 0x17, 0xff, 0xff, 0xa8, 0x7f, 0x50, 0x10,
+	0x3e, 0x3f, 0xb8, 0xd4, 0xbb, 0x17, 0x96, 0xfb, 0x33, 0x87, 0xf3, 0xc6, 0x07, 0x4b, 0xcb, 0x89,
+	0x82, 0xd0, 0x1a, 0x3f, 0xfd, 0xf2, 0xef, 0xbf, 0xcc, 0x6d, 0x6a, 0x6f, 0x4c, 0xff, 0x00, 0xcb,
+	0x62, 0x32, 0xb9, 0x1f, 0x9d, 0x13, 0xba, 0xaf, 0xdc, 0x52, 0xbf, 0x50, 0xd0, 0xfa, 0xb9, 0x26,
+	0xaf, 0xbe, 0x7f, 0x39, 0xe5, 0xe7, 0xa6, 0xd8, 0xc6, 0xdd, 0x65, 0xc5, 0xa4, 0xc9, 0xef, 0x70,
+	0x93, 0x6f, 0x6a, 0xda, 0x7f, 0x37, 0x39, 0x93, 0x01, 0x8b, 0x7f, 0xab, 0xa0, 0xd5, 0x85, 0x5e,
+	0xab, 0xee, 0x5c, 0x32, 0x58, 0xf3, 0x93, 0x64, 0xe3, 0xbd, 0xe5, 0x84, 0xa4, 0xad, 0xb7, 0xb9,
+	0xad, 0x37, 0xb4, 0xeb, 0xcf, 0x09, 0x2f, 0x97, 0x00, 0x4b, 0x7f, 0xa3, 0xa0, 0xda, 0xfc, 0xa9,
+	0x55, 0xb7, 0x97, 0x6f, 0x7e, 0x1b, 0x3b, 0x4b, 0xc9, 0x48, 0x33, 0x6f, 0x71, 0x33, 0xdf, 0xd4,
+	0x5e, 0x7b, 0xa6, 0x99, 0x33, 0x81, 0xfb, 0xca, 0xad, 0xdd, 0x07, 0xe8, 0xf5, 0x5e, 0x72, 0xf2,
+	0x7c, 0x2d, 0xbb, 0xd7, 0xce, 0x55, 0xbb, 0x37, 0x4e, 0xd2, 0xc4, 0x53, 0x1e, 0x97, 0xf8, 0x7f,
+	0x9c, 0x3b, 0xff, 0x09, 0x00, 0x00, 0xff, 0xff, 0x57, 0xaf, 0x92, 0x3b, 0xa4, 0x1d, 0x00, 0x00,
 }
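
For orientation, the sketch below shows how the gRPC stubs generated in the file above would typically be called. It is a minimal, hypothetical example: the constructor, enum value, and oneof wrapper names (NewLanguageServiceClient, Document_PLAIN_TEXT, Document_Content) are assumed to follow the usual protoc-gen-go conventions and are not verified by this diff.

// Hypothetical usage sketch of the generated v1 stubs; naming follows the
// usual protoc-gen-go conventions and is assumed, not part of this change.
package main

import (
	"fmt"
	"log"

	context "golang.org/x/net/context"
	grpc "google.golang.org/grpc"

	langpb "google.golang.org/genproto/googleapis/cloud/language/v1"
)

func main() {
	// A real client needs TLS and OAuth credentials; this keeps the sketch short.
	conn, err := grpc.Dial("language.googleapis.com:443", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	client := langpb.NewLanguageServiceClient(conn)
	resp, err := client.AnalyzeSentiment(context.Background(), &langpb.AnalyzeSentimentRequest{
		Document: &langpb.Document{
			Type:   langpb.Document_PLAIN_TEXT,                                  // assumed enum value name
			Source: &langpb.Document_Content{Content: "The weather is lovely."}, // assumed oneof wrapper name
		},
	})
	if err != nil {
		log.Fatalf("AnalyzeSentiment: %v", err)
	}
	fmt.Printf("%+v\n", resp)
}
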
diff --git a/googleapis/cloud/language/v1beta1/language_service.proto b/googleapis/cloud/language/v1beta1/language_service.proto
index d753c3b49b4a1b30563e3a33efc90aca90672651..40b65c7f8d22582498a8d6178ae525540841d6f3 100644
--- a/googleapis/cloud/language/v1beta1/language_service.proto
+++ b/googleapis/cloud/language/v1beta1/language_service.proto
@@ -22,7 +22,6 @@ option java_multiple_files = true;
 option java_outer_classname = "LanguageServiceProto";
 option java_package = "com.google.cloud.language.v1beta1";
 
-option go_package = "google.golang.org/genproto/googleapis/cloud/language/v1beta1";
 
 // Provides text analysis operations such as sentiment analysis and entity
 // recognition.
@@ -38,10 +37,15 @@ service LanguageService {
     option (google.api.http) = { post: "/v1beta1/documents:analyzeEntities" body: "*" };
   }
 
-  // Advanced API that analyzes the document and provides a full set of text
-  // annotations, including semantic, syntactic, and sentiment information. This
-  // API is intended for users who are familiar with machine learning and need
-  // in-depth text features to build upon.
+  // Analyzes the syntax of the text and provides sentence boundaries and
+  // tokenization along with part of speech tags, dependency trees, and other
+  // properties.
+  rpc AnalyzeSyntax(AnalyzeSyntaxRequest) returns (AnalyzeSyntaxResponse) {
+    option (google.api.http) = { post: "/v1beta1/documents:analyzeSyntax" body: "*" };
+  }
+
+  // A convenience method that provides all the features that analyzeSentiment,
+  // analyzeEntities, and analyzeSyntax provide in one call.
   rpc AnnotateText(AnnotateTextRequest) returns (AnnotateTextResponse) {
     option (google.api.http) = { post: "/v1beta1/documents:annotateText" body: "*" };
   }
@@ -74,6 +78,9 @@ message Document {
     string content = 2;
 
     // The Google Cloud Storage URI where the file content is located.
+    // This URI must be of the form: gs://bucket_name/object_name. For more
+    // details, see https://cloud.google.com/storage/docs/reference-uris.
+    // NOTE: Cloud Storage object versioning is not supported.
     string gcs_content_uri = 3;
   }
 
@@ -95,6 +102,11 @@ message Document {
 message Sentence {
   // The sentence text.
   TextSpan text = 1;
+
+  // For calls to [AnalyzeSentiment][] or if
+  // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta1.AnnotateTextRequest.Features.extract_document_sentiment] is set to
+  // true, this field will contain the sentiment for the sentence.
+  Sentiment sentiment = 2;
 }
 
 // Represents a phrase in the text that is a known entity, such as
@@ -136,8 +148,8 @@ message Entity {
 
   // Metadata associated with the entity.
   //
-  // Currently, only Wikipedia URLs are provided, if available.
-  // The associated key is "wikipedia_url".
+  // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if
+  // available. The associated keys are "wikipedia_url" and "mid", respectively.
   map<string, string> metadata = 3;
 
   // The salience score associated with the entity in the [0, 1.0] range.
@@ -164,22 +176,26 @@ message Token {
   // Dependency tree parse for this token.
   DependencyEdge dependency_edge = 3;
 
-  // [Lemma](https://en.wikipedia.org/wiki/Lemma_(morphology))
-  // of the token.
+  // [Lemma](https://en.wikipedia.org/wiki/Lemma_%28morphology%29) of the token.
   string lemma = 4;
 }
 
 // Represents the feeling associated with the entire text or entities in
 // the text.
 message Sentiment {
-  // Polarity of the sentiment in the [-1.0, 1.0] range. Larger numbers
-  // represent more positive sentiments.
+  // DEPRECATED FIELD - This field is being deprecated in
+  // favor of score. Please refer to our documentation at
+  // https://cloud.google.com/natural-language/docs for more information.
   float polarity = 1;
 
   // A non-negative number in the [0, +inf) range, which represents
-  // the absolute magnitude of sentiment regardless of polarity (positive or
+  // the absolute magnitude of sentiment regardless of score (positive or
   // negative).
   float magnitude = 2;
+
+  // Sentiment score between -1.0 (negative sentiment) and 1.0
+  // (positive sentiment).
+  float score = 3;
 }
 
 // Represents part of speech information for a token.
@@ -229,8 +245,285 @@ message PartOfSpeech {
     AFFIX = 13;
   }
 
+  // The characteristic of a verb that expresses time flow during an event.
+  enum Aspect {
+    // Aspect is not applicable in the analyzed language or is not predicted.
+    ASPECT_UNKNOWN = 0;
+
+    // Perfective
+    PERFECTIVE = 1;
+
+    // Imperfective
+    IMPERFECTIVE = 2;
+
+    // Progressive
+    PROGRESSIVE = 3;
+  }
+
+  // The grammatical function performed by a noun or pronoun in a phrase,
+  // clause, or sentence. In some languages, other parts of speech, such as
+  // adjective and determiner, take case inflection in agreement with the noun.
+  enum Case {
+    // Case is not applicable in the analyzed language or is not predicted.
+    CASE_UNKNOWN = 0;
+
+    // Accusative
+    ACCUSATIVE = 1;
+
+    // Adverbial
+    ADVERBIAL = 2;
+
+    // Complementive
+    COMPLEMENTIVE = 3;
+
+    // Dative
+    DATIVE = 4;
+
+    // Genitive
+    GENITIVE = 5;
+
+    // Instrumental
+    INSTRUMENTAL = 6;
+
+    // Locative
+    LOCATIVE = 7;
+
+    // Nominative
+    NOMINATIVE = 8;
+
+    // Oblique
+    OBLIQUE = 9;
+
+    // Partitive
+    PARTITIVE = 10;
+
+    // Prepositional
+    PREPOSITIONAL = 11;
+
+    // Reflexive
+    REFLEXIVE_CASE = 12;
+
+    // Relative
+    RELATIVE_CASE = 13;
+
+    // Vocative
+    VOCATIVE = 14;
+  }
+
+  // Depending on the language, Form categorizes different forms of verbs,
+  // adjectives, adverbs, etc. For example, it categorizes inflected endings
+  // of verbs and adjectives, or distinguishes between short and long forms
+  // of adjectives and participles.
+  enum Form {
+    // Form is not applicable in the analyzed language or is not predicted.
+    FORM_UNKNOWN = 0;
+
+    // Adnomial
+    ADNOMIAL = 1;
+
+    // Auxiliary
+    AUXILIARY = 2;
+
+    // Complementizer
+    COMPLEMENTIZER = 3;
+
+    // Final ending
+    FINAL_ENDING = 4;
+
+    // Gerund
+    GERUND = 5;
+
+    // Realis
+    REALIS = 6;
+
+    // Irrealis
+    IRREALIS = 7;
+
+    // Short form
+    SHORT = 8;
+
+    // Long form
+    LONG = 9;
+
+    // Order form
+    ORDER = 10;
+
+    // Specific form
+    SPECIFIC = 11;
+  }
+
+  // Gender classes of nouns reflected in the behaviour of associated words.
+  enum Gender {
+    // Gender is not applicable in the analyzed language or is not predicted.
+    GENDER_UNKNOWN = 0;
+
+    // Feminine
+    FEMININE = 1;
+
+    // Masculine
+    MASCULINE = 2;
+
+    // Neuter
+    NEUTER = 3;
+  }
+
+  // The grammatical feature of verbs, used for showing modality and attitude.
+  enum Mood {
+    // Mood is not applicable in the analyzed language or is not predicted.
+    MOOD_UNKNOWN = 0;
+
+    // Conditional
+    CONDITIONAL_MOOD = 1;
+
+    // Imperative
+    IMPERATIVE = 2;
+
+    // Indicative
+    INDICATIVE = 3;
+
+    // Interrogative
+    INTERROGATIVE = 4;
+
+    // Jussive
+    JUSSIVE = 5;
+
+    // Subjunctive
+    SUBJUNCTIVE = 6;
+  }
+
+  // Count distinctions.
+  enum Number {
+    // Number is not applicable in the analyzed language or is not predicted.
+    NUMBER_UNKNOWN = 0;
+
+    // Singular
+    SINGULAR = 1;
+
+    // Plural
+    PLURAL = 2;
+
+    // Dual
+    DUAL = 3;
+  }
+
+  // The distinction between the speaker, second person, third person, etc.
+  enum Person {
+    // Person is not applicable in the analyzed language or is not predicted.
+    PERSON_UNKNOWN = 0;
+
+    // First
+    FIRST = 1;
+
+    // Second
+    SECOND = 2;
+
+    // Third
+    THIRD = 3;
+
+    // Reflexive
+    REFLEXIVE_PERSON = 4;
+  }
+
+  // This category shows if the token is part of a proper name.
+  enum Proper {
+    // Proper is not applicable in the analyzed language or is not predicted.
+    PROPER_UNKNOWN = 0;
+
+    // Proper
+    PROPER = 1;
+
+    // Not proper
+    NOT_PROPER = 2;
+  }
+
+  // Reciprocal features of a pronoun.
+  enum Reciprocity {
+    // Reciprocity is not applicable in the analyzed language or is not
+    // predicted.
+    RECIPROCITY_UNKNOWN = 0;
+
+    // Reciprocal
+    RECIPROCAL = 1;
+
+    // Non-reciprocal
+    NON_RECIPROCAL = 2;
+  }
+
+  // Time reference.
+  enum Tense {
+    // Tense is not applicable in the analyzed language or is not predicted.
+    TENSE_UNKNOWN = 0;
+
+    // Conditional
+    CONDITIONAL_TENSE = 1;
+
+    // Future
+    FUTURE = 2;
+
+    // Past
+    PAST = 3;
+
+    // Present
+    PRESENT = 4;
+
+    // Imperfect
+    IMPERFECT = 5;
+
+    // Pluperfect
+    PLUPERFECT = 6;
+  }
+
+  // The relationship between the action that a verb expresses and the
+  // participants identified by its arguments.
+  enum Voice {
+    // Voice is not applicable in the analyzed language or is not predicted.
+    VOICE_UNKNOWN = 0;
+
+    // Active
+    ACTIVE = 1;
+
+    // Causative
+    CAUSATIVE = 2;
+
+    // Passive
+    PASSIVE = 3;
+  }
+
   // The part of speech tag.
   Tag tag = 1;
+
+  // The grammatical aspect.
+  Aspect aspect = 2;
+
+  // The grammatical case.
+  Case case = 3;
+
+  // The grammatical form.
+  Form form = 4;
+
+  // The grammatical gender.
+  Gender gender = 5;
+
+  // The grammatical mood.
+  Mood mood = 6;
+
+  // The grammatical number.
+  Number number = 7;
+
+  // The grammatical person.
+  Person person = 8;
+
+  // The grammatical properness.
+  Proper proper = 9;
+
+  // The grammatical reciprocity.
+  Reciprocity reciprocity = 10;
+
+  // The grammatical tense.
+  Tense tense = 11;
+
+  // The grammatical voice.
+  Voice voice = 12;
 }
 
 // Represents dependency parse tree information for a token.
@@ -483,8 +776,23 @@ message DependencyEdge {
 // Represents a mention for an entity in the text. Currently, proper noun
 // mentions are supported.
 message EntityMention {
+  // The supported types of mentions.
+  enum Type {
+    // Unknown
+    TYPE_UNKNOWN = 0;
+
+    // Proper name
+    PROPER = 1;
+
+    // Common noun (or noun compound)
+    COMMON = 2;
+  }
+
   // The mention text.
   TextSpan text = 1;
+
+  // The type of the entity mention.
+  Type type = 2;
 }
 
 // Represents an output piece of text.
@@ -502,6 +810,10 @@ message AnalyzeSentimentRequest {
   // Input document. Currently, `analyzeSentiment` only supports English text
   // ([Document.language][google.cloud.language.v1beta1.Document.language]="EN").
   Document document = 1;
+
+  // The encoding type used by the API to calculate sentence offsets for the
+  // sentence sentiment.
+  EncodingType encoding_type = 2;
 }
 
 // The sentiment analysis response message.
@@ -512,6 +824,9 @@ message AnalyzeSentimentResponse {
   // The language of the text, which will be the same as the language specified
   // in the request or, if not specified, the automatically-detected language.
   string language = 2;
+
+  // The sentiment for all the sentences in the document.
+  repeated Sentence sentences = 3;
 }
 
 // The entity analysis request message.
@@ -530,11 +845,35 @@ message AnalyzeEntitiesResponse {
 
   // The language of the text, which will be the same as the language specified
   // in the request or, if not specified, the automatically-detected language.
+  // See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
   string language = 2;
 }
 
-// The request message for the advanced text annotation API, which performs all
-// the above plus syntactic analysis.
+// The syntax analysis request message.
+message AnalyzeSyntaxRequest {
+  // Input document.
+  Document document = 1;
+
+  // The encoding type used by the API to calculate offsets.
+  EncodingType encoding_type = 2;
+}
+
+// The syntax analysis response message.
+message AnalyzeSyntaxResponse {
+  // Sentences in the input document.
+  repeated Sentence sentences = 1;
+
+  // Tokens, along with their syntactic information, in the input document.
+  repeated Token tokens = 2;
+
+  // The language of the text, which will be the same as the language specified
+  // in the request or, if not specified, the automatically-detected language.
+  // See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
+  string language = 3;
+}
+
+// The request message for the text annotation API, which can perform multiple
+// analysis types (sentiment, entities, and syntax) in one call.
 message AnnotateTextRequest {
   // All available features for sentiment, syntax, and semantic analysis.
   // Setting each one to true will enable that specific analysis for the input.
@@ -581,6 +920,7 @@ message AnnotateTextResponse {
 
   // The language of the text, which will be the same as the language specified
   // in the request or, if not specified, the automatically-detected language.
+  // See [Document.language][google.cloud.language.v1beta1.Document.language] field for more details.
   string language = 5;
 }
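
The hunks above add the AnalyzeSyntax RPC, sentence-level sentiment, and the Sentiment.score field to v1beta1. Below is a minimal, hypothetical sketch of exercising that surface; it assumes the Go bindings for v1beta1 are regenerated from this proto with the usual protoc-gen-go naming, and none of the identifiers it uses are introduced by this diff.

// Hypothetical exercise of the new v1beta1 surface (AnalyzeSyntax,
// sentence-level sentiment, Sentiment.score); all names are assumed to
// follow standard protoc-gen-go output for this proto.
package example

import (
	"fmt"

	context "golang.org/x/net/context"

	langpb "google.golang.org/genproto/googleapis/cloud/language/v1beta1" // assumed import path
)

func describe(ctx context.Context, client langpb.LanguageServiceClient, text string) error {
	doc := &langpb.Document{
		Type:   langpb.Document_PLAIN_TEXT,              // assumed enum value name
		Source: &langpb.Document_Content{Content: text}, // assumed oneof wrapper name
	}

	// New RPC: sentence boundaries, tokens, part-of-speech and dependency edges.
	syntax, err := client.AnalyzeSyntax(ctx, &langpb.AnalyzeSyntaxRequest{
		Document:     doc,
		EncodingType: langpb.EncodingType_UTF8, // assumed enum value name
	})
	if err != nil {
		return err
	}
	fmt.Printf("sentences=%d tokens=%d\n", len(syntax.GetSentences()), len(syntax.GetTokens()))

	// Sentence-level sentiment and the new score field (which replaces polarity).
	sent, err := client.AnalyzeSentiment(ctx, &langpb.AnalyzeSentimentRequest{
		Document:     doc,
		EncodingType: langpb.EncodingType_UTF8,
	})
	if err != nil {
		return err
	}
	for _, s := range sent.GetSentences() {
		fmt.Printf("sentence score=%.2f magnitude=%.2f\n",
			s.GetSentiment().Score, s.GetSentiment().Magnitude)
	}
	return nil
}
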
 
diff --git a/googleapis/tracing/trace.pb.go b/googleapis/tracing/trace.pb.go
new file mode 100644
index 0000000000000000000000000000000000000000..18183ca852949f5a6336231c4bd4d2f0d5379739
--- /dev/null
+++ b/googleapis/tracing/trace.pb.go
@@ -0,0 +1,753 @@
+// Code generated by protoc-gen-go.
+// source: google.golang.org/genproto/googleapis/tracing/trace.proto
+// DO NOT EDIT!
+
+/*
+Package google_tracing_v1 is a generated protocol buffer package.
+
+It is generated from these files:
+	google.golang.org/genproto/googleapis/tracing/trace.proto
+
+It has these top-level messages:
+	TraceId
+	Module
+	StackTrace
+	LabelValue
+	Span
+	Trace
+*/
+package google_tracing_v1 // import "google.golang.org/genproto/googleapis/tracing"
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import _ "google.golang.org/genproto/googleapis/api/serviceconfig"
+import google_protobuf1 "github.com/golang/protobuf/ptypes/timestamp"
+import google_rpc "google.golang.org/genproto/googleapis/rpc/status"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+// The type of the network event. SENT or RECV event.
+type Span_TimeEvent_NetworkEvent_Type int32
+
+const (
+	Span_TimeEvent_NetworkEvent_UNSPECIFIED Span_TimeEvent_NetworkEvent_Type = 0
+	Span_TimeEvent_NetworkEvent_SENT        Span_TimeEvent_NetworkEvent_Type = 1
+	Span_TimeEvent_NetworkEvent_RECV        Span_TimeEvent_NetworkEvent_Type = 2
+)
+
+var Span_TimeEvent_NetworkEvent_Type_name = map[int32]string{
+	0: "UNSPECIFIED",
+	1: "SENT",
+	2: "RECV",
+}
+var Span_TimeEvent_NetworkEvent_Type_value = map[string]int32{
+	"UNSPECIFIED": 0,
+	"SENT":        1,
+	"RECV":        2,
+}
+
+func (x Span_TimeEvent_NetworkEvent_Type) String() string {
+	return proto.EnumName(Span_TimeEvent_NetworkEvent_Type_name, int32(x))
+}
+func (Span_TimeEvent_NetworkEvent_Type) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{4, 0, 1, 0}
+}
+
+// The type of the link.
+type Span_Link_Type int32
+
+const (
+	Span_Link_UNSPECIFIED Span_Link_Type = 0
+	Span_Link_CHILD       Span_Link_Type = 1
+	Span_Link_PARENT      Span_Link_Type = 2
+)
+
+var Span_Link_Type_name = map[int32]string{
+	0: "UNSPECIFIED",
+	1: "CHILD",
+	2: "PARENT",
+}
+var Span_Link_Type_value = map[string]int32{
+	"UNSPECIFIED": 0,
+	"CHILD":       1,
+	"PARENT":      2,
+}
+
+func (x Span_Link_Type) String() string {
+	return proto.EnumName(Span_Link_Type_name, int32(x))
+}
+func (Span_Link_Type) EnumDescriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 1, 0} }
+
+// A TraceId uniquely identifies a Trace. It is conceptually a 128-bit value,
+// represented as a string, containing the hex-encoded value.
+type TraceId struct {
+	// Trace ID specified as a hex-encoded string. *Must* be 32 bytes long.
+	HexEncoded string `protobuf:"bytes,1,opt,name=hex_encoded,json=hexEncoded" json:"hex_encoded,omitempty"`
+}
+
+func (m *TraceId) Reset()                    { *m = TraceId{} }
+func (m *TraceId) String() string            { return proto.CompactTextString(m) }
+func (*TraceId) ProtoMessage()               {}
+func (*TraceId) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
+
+type Module struct {
+	// Binary module.
+	// E.g. main binary, kernel modules, and dynamic libraries
+	// such as libc.so, sharedlib.so.
+	Module string `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	// Build_id is a unique identifier for the module,
+	// probably a hash of its contents.
+	BuildId string `protobuf:"bytes,2,opt,name=build_id,json=buildId" json:"build_id,omitempty"`
+}
+
+func (m *Module) Reset()                    { *m = Module{} }
+func (m *Module) String() string            { return proto.CompactTextString(m) }
+func (*Module) ProtoMessage()               {}
+func (*Module) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
+
+type StackTrace struct {
+	// Stack frames of this stack trace.
+	StackFrame []*StackTrace_StackFrame `protobuf:"bytes,1,rep,name=stack_frame,json=stackFrame" json:"stack_frame,omitempty"`
+	// The user can choose their own hash function to hash large labels in order
+	// to save network bandwidth and storage.
+	// Typical usage is to pass both the stack frames and the hash initially, to
+	// inform the storage of the mapping; subsequent calls can then pass in
+	// stack_trace_hash_id only. The user should verify the hash value was stored.
+	StackTraceHashId uint64 `protobuf:"varint,2,opt,name=stack_trace_hash_id,json=stackTraceHashId" json:"stack_trace_hash_id,omitempty"`
+}
+
+func (m *StackTrace) Reset()                    { *m = StackTrace{} }
+func (m *StackTrace) String() string            { return proto.CompactTextString(m) }
+func (*StackTrace) ProtoMessage()               {}
+func (*StackTrace) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
+
+func (m *StackTrace) GetStackFrame() []*StackTrace_StackFrame {
+	if m != nil {
+		return m.StackFrame
+	}
+	return nil
+}
+
+// Presents a single stack frame in a stack trace.
+type StackTrace_StackFrame struct {
+	// Fully qualified name which uniquely identifies the function/method/etc.
+	FunctionName string `protobuf:"bytes,1,opt,name=function_name,json=functionName" json:"function_name,omitempty"`
+	// Used when function name is ‘mangled’. Not guaranteed to be fully
+	// qualified but usually it is.
+	OrigFunctionName string `protobuf:"bytes,2,opt,name=orig_function_name,json=origFunctionName" json:"orig_function_name,omitempty"`
+	// File name of the frame.
+	FileName string `protobuf:"bytes,3,opt,name=file_name,json=fileName" json:"file_name,omitempty"`
+	// Line number of the frame.
+	LineNumber int64 `protobuf:"varint,4,opt,name=line_number,json=lineNumber" json:"line_number,omitempty"`
+	// Column number is important in JavaScript (anonymous functions).
+	// It might not be available in some languages.
+	ColumnNumber int64 `protobuf:"varint,5,opt,name=column_number,json=columnNumber" json:"column_number,omitempty"`
+	// Binary module the code is loaded from.
+	LoadModule *Module `protobuf:"bytes,6,opt,name=load_module,json=loadModule" json:"load_module,omitempty"`
+	// source_version is deployment specific. It might be
+	// better stored in deployment metadata.
+	// However, in distributed tracing, it’s hard to keep track of
+	// source/binary versions in one place for all spans.
+	SourceVersion string `protobuf:"bytes,7,opt,name=source_version,json=sourceVersion" json:"source_version,omitempty"`
+}
+
+func (m *StackTrace_StackFrame) Reset()                    { *m = StackTrace_StackFrame{} }
+func (m *StackTrace_StackFrame) String() string            { return proto.CompactTextString(m) }
+func (*StackTrace_StackFrame) ProtoMessage()               {}
+func (*StackTrace_StackFrame) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2, 0} }
+
+func (m *StackTrace_StackFrame) GetLoadModule() *Module {
+	if m != nil {
+		return m.LoadModule
+	}
+	return nil
+}
+
+// Allowed label values.
+type LabelValue struct {
+	// The value of the label.
+	//
+	// Types that are valid to be assigned to Value:
+	//	*LabelValue_StringValue
+	//	*LabelValue_IntValue
+	//	*LabelValue_BoolValue
+	Value isLabelValue_Value `protobuf_oneof:"value"`
+}
+
+func (m *LabelValue) Reset()                    { *m = LabelValue{} }
+func (m *LabelValue) String() string            { return proto.CompactTextString(m) }
+func (*LabelValue) ProtoMessage()               {}
+func (*LabelValue) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
+
+type isLabelValue_Value interface {
+	isLabelValue_Value()
+}
+
+type LabelValue_StringValue struct {
+	StringValue string `protobuf:"bytes,1,opt,name=string_value,json=stringValue,oneof"`
+}
+type LabelValue_IntValue struct {
+	IntValue int64 `protobuf:"varint,2,opt,name=int_value,json=intValue,oneof"`
+}
+type LabelValue_BoolValue struct {
+	BoolValue bool `protobuf:"varint,3,opt,name=bool_value,json=boolValue,oneof"`
+}
+
+func (*LabelValue_StringValue) isLabelValue_Value() {}
+func (*LabelValue_IntValue) isLabelValue_Value()    {}
+func (*LabelValue_BoolValue) isLabelValue_Value()   {}
+
+func (m *LabelValue) GetValue() isLabelValue_Value {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *LabelValue) GetStringValue() string {
+	if x, ok := m.GetValue().(*LabelValue_StringValue); ok {
+		return x.StringValue
+	}
+	return ""
+}
+
+func (m *LabelValue) GetIntValue() int64 {
+	if x, ok := m.GetValue().(*LabelValue_IntValue); ok {
+		return x.IntValue
+	}
+	return 0
+}
+
+func (m *LabelValue) GetBoolValue() bool {
+	if x, ok := m.GetValue().(*LabelValue_BoolValue); ok {
+		return x.BoolValue
+	}
+	return false
+}
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*LabelValue) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+	return _LabelValue_OneofMarshaler, _LabelValue_OneofUnmarshaler, _LabelValue_OneofSizer, []interface{}{
+		(*LabelValue_StringValue)(nil),
+		(*LabelValue_IntValue)(nil),
+		(*LabelValue_BoolValue)(nil),
+	}
+}
+
+func _LabelValue_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+	m := msg.(*LabelValue)
+	// value
+	switch x := m.Value.(type) {
+	case *LabelValue_StringValue:
+		b.EncodeVarint(1<<3 | proto.WireBytes)
+		b.EncodeStringBytes(x.StringValue)
+	case *LabelValue_IntValue:
+		b.EncodeVarint(2<<3 | proto.WireVarint)
+		b.EncodeVarint(uint64(x.IntValue))
+	case *LabelValue_BoolValue:
+		t := uint64(0)
+		if x.BoolValue {
+			t = 1
+		}
+		b.EncodeVarint(3<<3 | proto.WireVarint)
+		b.EncodeVarint(t)
+	case nil:
+	default:
+		return fmt.Errorf("LabelValue.Value has unexpected type %T", x)
+	}
+	return nil
+}
+
+func _LabelValue_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+	m := msg.(*LabelValue)
+	switch tag {
+	case 1: // value.string_value
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeStringBytes()
+		m.Value = &LabelValue_StringValue{x}
+		return true, err
+	case 2: // value.int_value
+		if wire != proto.WireVarint {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeVarint()
+		m.Value = &LabelValue_IntValue{int64(x)}
+		return true, err
+	case 3: // value.bool_value
+		if wire != proto.WireVarint {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeVarint()
+		m.Value = &LabelValue_BoolValue{x != 0}
+		return true, err
+	default:
+		return false, nil
+	}
+}
+
+func _LabelValue_OneofSizer(msg proto.Message) (n int) {
+	m := msg.(*LabelValue)
+	// value
+	switch x := m.Value.(type) {
+	case *LabelValue_StringValue:
+		n += proto.SizeVarint(1<<3 | proto.WireBytes)
+		n += proto.SizeVarint(uint64(len(x.StringValue)))
+		n += len(x.StringValue)
+	case *LabelValue_IntValue:
+		n += proto.SizeVarint(2<<3 | proto.WireVarint)
+		n += proto.SizeVarint(uint64(x.IntValue))
+	case *LabelValue_BoolValue:
+		n += proto.SizeVarint(3<<3 | proto.WireVarint)
+		n += 1
+	case nil:
+	default:
+		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+	}
+	return n
+}
+
+// A span represents a single operation within a trace. Spans can be nested
+// and form a trace tree. Often, a trace contains a root span that describes the
+// end-to-end latency and, optionally, one or more subspans for
+// its sub-operations. Spans do not need to be contiguous. There may be gaps
+// between spans in a trace.
+type Span struct {
+	// Identifier for the span. Must be a 64-bit integer other than 0 and
+	// unique within a trace.
+	Id uint64 `protobuf:"fixed64,1,opt,name=id" json:"id,omitempty"`
+	// Name of the span. The span name is sanitized and displayed in the
+	// Stackdriver Trace tool in the {% dynamic print site_values.console_name %}.
+	// The name may be a method name or some other per-call site name.
+	// For the same executable and the same call point, a best practice is
+	// to use a consistent name, which makes it easier to correlate
+	// cross-trace spans.
+	Name string `protobuf:"bytes,2,opt,name=name" json:"name,omitempty"`
+	// ID of parent span. 0 or missing if this is a root span.
+	ParentId uint64 `protobuf:"fixed64,3,opt,name=parent_id,json=parentId" json:"parent_id,omitempty"`
+	// Local machine clock in nanoseconds from the UNIX epoch,
+	// at which span execution started.
+	// On the server side these are the times when the server application
+	// handler starts running.
+	LocalStartTime *google_protobuf1.Timestamp `protobuf:"bytes,4,opt,name=local_start_time,json=localStartTime" json:"local_start_time,omitempty"`
+	// Local machine clock in nanoseconds from the UNIX epoch,
+	// at which span execution ended.
+	// On the server side these are the times when the server application
+	// handler finishes running.
+	LocalEndTime *google_protobuf1.Timestamp `protobuf:"bytes,5,opt,name=local_end_time,json=localEndTime" json:"local_end_time,omitempty"`
+	// Properties of a span. Labels at the span level.
+	// E.g.
+	// "/instance_id": "my-instance"
+	// "/zone": "us-central1-a"
+	// "/grpc/peer_address": "ip:port" (dns, etc.)
+	// "/grpc/deadline": "Duration"
+	// "/http/user_agent"
+	// "/http/request_bytes": 300
+	// "/http/response_bytes": 1200
+	// "/http/url": google.com/apis
+	// "/pid"
+	// "abc.com/mylabel": "my label value"
+	Labels map[string]*LabelValue `protobuf:"bytes,6,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+	// Stack trace captured at the start of the span. This is optional.
+	StackTrace *StackTrace `protobuf:"bytes,7,opt,name=stack_trace,json=stackTrace" json:"stack_trace,omitempty"`
+	// A collection of time-stamped events.
+	TimeEvents []*Span_TimeEvent `protobuf:"bytes,8,rep,name=time_events,json=timeEvents" json:"time_events,omitempty"`
+	// A collection of links.
+	Links []*Span_Link `protobuf:"bytes,9,rep,name=links" json:"links,omitempty"`
+	// The final status of the Span. This is optional.
+	Status *google_rpc.Status `protobuf:"bytes,10,opt,name=status" json:"status,omitempty"`
+	// True if this Span has a remote parent (is an RPC server Span).
+	HasRemoteParent bool `protobuf:"varint,11,opt,name=has_remote_parent,json=hasRemoteParent" json:"has_remote_parent,omitempty"`
+}
+
+func (m *Span) Reset()                    { *m = Span{} }
+func (m *Span) String() string            { return proto.CompactTextString(m) }
+func (*Span) ProtoMessage()               {}
+func (*Span) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4} }
+
+func (m *Span) GetLocalStartTime() *google_protobuf1.Timestamp {
+	if m != nil {
+		return m.LocalStartTime
+	}
+	return nil
+}
+
+func (m *Span) GetLocalEndTime() *google_protobuf1.Timestamp {
+	if m != nil {
+		return m.LocalEndTime
+	}
+	return nil
+}
+
+func (m *Span) GetLabels() map[string]*LabelValue {
+	if m != nil {
+		return m.Labels
+	}
+	return nil
+}
+
+func (m *Span) GetStackTrace() *StackTrace {
+	if m != nil {
+		return m.StackTrace
+	}
+	return nil
+}
+
+func (m *Span) GetTimeEvents() []*Span_TimeEvent {
+	if m != nil {
+		return m.TimeEvents
+	}
+	return nil
+}
+
+func (m *Span) GetLinks() []*Span_Link {
+	if m != nil {
+		return m.Links
+	}
+	return nil
+}
+
+func (m *Span) GetStatus() *google_rpc.Status {
+	if m != nil {
+		return m.Status
+	}
+	return nil
+}
+
+// A time-stamped annotation in the Span.
+type Span_TimeEvent struct {
+	// The local machine absolute timestamp when this event happened.
+	LocalTime *google_protobuf1.Timestamp `protobuf:"bytes,1,opt,name=local_time,json=localTime" json:"local_time,omitempty"`
+	// Types that are valid to be assigned to Value:
+	//	*Span_TimeEvent_Annotation_
+	//	*Span_TimeEvent_NetworkEvent_
+	Value isSpan_TimeEvent_Value `protobuf_oneof:"value"`
+}
+
+func (m *Span_TimeEvent) Reset()                    { *m = Span_TimeEvent{} }
+func (m *Span_TimeEvent) String() string            { return proto.CompactTextString(m) }
+func (*Span_TimeEvent) ProtoMessage()               {}
+func (*Span_TimeEvent) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0} }
+
+type isSpan_TimeEvent_Value interface {
+	isSpan_TimeEvent_Value()
+}
+
+type Span_TimeEvent_Annotation_ struct {
+	Annotation *Span_TimeEvent_Annotation `protobuf:"bytes,2,opt,name=annotation,oneof"`
+}
+type Span_TimeEvent_NetworkEvent_ struct {
+	NetworkEvent *Span_TimeEvent_NetworkEvent `protobuf:"bytes,3,opt,name=network_event,json=networkEvent,oneof"`
+}
+
+func (*Span_TimeEvent_Annotation_) isSpan_TimeEvent_Value()   {}
+func (*Span_TimeEvent_NetworkEvent_) isSpan_TimeEvent_Value() {}
+
+func (m *Span_TimeEvent) GetValue() isSpan_TimeEvent_Value {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *Span_TimeEvent) GetLocalTime() *google_protobuf1.Timestamp {
+	if m != nil {
+		return m.LocalTime
+	}
+	return nil
+}
+
+func (m *Span_TimeEvent) GetAnnotation() *Span_TimeEvent_Annotation {
+	if x, ok := m.GetValue().(*Span_TimeEvent_Annotation_); ok {
+		return x.Annotation
+	}
+	return nil
+}
+
+func (m *Span_TimeEvent) GetNetworkEvent() *Span_TimeEvent_NetworkEvent {
+	if x, ok := m.GetValue().(*Span_TimeEvent_NetworkEvent_); ok {
+		return x.NetworkEvent
+	}
+	return nil
+}
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*Span_TimeEvent) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+	return _Span_TimeEvent_OneofMarshaler, _Span_TimeEvent_OneofUnmarshaler, _Span_TimeEvent_OneofSizer, []interface{}{
+		(*Span_TimeEvent_Annotation_)(nil),
+		(*Span_TimeEvent_NetworkEvent_)(nil),
+	}
+}
+
+func _Span_TimeEvent_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+	m := msg.(*Span_TimeEvent)
+	// value
+	switch x := m.Value.(type) {
+	case *Span_TimeEvent_Annotation_:
+		b.EncodeVarint(2<<3 | proto.WireBytes)
+		if err := b.EncodeMessage(x.Annotation); err != nil {
+			return err
+		}
+	case *Span_TimeEvent_NetworkEvent_:
+		b.EncodeVarint(3<<3 | proto.WireBytes)
+		if err := b.EncodeMessage(x.NetworkEvent); err != nil {
+			return err
+		}
+	case nil:
+	default:
+		return fmt.Errorf("Span_TimeEvent.Value has unexpected type %T", x)
+	}
+	return nil
+}
+
+func _Span_TimeEvent_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+	m := msg.(*Span_TimeEvent)
+	switch tag {
+	case 2: // value.annotation
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		msg := new(Span_TimeEvent_Annotation)
+		err := b.DecodeMessage(msg)
+		m.Value = &Span_TimeEvent_Annotation_{msg}
+		return true, err
+	case 3: // value.network_event
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		msg := new(Span_TimeEvent_NetworkEvent)
+		err := b.DecodeMessage(msg)
+		m.Value = &Span_TimeEvent_NetworkEvent_{msg}
+		return true, err
+	default:
+		return false, nil
+	}
+}
+
+func _Span_TimeEvent_OneofSizer(msg proto.Message) (n int) {
+	m := msg.(*Span_TimeEvent)
+	// value
+	switch x := m.Value.(type) {
+	case *Span_TimeEvent_Annotation_:
+		s := proto.Size(x.Annotation)
+		n += proto.SizeVarint(2<<3 | proto.WireBytes)
+		n += proto.SizeVarint(uint64(s))
+		n += s
+	case *Span_TimeEvent_NetworkEvent_:
+		s := proto.Size(x.NetworkEvent)
+		n += proto.SizeVarint(3<<3 | proto.WireBytes)
+		n += proto.SizeVarint(uint64(s))
+		n += s
+	case nil:
+	default:
+		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+	}
+	return n
+}
+
+// Text annotation with a set of labels.
+type Span_TimeEvent_Annotation struct {
+	// A user-supplied message describing the event.
+	Description string `protobuf:"bytes,1,opt,name=description" json:"description,omitempty"`
+	// A set of labels on the annotation.
+	Labels map[string]*LabelValue `protobuf:"bytes,2,rep,name=labels" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+}
+
+func (m *Span_TimeEvent_Annotation) Reset()                    { *m = Span_TimeEvent_Annotation{} }
+func (m *Span_TimeEvent_Annotation) String() string            { return proto.CompactTextString(m) }
+func (*Span_TimeEvent_Annotation) ProtoMessage()               {}
+func (*Span_TimeEvent_Annotation) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 0, 0} }
+
+func (m *Span_TimeEvent_Annotation) GetLabels() map[string]*LabelValue {
+	if m != nil {
+		return m.Labels
+	}
+	return nil
+}
+
+// An event describing an RPC message sent/received on the network.
+type Span_TimeEvent_NetworkEvent struct {
+	// If available, this is the kernel time:
+	// For sent messages, this is the time at which the first bit was sent.
+	// For received messages, this is the time at which the last bit was
+	// received.
+	KernelTime *google_protobuf1.Timestamp      `protobuf:"bytes,1,opt,name=kernel_time,json=kernelTime" json:"kernel_time,omitempty"`
+	Type       Span_TimeEvent_NetworkEvent_Type `protobuf:"varint,2,opt,name=type,enum=google.tracing.v1.Span_TimeEvent_NetworkEvent_Type" json:"type,omitempty"`
+	// Every message has an identifier that must be different from that of all
+	// the other network messages in this span.
+	// This is especially important when the request/response are streamed.
+	MessageId uint64 `protobuf:"varint,3,opt,name=message_id,json=messageId" json:"message_id,omitempty"`
+	// Number of bytes sent/received.
+	MessageSize uint64 `protobuf:"varint,4,opt,name=message_size,json=messageSize" json:"message_size,omitempty"`
+}
+
+func (m *Span_TimeEvent_NetworkEvent) Reset()         { *m = Span_TimeEvent_NetworkEvent{} }
+func (m *Span_TimeEvent_NetworkEvent) String() string { return proto.CompactTextString(m) }
+func (*Span_TimeEvent_NetworkEvent) ProtoMessage()    {}
+func (*Span_TimeEvent_NetworkEvent) Descriptor() ([]byte, []int) {
+	return fileDescriptor0, []int{4, 0, 1}
+}
+
+func (m *Span_TimeEvent_NetworkEvent) GetKernelTime() *google_protobuf1.Timestamp {
+	if m != nil {
+		return m.KernelTime
+	}
+	return nil
+}
+
+// Link one span with another which may be in a different Trace. Used (for
+// example) in batching operations, where a single batch handler processes
+// multiple requests from different traces.
+type Span_Link struct {
+	// The trace and span identifier of the linked span.
+	TraceId *TraceId       `protobuf:"bytes,1,opt,name=trace_id,json=traceId" json:"trace_id,omitempty"`
+	SpanId  uint64         `protobuf:"fixed64,2,opt,name=span_id,json=spanId" json:"span_id,omitempty"`
+	Type    Span_Link_Type `protobuf:"varint,3,opt,name=type,enum=google.tracing.v1.Span_Link_Type" json:"type,omitempty"`
+}
+
+func (m *Span_Link) Reset()                    { *m = Span_Link{} }
+func (m *Span_Link) String() string            { return proto.CompactTextString(m) }
+func (*Span_Link) ProtoMessage()               {}
+func (*Span_Link) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{4, 1} }
+
+func (m *Span_Link) GetTraceId() *TraceId {
+	if m != nil {
+		return m.TraceId
+	}
+	return nil
+}
+
+// A trace describes how long it takes for an application to perform some
+// operations. It consists of a tree of spans, each of which contains details
+// about an operation with time information and operation details.
+type Trace struct {
+	// Globally unique identifier for the trace. Common to all the spans.
+	TraceId *TraceId `protobuf:"bytes,1,opt,name=trace_id,json=traceId" json:"trace_id,omitempty"`
+	// Collection of spans in the trace. The root span has parent_id == 0.
+	Spans []*Span `protobuf:"bytes,2,rep,name=spans" json:"spans,omitempty"`
+}
+
+func (m *Trace) Reset()                    { *m = Trace{} }
+func (m *Trace) String() string            { return proto.CompactTextString(m) }
+func (*Trace) ProtoMessage()               {}
+func (*Trace) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{5} }
+
+func (m *Trace) GetTraceId() *TraceId {
+	if m != nil {
+		return m.TraceId
+	}
+	return nil
+}
+
+func (m *Trace) GetSpans() []*Span {
+	if m != nil {
+		return m.Spans
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*TraceId)(nil), "google.tracing.v1.TraceId")
+	proto.RegisterType((*Module)(nil), "google.tracing.v1.Module")
+	proto.RegisterType((*StackTrace)(nil), "google.tracing.v1.StackTrace")
+	proto.RegisterType((*StackTrace_StackFrame)(nil), "google.tracing.v1.StackTrace.StackFrame")
+	proto.RegisterType((*LabelValue)(nil), "google.tracing.v1.LabelValue")
+	proto.RegisterType((*Span)(nil), "google.tracing.v1.Span")
+	proto.RegisterType((*Span_TimeEvent)(nil), "google.tracing.v1.Span.TimeEvent")
+	proto.RegisterType((*Span_TimeEvent_Annotation)(nil), "google.tracing.v1.Span.TimeEvent.Annotation")
+	proto.RegisterType((*Span_TimeEvent_NetworkEvent)(nil), "google.tracing.v1.Span.TimeEvent.NetworkEvent")
+	proto.RegisterType((*Span_Link)(nil), "google.tracing.v1.Span.Link")
+	proto.RegisterType((*Trace)(nil), "google.tracing.v1.Trace")
+	proto.RegisterEnum("google.tracing.v1.Span_TimeEvent_NetworkEvent_Type", Span_TimeEvent_NetworkEvent_Type_name, Span_TimeEvent_NetworkEvent_Type_value)
+	proto.RegisterEnum("google.tracing.v1.Span_Link_Type", Span_Link_Type_name, Span_Link_Type_value)
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/genproto/googleapis/tracing/trace.proto", fileDescriptor0)
+}
+
+var fileDescriptor0 = []byte{
+	// 1122 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xb4, 0x55, 0xdb, 0x6e, 0x1b, 0x37,
+	0x13, 0xd6, 0xf9, 0x30, 0xab, 0x38, 0x0a, 0x7f, 0xfc, 0x8d, 0xa2, 0xd6, 0xb0, 0xad, 0x20, 0x80,
+	0xe1, 0x26, 0x2b, 0x54, 0x46, 0x80, 0x24, 0x46, 0x8b, 0xc6, 0xb6, 0x5c, 0x09, 0x48, 0x05, 0x81,
+	0x72, 0x8c, 0xde, 0x2d, 0xa8, 0x5d, 0x7a, 0x45, 0x68, 0xc5, 0x5d, 0x2c, 0x29, 0x37, 0xf6, 0x6d,
+	0xdf, 0xa0, 0xef, 0xd0, 0xdb, 0xbe, 0x41, 0x1f, 0xa4, 0x7d, 0x9a, 0x82, 0x87, 0x95, 0x55, 0xc4,
+	0x8e, 0x9d, 0x02, 0xbd, 0x5a, 0xf2, 0x9b, 0x6f, 0x66, 0x87, 0xdf, 0xcc, 0x90, 0xf0, 0x3a, 0x8c,
+	0xe3, 0x30, 0xa2, 0x6e, 0x18, 0x47, 0x84, 0x87, 0x6e, 0x9c, 0x86, 0xdd, 0x90, 0xf2, 0x24, 0x8d,
+	0x65, 0xdc, 0x35, 0x26, 0x92, 0x30, 0xd1, 0x95, 0x29, 0xf1, 0x19, 0x0f, 0xf5, 0x97, 0xba, 0xda,
+	0x8c, 0x1e, 0x59, 0x57, 0x6b, 0x73, 0x2f, 0xbe, 0x69, 0x0f, 0xef, 0x17, 0x8d, 0x24, 0xac, 0x2b,
+	0x68, 0x7a, 0xc1, 0x7c, 0xea, 0xc7, 0xfc, 0x9c, 0x85, 0x5d, 0xc2, 0x79, 0x2c, 0x89, 0x64, 0x31,
+	0x17, 0x26, 0x7a, 0xfb, 0x20, 0x64, 0x72, 0xb6, 0x9c, 0xba, 0x7e, 0xbc, 0xe8, 0x9a, 0x70, 0x5d,
+	0x6d, 0x98, 0x2e, 0xcf, 0xbb, 0x89, 0xbc, 0x4c, 0xa8, 0xe8, 0x4a, 0xb6, 0xa0, 0x42, 0x92, 0x45,
+	0x72, 0xbd, 0xb2, 0xce, 0xdf, 0xde, 0x2f, 0x8f, 0x34, 0xf1, 0xbb, 0x42, 0x12, 0xb9, 0x14, 0xf6,
+	0x63, 0xdc, 0x3b, 0x7b, 0x50, 0x3d, 0x55, 0x07, 0x1d, 0x06, 0x68, 0x0b, 0x9c, 0x19, 0xfd, 0xe0,
+	0x51, 0xee, 0xc7, 0x01, 0x0d, 0x5a, 0xf9, 0xed, 0xfc, 0x6e, 0x1d, 0xc3, 0x8c, 0x7e, 0xe8, 0x1b,
+	0xa4, 0x73, 0x00, 0x95, 0x1f, 0xe3, 0x60, 0x19, 0x51, 0xf4, 0x05, 0x54, 0x16, 0x7a, 0x65, 0x59,
+	0x76, 0x87, 0x9e, 0x40, 0x6d, 0xba, 0x64, 0x51, 0xe0, 0xb1, 0xa0, 0x55, 0xd0, 0x96, 0xaa, 0xde,
+	0x0f, 0x83, 0xce, 0xef, 0x45, 0x80, 0x89, 0x24, 0xfe, 0x5c, 0xff, 0x0e, 0x0d, 0xc1, 0x11, 0x6a,
+	0xe7, 0x9d, 0xa7, 0x64, 0xa1, 0xc2, 0x14, 0x77, 0x9d, 0xde, 0xae, 0xfb, 0x91, 0xce, 0xee, 0xb5,
+	0x8f, 0x59, 0x9e, 0x28, 0x3e, 0x06, 0xb1, 0x5a, 0xa3, 0x17, 0xf0, 0x3f, 0x13, 0x4a, 0x57, 0xcc,
+	0x9b, 0x11, 0x31, 0xcb, 0xfe, 0x5f, 0xc2, 0x4d, 0xb1, 0xf2, 0x1f, 0x10, 0x31, 0x1b, 0x06, 0xed,
+	0xdf, 0x0a, 0x36, 0x11, 0xe3, 0xfd, 0x14, 0x1e, 0x9c, 0x2f, 0xb9, 0xaf, 0xea, 0xe1, 0x71, 0x93,
+	0x8a, 0xca, 0xbb, 0x91, 0x81, 0x23, 0x45, 0x7a, 0x0e, 0x28, 0x4e, 0x59, 0xe8, 0xfd, 0x93, 0x69,
+	0x4e, 0xd8, 0x54, 0x96, 0x93, 0x75, 0xf6, 0x97, 0x50, 0x3f, 0x67, 0x11, 0x35, 0xa4, 0xa2, 0x26,
+	0xd5, 0x14, 0xa0, 0x8d, 0x5b, 0xe0, 0x44, 0x8c, 0x53, 0x8f, 0x2f, 0x17, 0x53, 0x9a, 0xb6, 0x4a,
+	0xdb, 0xf9, 0xdd, 0x22, 0x06, 0x05, 0x8d, 0x34, 0xa2, 0x12, 0xf2, 0xe3, 0x68, 0xb9, 0xe0, 0x19,
+	0xa5, 0xac, 0x29, 0x0d, 0x03, 0x5a, 0xd2, 0x1b, 0x70, 0xa2, 0x98, 0x04, 0x9e, 0xad, 0x42, 0x65,
+	0x3b, 0xbf, 0xeb, 0xf4, 0x9e, 0xdc, 0x20, 0x9f, 0x29, 0x18, 0x06, 0xc5, 0xb6, 0xc5, 0x7b, 0x06,
+	0x1b, 0x22, 0x5e, 0xa6, 0x3e, 0xf5, 0x2e, 0x68, 0x2a, 0x58, 0xcc, 0x5b, 0x55, 0x9d, 0xe3, 0x03,
+	0x83, 0x9e, 0x19, 0xb0, 0x73, 0x05, 0xf0, 0x8e, 0x4c, 0x69, 0x74, 0x46, 0xa2, 0xa5, 0x92, 0xa9,
+	0x21, 0x64, 0xca, 0x78, 0xe8, 0x5d, 0xa8, 0xbd, 0x51, 0x69, 0x90, 0xc3, 0x8e, 0x41, 0x0d, 0x69,
+	0x13, 0xea, 0x8c, 0x4b, 0xcb, 0x50, 0xea, 0x14, 0x07, 0x39, 0x5c, 0x63, 0x5c, 0x1a, 0xf3, 0x16,
+	0xc0, 0x34, 0x8e, 0x23, 0x6b, 0x57, 0xc2, 0xd4, 0x06, 0x39, 0x5c, 0x57, 0x98, 0x26, 0x1c, 0x56,
+	0xa1, 0xac, 0x6d, 0x9d, 0x5f, 0x1b, 0x50, 0x9a, 0x24, 0x84, 0xa3, 0x0d, 0x28, 0x30, 0xd3, 0x8a,
+	0x15, 0x5c, 0x60, 0x01, 0x42, 0x50, 0x5a, 0x93, 0x5e, 0xaf, 0x95, 0xdc, 0x09, 0x49, 0x29, 0x97,
+	0xaa, 0xea, 0x45, 0x4d, 0xad, 0x19, 0x60, 0x18, 0xa0, 0x63, 0x68, 0x46, 0xb1, 0x4f, 0x22, 0x4f,
+	0x48, 0x92, 0x4a, 0x4f, 0x4d, 0x8f, 0xd6, 0xdc, 0xe9, 0xb5, 0x33, 0xb5, 0xb2, 0x59, 0x73, 0x4f,
+	0xb3, 0xd1, 0xc2, 0x1b, 0xda, 0x67, 0xa2, 0x5c, 0x14, 0x88, 0xbe, 0x07, 0x83, 0x78, 0x94, 0x07,
+	0x26, 0x46, 0xf9, 0xce, 0x18, 0x0d, 0xed, 0xd1, 0xe7, 0x81, 0x8e, 0x70, 0x00, 0x95, 0x48, 0xa9,
+	0x29, 0x5a, 0x15, 0xdd, 0xea, 0x4f, 0x6f, 0x6a, 0xf5, 0x84, 0x70, 0x57, 0x6b, 0x2e, 0xfa, 0x5c,
+	0xa6, 0x97, 0xd8, 0xba, 0xa0, 0xef, 0xb2, 0x61, 0xd1, 0x1d, 0xae, 0xcb, 0xe5, 0xf4, 0x36, 0x3f,
+	0x39, 0x2c, 0x76, 0x42, 0xcc, 0xb0, 0x1d, 0x82, 0xa3, 0x92, 0xf6, 0xe8, 0x05, 0xe5, 0x52, 0xb4,
+	0x6a, 0x3a, 0x83, 0x9d, 0xdb, 0x32, 0x50, 0xf9, 0xf6, 0x15, 0x13, 0x83, 0xcc, 0x96, 0x02, 0xf5,
+	0xa0, 0x1c, 0x31, 0x3e, 0x17, 0xad, 0xba, 0xf6, 0xfe, 0xea, 0xd6, 0xfc, 0x19, 0x9f, 0x63, 0x43,
+	0x45, 0x7b, 0x50, 0x31, 0x97, 0x4d, 0x0b, 0x74, 0xca, 0x28, 0x73, 0x4a, 0x13, 0x5f, 0xe5, 0x2a,
+	0x97, 0x02, 0x5b, 0x06, 0xda, 0x83, 0x47, 0x33, 0x22, 0xbc, 0x94, 0x2e, 0x62, 0x49, 0x3d, 0x53,
+	0xbf, 0x96, 0xa3, 0x7a, 0x04, 0x3f, 0x9c, 0x11, 0x81, 0x35, 0x3e, 0xd6, 0x70, 0xfb, 0xcf, 0x32,
+	0xd4, 0x57, 0x59, 0xa2, 0xd7, 0x00, 0xa6, 0x38, 0xba, 0x30, 0xf9, 0x3b, 0x0b, 0x53, 0xd7, 0x6c,
+	0x5d, 0x95, 0x11, 0xc0, 0xf5, 0x75, 0xac, 0x9b, 0xca, 0xe9, 0x3d, 0xbf, 0x53, 0x17, 0xf7, 0xed,
+	0xca, 0x67, 0x90, 0xc3, 0x6b, 0x11, 0xd0, 0x7b, 0x78, 0xc0, 0xa9, 0xfc, 0x39, 0x4e, 0xe7, 0x46,
+	0x6b, 0xdd, 0x8e, 0x4e, 0xcf, 0xbd, 0x3b, 0xe4, 0xc8, 0xb8, 0xe9, 0xcd, 0x20, 0x87, 0x1b, 0x7c,
+	0x6d, 0xdf, 0xfe, 0x2b, 0x0f, 0x70, 0xfd, 0x4f, 0xb4, 0x0d, 0x4e, 0x40, 0x85, 0x9f, 0xb2, 0x44,
+	0xa7, 0x6d, 0x2e, 0xac, 0x75, 0x08, 0x8d, 0x57, 0xdd, 0x56, 0xd0, 0xd5, 0x7a, 0xf5, 0x39, 0x67,
+	0xba, 0xa9, 0x05, 0xdb, 0x3f, 0x81, 0xb3, 0x06, 0xa3, 0x26, 0x14, 0xe7, 0xf4, 0xd2, 0xfe, 0x5a,
+	0x2d, 0xd1, 0xbe, 0x9d, 0x5d, 0xab, 0xe2, 0x4d, 0xdd, 0x79, 0x7d, 0x9d, 0x60, 0xc3, 0x7d, 0x53,
+	0x78, 0x95, 0x6f, 0xff, 0x52, 0x80, 0xc6, 0xfa, 0xe9, 0xd1, 0x01, 0x38, 0x73, 0x9a, 0x72, 0x7a,
+	0xef, 0x82, 0x82, 0xa1, 0xeb, 0x8a, 0xfe, 0x00, 0x25, 0xf5, 0x62, 0xea, 0x2c, 0x36, 0x7a, 0xfb,
+	0x9f, 0x27, 0xbc, 0x7b, 0x7a, 0x99, 0x50, 0xac, 0x03, 0xa0, 0x4d, 0x80, 0x05, 0x15, 0x82, 0x84,
+	0x34, 0xbb, 0x56, 0x4a, 0xb8, 0x6e, 0x91, 0x61, 0x80, 0x76, 0xa0, 0x91, 0x99, 0x05, 0xbb, 0x32,
+	0x77, 0x4a, 0x09, 0x3b, 0x16, 0x9b, 0xb0, 0x2b, 0xda, 0xf9, 0x1a, 0x4a, 0x2a, 0x1e, 0x7a, 0x08,
+	0xce, 0xfb, 0xd1, 0x64, 0xdc, 0x3f, 0x1a, 0x9e, 0x0c, 0xfb, 0xc7, 0xcd, 0x1c, 0xaa, 0x41, 0x69,
+	0xd2, 0x1f, 0x9d, 0x36, 0xf3, 0x6a, 0x85, 0xfb, 0x47, 0x67, 0xcd, 0xc2, 0xea, 0xea, 0x6b, 0xff,
+	0x91, 0x87, 0x92, 0x9a, 0x21, 0xf4, 0x12, 0x6a, 0xe6, 0x41, 0xb3, 0x17, 0xe0, 0x9a, 0x06, 0x6b,
+	0xa7, 0xb1, 0x8f, 0x37, 0xae, 0x4a, 0xfb, 0x8a, 0x3f, 0x86, 0xaa, 0x48, 0x08, 0xcf, 0x5e, 0xc0,
+	0x0a, 0xae, 0xa8, 0xed, 0x30, 0x40, 0x2f, 0xad, 0x32, 0x45, 0xad, 0xcc, 0xce, 0xa7, 0xe6, 0x77,
+	0x4d, 0x87, 0x8e, 0x7b, 0xdb, 0x29, 0xea, 0x50, 0x3e, 0x1a, 0x0c, 0xdf, 0x1d, 0x37, 0xf3, 0x08,
+	0xa0, 0x32, 0x7e, 0x8b, 0xd5, 0x91, 0x0a, 0xff, 0x5d, 0xa3, 0x74, 0x16, 0x50, 0x36, 0xd7, 0xd9,
+	0xbf, 0x54, 0xe6, 0x05, 0x94, 0x95, 0x14, 0xd9, 0x4c, 0x3c, 0xbe, 0x45, 0x01, 0x6c, 0x58, 0x87,
+	0xcf, 0xe0, 0xff, 0x7e, 0xbc, 0xf8, 0x98, 0x74, 0x08, 0x3a, 0xf2, 0x58, 0xf5, 0xe1, 0x38, 0x3f,
+	0xad, 0xe8, 0x86, 0xdc, 0xff, 0x3b, 0x00, 0x00, 0xff, 0xff, 0x32, 0x14, 0xf4, 0x2b, 0x5e, 0x0a,
+	0x00, 0x00,
+}
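
As the comment above notes, fileDescriptor0 holds a gzipped FileDescriptorProto (the leading 0x1f, 0x8b bytes are the gzip magic). The variable itself is unexported, but each generated Descriptor() method returns it, so it can be inspected from outside the package. A minimal sketch, assuming the classic github.com/golang/protobuf toolchain this file targets:

// Hypothetical helper that gunzips and decodes the descriptor embedded above.
package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io/ioutil"
	"log"

	"github.com/golang/protobuf/proto"
	descpb "github.com/golang/protobuf/protoc-gen-go/descriptor"

	tracing "google.golang.org/genproto/googleapis/tracing"
)

func main() {
	// Descriptor() returns the gzipped FileDescriptorProto plus the index path
	// of the Span message within it.
	gz, _ := (&tracing.Span{}).Descriptor()

	zr, err := gzip.NewReader(bytes.NewReader(gz))
	if err != nil {
		log.Fatalf("gzip: %v", err)
	}
	raw, err := ioutil.ReadAll(zr)
	if err != nil {
		log.Fatalf("read: %v", err)
	}

	var fd descpb.FileDescriptorProto
	if err := proto.Unmarshal(raw, &fd); err != nil {
		log.Fatalf("unmarshal: %v", err)
	}
	fmt.Println("source:", fd.GetName())
	for _, m := range fd.GetMessageType() {
		fmt.Println("message:", m.GetName())
	}
}
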
diff --git a/googleapis/tracing/trace.proto b/googleapis/tracing/trace.proto
new file mode 100644
index 0000000000000000000000000000000000000000..c1b64b1bede9c900016b1ebf584a9c83e1bd3309
--- /dev/null
+++ b/googleapis/tracing/trace.proto
@@ -0,0 +1,246 @@
+// Copyright 2016 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package google.tracing.v1;
+
+import "google.golang.org/genproto/googleapis/api/serviceconfig/annotations.proto"; // from google/api/annotations.proto
+import "github.com/golang/protobuf/ptypes/timestamp/timestamp.proto"; // from google/protobuf/timestamp.proto
+import "google.golang.org/genproto/googleapis/rpc/status/status.proto"; // from google/rpc/status.proto
+
+option java_multiple_files = true;
+option java_outer_classname = "TraceProto";
+option java_package = "com.google.tracing.v1";
+
+
+// A TraceId uniquely identifies a Trace. It is conceptually a 128-bit value,
+// represented as a string, containing the hex-encoded value.
+message TraceId {
+  // Trace ID specified as a hex-encoded string. *Must* be 32 bytes long.
+  string hex_encoded = 1;
+}
+
+message Module {
+  // Binary module.
+  // E.g. main binary, kernel modules, and dynamic libraries
+  // such as libc.so, sharedlib.so.
+  string module = 1;
+
+  // Build_id is a unique identifier for the module,
+  // probably a hash of its contents.
+  string build_id = 2;
+}
+
+message StackTrace {
+  // Presents a single stack frame in a stack trace.
+  message StackFrame {
+    // Fully qualified name which uniquely identifies the function/method/etc.
+    string function_name = 1;
+
+    // Used when function name is ‘mangled’. Not guaranteed to be fully
+    // qualified but usually it is.
+    string orig_function_name = 2;
+
+    // File name of the frame.
+    string file_name = 3;
+
+    // Line number of the frame.
+    int64 line_number = 4;
+
+    // Column number is important in JavaScript (anonymous functions).
+    // It might not be available in some languages.
+    int64 column_number = 5;
+
+    // Binary module the code is loaded from.
+    Module load_module = 6;
+
+    // source_version is deployment specific. It might be
+    // better stored in deployment metadata.
+    // However, in distributed tracing, it’s hard to keep track of
+    // source/binary versions in one place for all spans.
+    string source_version = 7;
+  }
+
+  // Stack frames of this stack trace.
+  repeated StackFrame stack_frame = 1;
+
+  // The user can choose their own hash function to hash large labels in order
+  // to save network bandwidth and storage.
+  // Typical usage is to pass both the stack frames and the hash initially, to
+  // inform the storage of the mapping; subsequent calls can then pass in
+  // stack_trace_hash_id only. The user should verify the hash value was stored.
+  uint64 stack_trace_hash_id = 2;
+}
+
+// Allowed label values.
+message LabelValue {
+  // The value of the label.
+  oneof value {
+    // A string value.
+    string string_value = 1;
+
+    // An integer value.
+    int64 int_value = 2;
+
+    // A boolean value.
+    bool bool_value = 3;
+  }
+}
+
+// A span represents a single operation within a trace. Spans can be nested
+// and form a trace tree. Often, a trace contains a root span that describes the
+// end-to-end latency and, optionally, one or more subspans for
+// its sub-operations. Spans do not need to be contiguous. There may be gaps
+// between spans in a trace.
+message Span {
+  // A time-stamped annotation in the Span.
+  message TimeEvent {
+    // Text annotation with a set of labels.
+    message Annotation {
+      // A user-supplied message describing the event.
+      string description = 1;
+
+      // A set of labels on the annotation.
+      map<string, LabelValue> labels = 2;
+    }
+
+    // An event describing an RPC message sent/received on the network.
+    message NetworkEvent {
+      // The type of the network event. SENT or RECV event.
+      enum Type {
+        UNSPECIFIED = 0;
+
+        SENT = 1;
+
+        RECV = 2;
+      }
+
+      // If available, this is the kernel time:
+      // For sent messages, this is the time at which the first bit was sent.
+      // For received messages, this is the time at which the last bit was
+      // received.
+      google.protobuf.Timestamp kernel_time = 1;
+
+      Type type = 2;
+
+      // Every message has an identifier that must be different from that of
+      // all the other network messages in this span.
+      // This is especially important when the request/response are streamed.
+      uint64 message_id = 3;
+
+      // Number of bytes sent/received.
+      uint64 message_size = 4;
+    }
+
+    // The local machine absolute timestamp when this event happened.
+    google.protobuf.Timestamp local_time = 1;
+
+    oneof value {
+      // Optional field for a user-supplied <string, LabelValue> map.
+      Annotation annotation = 2;
+
+      // Optional field that can be used only for network events.
+      NetworkEvent network_event = 3;
+    }
+  }
+
+  // Link one span with another which may be in a different Trace. Used (for
+  // example) in batching operations, where a single batch handler processes
+  // multiple requests from different traces.
+  message Link {
+    // The type of the link.
+    enum Type {
+      UNSPECIFIED = 0;
+
+      CHILD = 1;
+
+      PARENT = 2;
+    }
+
+    // The trace and span identifier of the linked span.
+    TraceId trace_id = 1;
+
+    fixed64 span_id = 2;
+
+    Type type = 3;
+  }
+
+  // Identifier for the span. Must be a 64-bit integer other than 0 and
+  // unique within a trace.
+  fixed64 id = 1;
+
+  // Name of the span. The span name is sanitized and displayed in the
+  // Stackdriver Trace tool in the {% dynamic print site_values.console_name %}.
+  // The name may be a method name or some other per-call site name.
+  // For the same executable and the same call point, a best practice is
+  // to use a consistent name, which makes it easier to correlate
+  // cross-trace spans.
+  string name = 2;
+
+  // ID of parent span. 0 or missing if this is a root span.
+  fixed64 parent_id = 3;
+
+  // Local machine clock in nanoseconds from the UNIX epoch,
+  // at which span execution started.
+  // On the server side these are the times when the server application
+  // handler starts running.
+  google.protobuf.Timestamp local_start_time = 4;
+
+  // Local machine clock in nanoseconds from the UNIX epoch,
+  // at which span execution ended.
+  // On the server side these are the times when the server application
+  // handler finishes running.
+  google.protobuf.Timestamp local_end_time = 5;
+
+  // Properties of a span. Labels at the span level.
+  // E.g.
+  // "/instance_id": "my-instance"
+  // "/zone": "us-central1-a"
+  // "/grpc/peer_address": "ip:port" (dns, etc.)
+  // "/grpc/deadline": "Duration"
+  // "/http/user_agent"
+  // "/http/request_bytes": 300
+  // "/http/response_bytes": 1200
+  // "/http/url": google.com/apis
+  // "/pid"
+  // "abc.com/mylabel": "my label value"
+  map<string, LabelValue> labels = 6;
+
+  // Stack trace captured at the start of the span. This is optional.
+  StackTrace stack_trace = 7;
+
+  // A collection of time-stamped events.
+  repeated TimeEvent time_events = 8;
+
+  // A collection of links.
+  repeated Link links = 9;
+
+  // The final status of the Span. This is optional.
+  google.rpc.Status status = 10;
+
+  // True if this Span has a remote parent (is an RPC server Span).
+  bool has_remote_parent = 11;
+}
+
+// A trace describes how long it takes for an application to perform some
+// operations. It consists of a tree of spans, each of which contains details
+// about an operation with time information and operation details.
+message Trace {
+  // Globally unique identifier for the trace. Common to all the spans.
+  TraceId trace_id = 1;
+
+  // Collection of spans in the trace. The root span has parent_id == 0.
+  repeated Span spans = 2;
+}
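
To tie the schema above to the Go types generated from it earlier in this diff, here is a small, hypothetical sketch that builds a Trace containing one root Span. The message and field names come straight from trace.pb.go above; the import alias and the ptypes timestamp helper are assumptions about the surrounding toolchain.

// Hypothetical construction of a Trace using the generated types above.
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/golang/protobuf/proto"
	"github.com/golang/protobuf/ptypes"

	tracing "google.golang.org/genproto/googleapis/tracing"
)

func main() {
	start, err := ptypes.TimestampProto(time.Now().Add(-50 * time.Millisecond))
	if err != nil {
		log.Fatal(err)
	}
	end, err := ptypes.TimestampProto(time.Now())
	if err != nil {
		log.Fatal(err)
	}

	span := &tracing.Span{
		Id:             1, // root span: non-zero id, parent_id left as 0
		Name:           "/frontend.Checkout",
		LocalStartTime: start,
		LocalEndTime:   end,
		Labels: map[string]*tracing.LabelValue{
			// The oneof is set through its generated wrapper types.
			"/http/url":            {Value: &tracing.LabelValue_StringValue{StringValue: "google.com/apis"}},
			"/http/response_bytes": {Value: &tracing.LabelValue_IntValue{IntValue: 1200}},
		},
		HasRemoteParent: false,
	}

	trace := &tracing.Trace{
		// Hex-encoded 128-bit id, i.e. a 32-character string.
		TraceId: &tracing.TraceId{HexEncoded: "0123456789abcdef0123456789abcdef"},
		Spans:   []*tracing.Span{span},
	}

	fmt.Println(proto.MarshalTextString(trace))
}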