// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google/cloud/dataproc/v1/jobs.proto

package dataproc

import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "google.golang.org/genproto/googleapis/api/annotations"
import google_protobuf2 "github.com/golang/protobuf/ptypes/empty"
import google_protobuf3 "github.com/golang/protobuf/ptypes/timestamp"

import (
	context "golang.org/x/net/context"
	grpc "google.golang.org/grpc"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// The Log4j level for job execution. When running an
// [Apache Hive](http://hive.apache.org/) job, Cloud
// Dataproc configures the Hive client to an equivalent verbosity level.
type LoggingConfig_Level int32

const (
	// Level is unspecified. Use default level for log4j.
	LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0
	// Use ALL level for log4j.
	LoggingConfig_ALL LoggingConfig_Level = 1
	// Use TRACE level for log4j.
	LoggingConfig_TRACE LoggingConfig_Level = 2
	// Use DEBUG level for log4j.
	LoggingConfig_DEBUG LoggingConfig_Level = 3
	// Use INFO level for log4j.
	LoggingConfig_INFO LoggingConfig_Level = 4
	// Use WARN level for log4j.
	LoggingConfig_WARN LoggingConfig_Level = 5
	// Use ERROR level for log4j.
	LoggingConfig_ERROR LoggingConfig_Level = 6
	// Use FATAL level for log4j.
	LoggingConfig_FATAL LoggingConfig_Level = 7
	// Turn off log4j.
	LoggingConfig_OFF LoggingConfig_Level = 8
)

var LoggingConfig_Level_name = map[int32]string{
	0: "LEVEL_UNSPECIFIED",
	1: "ALL",
	2: "TRACE",
	3: "DEBUG",
	4: "INFO",
	5: "WARN",
	6: "ERROR",
	7: "FATAL",
	8: "OFF",
}
var LoggingConfig_Level_value = map[string]int32{
	"LEVEL_UNSPECIFIED": 0,
	"ALL":               1,
	"TRACE":             2,
	"DEBUG":             3,
	"INFO":              4,
	"WARN":              5,
	"ERROR":             6,
	"FATAL":             7,
	"OFF":               8,
}

func (x LoggingConfig_Level) String() string {
	return proto.EnumName(LoggingConfig_Level_name, int32(x))
}
func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{0, 0} }
// The job state.
type JobStatus_State int32

const (
	// The job state is unknown.
	JobStatus_STATE_UNSPECIFIED JobStatus_State = 0
	// The job is pending; it has been submitted, but is not yet running.
	JobStatus_PENDING JobStatus_State = 1
	// The job has been received by the service and completed initial setup;
	// it will soon be submitted to the cluster.
	JobStatus_SETUP_DONE JobStatus_State = 8
	// The job is running on the cluster.
	JobStatus_RUNNING JobStatus_State = 2
	// A CancelJob request has been received, but is pending.
	JobStatus_CANCEL_PENDING JobStatus_State = 3
	// Transient in-flight resources have been canceled, and the request to
	// cancel the running job has been issued to the cluster.
	JobStatus_CANCEL_STARTED JobStatus_State = 7
	// The job cancellation was successful.
	JobStatus_CANCELLED JobStatus_State = 4
	// The job has completed successfully.
	JobStatus_DONE JobStatus_State = 5
	// The job has completed, but encountered an error.
	JobStatus_ERROR JobStatus_State = 6
)

var JobStatus_State_name = map[int32]string{
	0: "STATE_UNSPECIFIED",
	1: "PENDING",
	2: "RUNNING",
	3: "CANCEL_PENDING",
	4: "CANCELLED",
	5: "DONE",
	6: "ERROR",
	7: "CANCEL_STARTED",
	8: "SETUP_DONE",
}
var JobStatus_State_value = map[string]int32{
	"STATE_UNSPECIFIED": 0,
	"PENDING":           1,
	"RUNNING":           2,
	"CANCEL_PENDING":    3,
	"CANCELLED":         4,
	"DONE":              5,
	"ERROR":             6,
	"CANCEL_STARTED":    7,
	"SETUP_DONE":        8,
}

func (x JobStatus_State) String() string {
	return proto.EnumName(JobStatus_State_name, int32(x))
}
func (JobStatus_State) EnumDescriptor() ([]byte, []int) { return fileDescriptor1, []int{9, 0} }
// A matcher that specifies categories of job states.
type ListJobsRequest_JobStateMatcher int32

const (
	// Match all jobs, regardless of state.
	ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0
	// Only match jobs in non-terminal states: PENDING, RUNNING, or
	// CANCEL_PENDING.
	ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1
	// Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
	ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2
)

var ListJobsRequest_JobStateMatcher_name = map[int32]string{
	0: "ALL",
	1: "ACTIVE",
	2: "NON_ACTIVE",
}
var ListJobsRequest_JobStateMatcher_value = map[string]int32{
	"ALL":        0,
	"ACTIVE":     1,
	"NON_ACTIVE": 2,
}

func (x ListJobsRequest_JobStateMatcher) String() string {
	return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x))
}
func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor1, []int{14, 0}
}
// The runtime logging config of the job.
type LoggingConfig struct {
	// The per-package log levels for the driver. This may include
	// "root" package name to configure rootLogger.
	// Examples:
	//   'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value,enum=google.cloud.dataproc.v1.LoggingConfig_Level"`
}

func (m *LoggingConfig) Reset()                    { *m = LoggingConfig{} }
func (m *LoggingConfig) String() string            { return proto.CompactTextString(m) }
func (*LoggingConfig) ProtoMessage()               {}
func (*LoggingConfig) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{0} }

func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level {
	if m != nil {
		return m.DriverLogLevels
	}
	return nil
}
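
// exampleLoggingConfig is a hand-written sketch (not part of the generated
// code) showing how a client might populate DriverLogLevels. It mirrors the
// documented 'root = INFO', 'org.apache = DEBUG' example above.
func exampleLoggingConfig() *LoggingConfig {
	return &LoggingConfig{
		DriverLogLevels: map[string]LoggingConfig_Level{
			"root":       LoggingConfig_INFO,
			"org.apache": LoggingConfig_DEBUG,
		},
	}
}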
// A Cloud Dataproc job for running
// [Apache Hadoop MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
// jobs on [Apache Hadoop YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
type HadoopJob struct {
	// [Required] Indicates the location of the driver's main class. Specify
	// either the jar file that contains the main class or the main class name.
	// To specify both, add the jar file to `jar_file_uris`, and then specify
	// the main class name in this property.
	//
	// Types that are valid to be assigned to Driver:
	// *HadoopJob_MainJarFileUri
	// *HadoopJob_MainClass
	Driver isHadoopJob_Driver `protobuf_oneof:"driver"`
	// [Optional] The arguments to pass to the driver. Do not
	// include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as job
	// properties, since a collision may occur that causes an incorrect job
	// submission.
	Args []string `protobuf:"bytes,3,rep,name=args" json:"args,omitempty"`
	// [Optional] Jar file URIs to add to the CLASSPATHs of the
	// Hadoop driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
	// [Optional] HCFS (Hadoop Compatible Filesystem) URIs of files to be copied
	// to the working directory of Hadoop drivers and distributed tasks. Useful
	// for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris" json:"file_uris,omitempty"`
	// [Optional] HCFS URIs of archives to be extracted in the working directory of
	// Hadoop drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, or .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris" json:"archive_uris,omitempty"`
	// [Optional] A mapping of property names to values, used to configure Hadoop.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site and
	// classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig" json:"logging_config,omitempty"`
}

func (m *HadoopJob) Reset()                    { *m = HadoopJob{} }
func (m *HadoopJob) String() string            { return proto.CompactTextString(m) }
func (*HadoopJob) ProtoMessage()               {}
func (*HadoopJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{1} }

type isHadoopJob_Driver interface {
	isHadoopJob_Driver()
}

type HadoopJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,oneof"`
}
type HadoopJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,oneof"`
}

func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {}
func (*HadoopJob_MainClass) isHadoopJob_Driver()      {}

func (m *HadoopJob) GetDriver() isHadoopJob_Driver {
	if m != nil {
		return m.Driver
	}
	return nil
}

func (m *HadoopJob) GetMainJarFileUri() string {
	if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok {
		return x.MainJarFileUri
	}
	return ""
}

func (m *HadoopJob) GetMainClass() string {
	if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok {
		return x.MainClass
	}
	return ""
}

func (m *HadoopJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

func (m *HadoopJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *HadoopJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

func (m *HadoopJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

func (m *HadoopJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *HadoopJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{
		(*HadoopJob_MainJarFileUri)(nil),
		(*HadoopJob_MainClass)(nil),
	}
}

func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x)
	}
	return nil
}

func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*HadoopJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

func _HadoopJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
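
// exampleHadoopJob is a hand-written sketch (not generated) of a HadoopJob
// whose driver is named via the main_jar_file_uri oneof variant. The URIs,
// arguments, and property values are placeholders. Per the field comments
// above, flags such as -libjars that can be expressed as job properties
// should go in Properties rather than Args.
func exampleHadoopJob() *HadoopJob {
	return &HadoopJob{
		Driver: &HadoopJob_MainJarFileUri{
			MainJarFileUri: "gs://my-bucket/wordcount.jar",
		},
		Args:          []string{"gs://my-bucket/input/", "gs://my-bucket/output/"},
		Properties:    map[string]string{"mapreduce.job.maps": "8"},
		LoggingConfig: exampleLoggingConfig(),
	}
}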
// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/)
// applications on YARN.
type SparkJob struct {
	// [Required] The specification of the main method to call to drive the job.
	// Specify either the jar file that contains the main class or the main class
	// name. To pass both a main jar and a main class in that jar, add the jar to
	// `CommonJob.jar_file_uris`, and then specify the main class name in `main_class`.
	//
	// Types that are valid to be assigned to Driver:
	// *SparkJob_MainJarFileUri
	// *SparkJob_MainClass
	Driver isSparkJob_Driver `protobuf_oneof:"driver"`
	// [Optional] The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,3,rep,name=args" json:"args,omitempty"`
	// [Optional] HCFS URIs of jar files to add to the CLASSPATHs of the
	// Spark driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
	// [Optional] HCFS URIs of files to be copied to the working directory of
	// Spark drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris" json:"file_uris,omitempty"`
	// [Optional] HCFS URIs of archives to be extracted in the working directory
	// of Spark drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris" json:"archive_uris,omitempty"`
	// [Optional] A mapping of property names to values, used to configure Spark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig" json:"logging_config,omitempty"`
}

func (m *SparkJob) Reset()                    { *m = SparkJob{} }
func (m *SparkJob) String() string            { return proto.CompactTextString(m) }
func (*SparkJob) ProtoMessage()               {}
func (*SparkJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{2} }

type isSparkJob_Driver interface {
	isSparkJob_Driver()
}

type SparkJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,oneof"`
}
type SparkJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,oneof"`
}

func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {}
func (*SparkJob_MainClass) isSparkJob_Driver()      {}

func (m *SparkJob) GetDriver() isSparkJob_Driver {
	if m != nil {
		return m.Driver
	}
	return nil
}

func (m *SparkJob) GetMainJarFileUri() string {
	if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok {
		return x.MainJarFileUri
	}
	return ""
}

func (m *SparkJob) GetMainClass() string {
	if x, ok := m.GetDriver().(*SparkJob_MainClass); ok {
		return x.MainClass
	}
	return ""
}

func (m *SparkJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

func (m *SparkJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *SparkJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

func (m *SparkJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

func (m *SparkJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *SparkJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{
		(*SparkJob_MainJarFileUri)(nil),
		(*SparkJob_MainClass)(nil),
	}
}

func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *SparkJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("SparkJob.Driver has unexpected type %T", x)
	}
	return nil
}

func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*SparkJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

func _SparkJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *SparkJob_MainClass:
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
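
// exampleSparkJob is a hand-written sketch (not generated) using the
// main_class oneof variant of SparkJob. As the field comments describe, the
// jar containing the class travels in JarFileUris while MainClass names the
// entry point. Class name, URI, and argument are placeholders.
func exampleSparkJob() *SparkJob {
	return &SparkJob{
		Driver:      &SparkJob_MainClass{MainClass: "com.example.SparkPi"},
		JarFileUris: []string{"gs://my-bucket/spark-examples.jar"},
		Args:        []string{"1000"},
	}
}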
// A Cloud Dataproc job for running
// [Apache PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
// applications on YARN.
type PySparkJob struct {
	// [Required] The HCFS URI of the main Python file to use as the driver. Must
	// be a .py file.
	MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri" json:"main_python_file_uri,omitempty"`
	// [Optional] The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,2,rep,name=args" json:"args,omitempty"`
	// [Optional] HCFS file URIs of Python files to pass to the PySpark
	// framework. Supported file types: .py, .egg, and .zip.
	PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris" json:"python_file_uris,omitempty"`
	// [Optional] HCFS URIs of jar files to add to the CLASSPATHs of the
	// Python driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
	// [Optional] HCFS URIs of files to be copied to the working directory of
	// Python drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris" json:"file_uris,omitempty"`
	// [Optional] HCFS URIs of archives to be extracted in the working directory
	// of drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris" json:"archive_uris,omitempty"`
	// [Optional] A mapping of property names to values, used to configure PySpark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig" json:"logging_config,omitempty"`
}

func (m *PySparkJob) Reset()                    { *m = PySparkJob{} }
func (m *PySparkJob) String() string            { return proto.CompactTextString(m) }
func (*PySparkJob) ProtoMessage()               {}
func (*PySparkJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{3} }

func (m *PySparkJob) GetMainPythonFileUri() string {
	if m != nil {
		return m.MainPythonFileUri
	}
	return ""
}

func (m *PySparkJob) GetArgs() []string {
	if m != nil {
		return m.Args
	}
	return nil
}

func (m *PySparkJob) GetPythonFileUris() []string {
	if m != nil {
		return m.PythonFileUris
	}
	return nil
}

func (m *PySparkJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *PySparkJob) GetFileUris() []string {
	if m != nil {
		return m.FileUris
	}
	return nil
}

func (m *PySparkJob) GetArchiveUris() []string {
	if m != nil {
		return m.ArchiveUris
	}
	return nil
}

func (m *PySparkJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *PySparkJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}
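
// examplePySparkJob is a hand-written sketch (not generated) of a PySparkJob.
// MainPythonFileUri must point to a .py driver file; supporting modules
// travel in PythonFileUris. All URIs and arguments are placeholders.
func examplePySparkJob() *PySparkJob {
	return &PySparkJob{
		MainPythonFileUri: "gs://my-bucket/main.py",
		PythonFileUris:    []string{"gs://my-bucket/helpers.zip"},
		Args:              []string{"--shard", "0"},
	}
}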
// A list of queries to run on a cluster.
type QueryList struct {
	// [Required] The queries to execute. You do not need to terminate a query
	// with a semicolon. Multiple queries can be specified in one string
	// by separating each with a semicolon. Here is an example of a Cloud
	// Dataproc API snippet that uses a QueryList to specify a HiveJob:
	//
	//     "hiveJob": {
	//       "queryList": {
	//         "queries": [
	//           "query1",
	//           "query2",
	//           "query3;query4",
	//         ]
	//       }
	//     }
	Queries []string `protobuf:"bytes,1,rep,name=queries" json:"queries,omitempty"`
}

func (m *QueryList) Reset()                    { *m = QueryList{} }
func (m *QueryList) String() string            { return proto.CompactTextString(m) }
func (*QueryList) ProtoMessage()               {}
func (*QueryList) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{4} }

func (m *QueryList) GetQueries() []string {
	if m != nil {
		return m.Queries
	}
	return nil
}
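
// exampleQueryList is a hand-written sketch (not generated) mirroring the
// JSON snippet in the comment above: a single list element may carry several
// semicolon-separated statements.
func exampleQueryList() *QueryList {
	return &QueryList{
		Queries: []string{"query1", "query2", "query3;query4"},
	}
}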
// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/)
// queries on YARN.
type HiveJob struct {
	// [Required] The sequence of Hive queries to execute, specified as either
	// an HCFS file URI or a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	// *HiveJob_QueryFileUri
	// *HiveJob_QueryList
	Queries isHiveJob_Queries `protobuf_oneof:"queries"`
	// [Optional] Whether to continue executing queries if a query fails.
	// The default value is `false`. Setting to `true` can be useful when executing
	// independent parallel queries.
	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure" json:"continue_on_failure,omitempty"`
	// [Optional] Mapping of query variable names to values (equivalent to the
	// Hive command: `SET name="value";`).
	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] A mapping of property names and values, used to configure Hive.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
	// /etc/hive/conf/hive-site.xml, and classes in user code.
	Properties map[string]string `protobuf:"bytes,5,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] HCFS URIs of jar files to add to the CLASSPATH of the
	// Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
	// and UDFs.
	JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
}

func (m *HiveJob) Reset()                    { *m = HiveJob{} }
func (m *HiveJob) String() string            { return proto.CompactTextString(m) }
func (*HiveJob) ProtoMessage()               {}
func (*HiveJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{5} }

type isHiveJob_Queries interface {
	isHiveJob_Queries()
}

type HiveJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,oneof"`
}
type HiveJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,oneof"`
}

func (*HiveJob_QueryFileUri) isHiveJob_Queries() {}
func (*HiveJob_QueryList) isHiveJob_Queries()    {}

func (m *HiveJob) GetQueries() isHiveJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

func (m *HiveJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

func (m *HiveJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*HiveJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *HiveJob) GetContinueOnFailure() bool {
	if m != nil {
		return m.ContinueOnFailure
	}
	return false
}

func (m *HiveJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *HiveJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *HiveJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{
		(*HiveJob_QueryFileUri)(nil),
		(*HiveJob_QueryList)(nil),
	}
}

func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*HiveJob)
	// queries
	switch x := m.Queries.(type) {
	case *HiveJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *HiveJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("HiveJob.Queries has unexpected type %T", x)
	}
	return nil
}

func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*HiveJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &HiveJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &HiveJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

func _HiveJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*HiveJob)
	// queries
	switch x := m.Queries.(type) {
	case *HiveJob_QueryFileUri:
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *HiveJob_QueryList:
		s := proto.Size(x.QueryList)
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
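
// exampleHiveJob is a hand-written sketch (not generated) of a HiveJob that
// reads its script from an HCFS file via the query_file_uri oneof variant and
// substitutes a script variable (equivalent to Hive's `SET name="value";`).
// The URI, variable, and table name are placeholders.
func exampleHiveJob() *HiveJob {
	return &HiveJob{
		Queries:           &HiveJob_QueryFileUri{QueryFileUri: "gs://my-bucket/queries.hql"},
		ContinueOnFailure: true,
		ScriptVariables:   map[string]string{"input_table": "logs_2017"},
	}
}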
// A Cloud Dataproc job for running [Apache Spark SQL](http://spark.apache.org/sql/)
// queries.
type SparkSqlJob struct {
	// [Required] The sequence of Spark SQL queries to execute, specified as
	// either an HCFS file URI or as a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	// *SparkSqlJob_QueryFileUri
	// *SparkSqlJob_QueryList
	Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"`
	// [Optional] Mapping of query variable names to values (equivalent to the
	// Spark SQL command: SET `name="value";`).
	ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] A mapping of property names to values, used to configure
	// Spark SQL's SparkConf. Properties that conflict with values set by the
	// Cloud Dataproc API may be overwritten.
	Properties map[string]string `protobuf:"bytes,4,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] HCFS URIs of jar files to be added to the Spark CLASSPATH.
	JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
	// [Optional] The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig" json:"logging_config,omitempty"`
}

func (m *SparkSqlJob) Reset()                    { *m = SparkSqlJob{} }
func (m *SparkSqlJob) String() string            { return proto.CompactTextString(m) }
func (*SparkSqlJob) ProtoMessage()               {}
func (*SparkSqlJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{6} }

type isSparkSqlJob_Queries interface {
	isSparkSqlJob_Queries()
}

type SparkSqlJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,oneof"`
}
type SparkSqlJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,oneof"`
}

func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {}
func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries()    {}

func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

func (m *SparkSqlJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

func (m *SparkSqlJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *SparkSqlJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *SparkSqlJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *SparkSqlJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{
		(*SparkSqlJob_QueryFileUri)(nil),
		(*SparkSqlJob_QueryList)(nil),
	}
}

func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*SparkSqlJob)
	// queries
	switch x := m.Queries.(type) {
	case *SparkSqlJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *SparkSqlJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x)
	}
	return nil
}

func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*SparkSqlJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &SparkSqlJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &SparkSqlJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*SparkSqlJob)
	// queries
	switch x := m.Queries.(type) {
	case *SparkSqlJob_QueryFileUri:
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *SparkSqlJob_QueryList:
		s := proto.Size(x.QueryList)
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
// queries on YARN.
type PigJob struct {
	// [Required] The sequence of Pig queries to execute, specified as an HCFS
	// file URI or a list of queries.
	//
	// Types that are valid to be assigned to Queries:
	// *PigJob_QueryFileUri
	// *PigJob_QueryList
	Queries isPigJob_Queries `protobuf_oneof:"queries"`
	// [Optional] Whether to continue executing queries if a query fails.
	// The default value is `false`. Setting to `true` can be useful when executing
	// independent parallel queries.
	ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure" json:"continue_on_failure,omitempty"`
	// [Optional] Mapping of query variable names to values (equivalent to the Pig
	// command: `name=[value]`).
	ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] A mapping of property names to values, used to configure Pig.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
	// /etc/pig/conf/pig.properties, and classes in user code.
	Properties map[string]string `protobuf:"bytes,5,rep,name=properties" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
	// [Optional] HCFS URIs of jar files to add to the CLASSPATH of
	// the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
	JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris" json:"jar_file_uris,omitempty"`
	// [Optional] The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig" json:"logging_config,omitempty"`
}

func (m *PigJob) Reset()                    { *m = PigJob{} }
func (m *PigJob) String() string            { return proto.CompactTextString(m) }
func (*PigJob) ProtoMessage()               {}
func (*PigJob) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{7} }

type isPigJob_Queries interface {
	isPigJob_Queries()
}

type PigJob_QueryFileUri struct {
	QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,oneof"`
}
type PigJob_QueryList struct {
	QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,oneof"`
}

func (*PigJob_QueryFileUri) isPigJob_Queries() {}
func (*PigJob_QueryList) isPigJob_Queries()    {}

func (m *PigJob) GetQueries() isPigJob_Queries {
	if m != nil {
		return m.Queries
	}
	return nil
}

func (m *PigJob) GetQueryFileUri() string {
	if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok {
		return x.QueryFileUri
	}
	return ""
}

func (m *PigJob) GetQueryList() *QueryList {
	if x, ok := m.GetQueries().(*PigJob_QueryList); ok {
		return x.QueryList
	}
	return nil
}

func (m *PigJob) GetContinueOnFailure() bool {
	if m != nil {
		return m.ContinueOnFailure
	}
	return false
}

func (m *PigJob) GetScriptVariables() map[string]string {
	if m != nil {
		return m.ScriptVariables
	}
	return nil
}

func (m *PigJob) GetProperties() map[string]string {
	if m != nil {
		return m.Properties
	}
	return nil
}

func (m *PigJob) GetJarFileUris() []string {
	if m != nil {
		return m.JarFileUris
	}
	return nil
}

func (m *PigJob) GetLoggingConfig() *LoggingConfig {
	if m != nil {
		return m.LoggingConfig
	}
	return nil
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{
		(*PigJob_QueryFileUri)(nil),
		(*PigJob_QueryList)(nil),
	}
}

func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*PigJob)
	// queries
	switch x := m.Queries.(type) {
	case *PigJob_QueryFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.QueryFileUri)
	case *PigJob_QueryList:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.QueryList); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("PigJob.Queries has unexpected type %T", x)
	}
	return nil
}

func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*PigJob)
	switch tag {
	case 1: // queries.query_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Queries = &PigJob_QueryFileUri{x}
		return true, err
	case 2: // queries.query_list
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(QueryList)
		err := b.DecodeMessage(msg)
		m.Queries = &PigJob_QueryList{msg}
		return true, err
	default:
		return false, nil
	}
}

func _PigJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*PigJob)
	// queries
	switch x := m.Queries.(type) {
	case *PigJob_QueryFileUri:
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
		n += len(x.QueryFileUri)
	case *PigJob_QueryList:
		s := proto.Size(x.QueryList)
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// Cloud Dataproc job config.
type JobPlacement struct {
	// [Required] The name of the cluster where the job will be submitted.
	ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName" json:"cluster_name,omitempty"`
	// [Output-only] A cluster UUID generated by the Cloud Dataproc service when
	// the job is submitted.
	ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid" json:"cluster_uuid,omitempty"`
}

func (m *JobPlacement) Reset()                    { *m = JobPlacement{} }
func (m *JobPlacement) String() string            { return proto.CompactTextString(m) }
func (*JobPlacement) ProtoMessage()               {}
func (*JobPlacement) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{8} }

func (m *JobPlacement) GetClusterName() string {
	if m != nil {
		return m.ClusterName
	}
	return ""
}

func (m *JobPlacement) GetClusterUuid() string {
	if m != nil {
		return m.ClusterUuid
	}
	return ""
}
// Cloud Dataproc job status.
type JobStatus struct {
	// [Output-only] A state message specifying the overall job state.
	State JobStatus_State `protobuf:"varint,1,opt,name=state,enum=google.cloud.dataproc.v1.JobStatus_State" json:"state,omitempty"`
	// [Output-only] Optional job state details, such as an error
	// description if the state is <code>ERROR</code>.
	Details string `protobuf:"bytes,2,opt,name=details" json:"details,omitempty"`
	// [Output-only] The time when this state was entered.
	StateStartTime *google_protobuf3.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime" json:"state_start_time,omitempty"`
}

func (m *JobStatus) Reset()                    { *m = JobStatus{} }
func (m *JobStatus) String() string            { return proto.CompactTextString(m) }
func (*JobStatus) ProtoMessage()               {}
func (*JobStatus) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{9} }

func (m *JobStatus) GetState() JobStatus_State {
	if m != nil {
		return m.State
	}
	return JobStatus_STATE_UNSPECIFIED
}

func (m *JobStatus) GetDetails() string {
	if m != nil {
		return m.Details
	}
	return ""
}

func (m *JobStatus) GetStateStartTime() *google_protobuf3.Timestamp {
	if m != nil {
		return m.StateStartTime
	}
	return nil
}
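
// isTerminalJobState is a hand-written sketch (not generated) that reports
// whether a state is terminal. Per the JobStatus_State comments above,
// CANCELLED, DONE, and ERROR are the terminal states; everything else is
// still in flight.
func isTerminalJobState(s JobStatus_State) bool {
	switch s {
	case JobStatus_CANCELLED, JobStatus_DONE, JobStatus_ERROR:
		return true
	default:
		return false
	}
}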
// Encapsulates the full scoping used to reference a job.
type JobReference struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Optional] The job ID, which must be unique within the project. The job ID
	// is generated by the server upon job submission or provided by the user as a
	// means to perform retries without creating duplicate jobs. The ID must
	// contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
	// hyphens (-). The maximum length is 512 characters.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId" json:"job_id,omitempty"`
}

func (m *JobReference) Reset()                    { *m = JobReference{} }
func (m *JobReference) String() string            { return proto.CompactTextString(m) }
func (*JobReference) ProtoMessage()               {}
func (*JobReference) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{10} }

func (m *JobReference) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *JobReference) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
// A Cloud Dataproc job resource.
type Job struct {
	// [Optional] The fully qualified reference to the job, which can be used to
	// obtain the equivalent REST path of the job resource. If this property
	// is not specified when a job is created, the server generates a
	// <code>job_id</code>.
	Reference *JobReference `protobuf:"bytes,1,opt,name=reference" json:"reference,omitempty"`
	// [Required] Job information, including how, when, and where to
	// run the job.
	Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement" json:"placement,omitempty"`
	// [Required] The application/framework-specific portion of the job.
	//
	// Types that are valid to be assigned to TypeJob:
	// *Job_HadoopJob
	// *Job_SparkJob
	// *Job_PysparkJob
	// *Job_HiveJob
	// *Job_PigJob
	// *Job_SparkSqlJob
	TypeJob isJob_TypeJob `protobuf_oneof:"type_job"`
	// [Output-only] The job status. Additional application-specific
	// status information may be contained in the <code>type_job</code>
	// and <code>yarn_applications</code> fields.
	Status *JobStatus `protobuf:"bytes,8,opt,name=status" json:"status,omitempty"`
	// [Output-only] The previous job status.
	StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory" json:"status_history,omitempty"`
	// [Output-only] A URI pointing to the location of the stdout of the job's
	// driver program.
	DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri" json:"driver_output_resource_uri,omitempty"`
	// [Output-only] If present, the location of miscellaneous control files
	// which may be used as part of job setup and handling. If not present,
	// control files may be placed in the same location as `driver_output_uri`.
	DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri" json:"driver_control_files_uri,omitempty"`
}

func (m *Job) Reset()                    { *m = Job{} }
func (m *Job) String() string            { return proto.CompactTextString(m) }
func (*Job) ProtoMessage()               {}
func (*Job) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{11} }

type isJob_TypeJob interface {
	isJob_TypeJob()
}

type Job_HadoopJob struct {
	HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,oneof"`
}
type Job_SparkJob struct {
	SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,oneof"`
}
type Job_PysparkJob struct {
	PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,oneof"`
}
type Job_HiveJob struct {
	HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,oneof"`
}
type Job_PigJob struct {
	PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,oneof"`
}
type Job_SparkSqlJob struct {
	SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,oneof"`
}

func (*Job_HadoopJob) isJob_TypeJob()   {}
func (*Job_SparkJob) isJob_TypeJob()    {}
func (*Job_PysparkJob) isJob_TypeJob()  {}
func (*Job_HiveJob) isJob_TypeJob()     {}
func (*Job_PigJob) isJob_TypeJob()      {}
func (*Job_SparkSqlJob) isJob_TypeJob() {}

func (m *Job) GetTypeJob() isJob_TypeJob {
	if m != nil {
		return m.TypeJob
	}
	return nil
}

func (m *Job) GetReference() *JobReference {
	if m != nil {
		return m.Reference
	}
	return nil
}

func (m *Job) GetPlacement() *JobPlacement {
	if m != nil {
		return m.Placement
	}
	return nil
}

func (m *Job) GetHadoopJob() *HadoopJob {
	if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok {
		return x.HadoopJob
	}
	return nil
}

func (m *Job) GetSparkJob() *SparkJob {
	if x, ok := m.GetTypeJob().(*Job_SparkJob); ok {
		return x.SparkJob
	}
	return nil
}

func (m *Job) GetPysparkJob() *PySparkJob {
	if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok {
		return x.PysparkJob
	}
	return nil
}

func (m *Job) GetHiveJob() *HiveJob {
	if x, ok := m.GetTypeJob().(*Job_HiveJob); ok {
		return x.HiveJob
	}
	return nil
}

func (m *Job) GetPigJob() *PigJob {
	if x, ok := m.GetTypeJob().(*Job_PigJob); ok {
		return x.PigJob
	}
	return nil
}

func (m *Job) GetSparkSqlJob() *SparkSqlJob {
	if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok {
		return x.SparkSqlJob
	}
	return nil
}

func (m *Job) GetStatus() *JobStatus {
	if m != nil {
		return m.Status
	}
	return nil
}

func (m *Job) GetStatusHistory() []*JobStatus {
	if m != nil {
		return m.StatusHistory
	}
	return nil
}

func (m *Job) GetDriverOutputResourceUri() string {
	if m != nil {
		return m.DriverOutputResourceUri
	}
	return ""
}

func (m *Job) GetDriverControlFilesUri() string {
	if m != nil {
		return m.DriverControlFilesUri
	}
	return ""
}

// XXX_OneofFuncs is for the internal use of the proto package.
func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{
		(*Job_HadoopJob)(nil),
		(*Job_SparkJob)(nil),
		(*Job_PysparkJob)(nil),
		(*Job_HiveJob)(nil),
		(*Job_PigJob)(nil),
		(*Job_SparkSqlJob)(nil),
	}
}

func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*Job)
	// type_job
	switch x := m.TypeJob.(type) {
	case *Job_HadoopJob:
		b.EncodeVarint(3<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HadoopJob); err != nil {
			return err
		}
	case *Job_SparkJob:
		b.EncodeVarint(4<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.SparkJob); err != nil {
			return err
		}
	case *Job_PysparkJob:
		b.EncodeVarint(5<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.PysparkJob); err != nil {
			return err
		}
	case *Job_HiveJob:
		b.EncodeVarint(6<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HiveJob); err != nil {
			return err
		}
	case *Job_PigJob:
		b.EncodeVarint(7<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.PigJob); err != nil {
			return err
		}
	case *Job_SparkSqlJob:
		b.EncodeVarint(12<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.SparkSqlJob); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("Job.TypeJob has unexpected type %T", x)
	}
	return nil
}

func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*Job)
	switch tag {
	case 3: // type_job.hadoop_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HadoopJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_HadoopJob{msg}
		return true, err
	case 4: // type_job.spark_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(SparkJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_SparkJob{msg}
		return true, err
	case 5: // type_job.pyspark_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(PySparkJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_PysparkJob{msg}
		return true, err
	case 6: // type_job.hive_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HiveJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_HiveJob{msg}
		return true, err
	case 7: // type_job.pig_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(PigJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_PigJob{msg}
		return true, err
	case 12: // type_job.spark_sql_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(SparkSqlJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_SparkSqlJob{msg}
		return true, err
	default:
		return false, nil
	}
}

func _Job_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*Job)
	// type_job
	switch x := m.TypeJob.(type) {
	case *Job_HadoopJob:
		s := proto.Size(x.HadoopJob)
		n += proto.SizeVarint(3<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_SparkJob:
		s := proto.Size(x.SparkJob)
		n += proto.SizeVarint(4<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_PysparkJob:
		s := proto.Size(x.PysparkJob)
		n += proto.SizeVarint(5<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_HiveJob:
		s := proto.Size(x.HiveJob)
		n += proto.SizeVarint(6<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_PigJob:
		s := proto.Size(x.PigJob)
		n += proto.SizeVarint(7<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_SparkSqlJob:
		s := proto.Size(x.SparkSqlJob)
		n += proto.SizeVarint(12<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
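
// exampleJob is a hand-written sketch (not generated) assembling a complete
// Job. Exactly one Job_* oneof wrapper may be set in TypeJob; the Hadoop
// variant from exampleHadoopJob is used here. The job ID is omitted from the
// reference because, per JobReference, the server generates one.
func exampleJob() *Job {
	return &Job{
		Reference: &JobReference{ProjectId: "my-project"},
		Placement: &JobPlacement{ClusterName: "my-cluster"},
		TypeJob:   &Job_HadoopJob{HadoopJob: exampleHadoopJob()},
	}
}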
// A request to submit a job.
type SubmitJobRequest struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Required] The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region" json:"region,omitempty"`
	// [Required] The job resource.
	Job *Job `protobuf:"bytes,2,opt,name=job" json:"job,omitempty"`
}

func (m *SubmitJobRequest) Reset()                    { *m = SubmitJobRequest{} }
func (m *SubmitJobRequest) String() string            { return proto.CompactTextString(m) }
func (*SubmitJobRequest) ProtoMessage()               {}
func (*SubmitJobRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{12} }

func (m *SubmitJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *SubmitJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

func (m *SubmitJobRequest) GetJob() *Job {
	if m != nil {
		return m.Job
	}
	return nil
}
// A request to get the resource representation for a job in a project.
type GetJobRequest struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Required] The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region" json:"region,omitempty"`
	// [Required] The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId" json:"job_id,omitempty"`
}

func (m *GetJobRequest) Reset()                    { *m = GetJobRequest{} }
func (m *GetJobRequest) String() string            { return proto.CompactTextString(m) }
func (*GetJobRequest) ProtoMessage()               {}
func (*GetJobRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{13} }

func (m *GetJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *GetJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

func (m *GetJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
// A request to list jobs in a project.
type ListJobsRequest struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Required] The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,6,opt,name=region" json:"region,omitempty"`
	// [Optional] The number of results to return in each response.
	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize" json:"page_size,omitempty"`
	// [Optional] The page token, returned by a previous call, to request the
	// next page of results.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken" json:"page_token,omitempty"`
	// [Optional] If set, the returned jobs list includes only jobs that were
	// submitted to the named cluster.
	ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName" json:"cluster_name,omitempty"`
	// [Optional] Specifies enumerated categories of jobs to list
	// (default = match ALL jobs).
	JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,enum=google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"`
}

func (m *ListJobsRequest) Reset()                    { *m = ListJobsRequest{} }
func (m *ListJobsRequest) String() string            { return proto.CompactTextString(m) }
func (*ListJobsRequest) ProtoMessage()               {}
func (*ListJobsRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{14} }

func (m *ListJobsRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *ListJobsRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

func (m *ListJobsRequest) GetPageSize() int32 {
	if m != nil {
		return m.PageSize
	}
	return 0
}

func (m *ListJobsRequest) GetPageToken() string {
	if m != nil {
		return m.PageToken
	}
	return ""
}

func (m *ListJobsRequest) GetClusterName() string {
	if m != nil {
		return m.ClusterName
	}
	return ""
}

func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher {
	if m != nil {
		return m.JobStateMatcher
	}
	return ListJobsRequest_ALL
}
// A list of jobs in a project.
type ListJobsResponse struct {
	// [Output-only] Jobs list.
	Jobs []*Job `protobuf:"bytes,1,rep,name=jobs" json:"jobs,omitempty"`
	// [Optional] This token is included in the response if there are more results
	// to fetch. To fetch additional results, provide this value as the
	// `page_token` in a subsequent <code>ListJobsRequest</code>.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken" json:"next_page_token,omitempty"`
}

func (m *ListJobsResponse) Reset()                    { *m = ListJobsResponse{} }
func (m *ListJobsResponse) String() string            { return proto.CompactTextString(m) }
func (*ListJobsResponse) ProtoMessage()               {}
func (*ListJobsResponse) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{15} }

func (m *ListJobsResponse) GetJobs() []*Job {
	if m != nil {
		return m.Jobs
	}
	return nil
}

func (m *ListJobsResponse) GetNextPageToken() string {
	if m != nil {
		return m.NextPageToken
	}
	return ""
}
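
// exampleListActiveJobs is a hand-written sketch (not generated) that pages
// through all active jobs on a cluster by feeding each response's
// NextPageToken back as the next request's PageToken until it comes back
// empty. Project, region, and cluster names are placeholders.
func exampleListActiveJobs(ctx context.Context, client JobControllerClient) ([]*Job, error) {
	var all []*Job
	req := &ListJobsRequest{
		ProjectId:       "my-project",
		Region:          "global",
		ClusterName:     "my-cluster",
		JobStateMatcher: ListJobsRequest_ACTIVE,
	}
	for {
		resp, err := client.ListJobs(ctx, req)
		if err != nil {
			return nil, err
		}
		all = append(all, resp.GetJobs()...)
		if resp.GetNextPageToken() == "" {
			return all, nil
		}
		req.PageToken = resp.GetNextPageToken()
	}
}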
// A request to cancel a job.
type CancelJobRequest struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Required] The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region" json:"region,omitempty"`
	// [Required] The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId" json:"job_id,omitempty"`
}

func (m *CancelJobRequest) Reset()                    { *m = CancelJobRequest{} }
func (m *CancelJobRequest) String() string            { return proto.CompactTextString(m) }
func (*CancelJobRequest) ProtoMessage()               {}
func (*CancelJobRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{16} }

func (m *CancelJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *CancelJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

func (m *CancelJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
// A request to delete a job.
type DeleteJobRequest struct {
	// [Required] The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId" json:"project_id,omitempty"`
	// [Required] The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region" json:"region,omitempty"`
	// [Required] The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId" json:"job_id,omitempty"`
}

func (m *DeleteJobRequest) Reset()                    { *m = DeleteJobRequest{} }
func (m *DeleteJobRequest) String() string            { return proto.CompactTextString(m) }
func (*DeleteJobRequest) ProtoMessage()               {}
func (*DeleteJobRequest) Descriptor() ([]byte, []int) { return fileDescriptor1, []int{17} }

func (m *DeleteJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

func (m *DeleteJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

func (m *DeleteJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
func init() {
	proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1.LoggingConfig")
	proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1.HadoopJob")
	proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1.SparkJob")
	proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1.PySparkJob")
	proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1.QueryList")
	proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1.HiveJob")
	proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1.SparkSqlJob")
	proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1.PigJob")
	proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1.JobPlacement")
	proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1.JobStatus")
	proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1.JobReference")
	proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1.Job")
	proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1.SubmitJobRequest")
	proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1.GetJobRequest")
	proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1.ListJobsRequest")
	proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1.ListJobsResponse")
	proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1.CancelJobRequest")
	proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1.DeleteJobRequest")
	proto.RegisterEnum("google.cloud.dataproc.v1.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_State", JobStatus_State_name, JobStatus_State_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value)
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// Client API for JobController service

type JobControllerClient interface {
	// Submits a job to a cluster.
	SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/reference/rest/v1/projects.regions.jobs/list) or
	// [regions/{region}/jobs.get](/dataproc/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*google_protobuf2.Empty, error)
}

type jobControllerClient struct {
	cc *grpc.ClientConn
}

func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient {
	return &jobControllerClient{cc}
}
func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := grpc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/SubmitJob", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := grpc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/GetJob", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) {
	out := new(ListJobsResponse)
	err := grpc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/ListJobs", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) {
	out := new(Job)
	err := grpc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/CancelJob", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*google_protobuf2.Empty, error) {
	out := new(google_protobuf2.Empty)
	err := grpc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/DeleteJob", in, out, c.cc, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}

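// listAllJobs is an illustrative sketch, not generated code: it drains every
// page from ListJobs by feeding each response's NextPageToken into the next
// request's PageToken, the standard List pagination contract. It assumes the
// PageToken/NextPageToken fields generated for ListJobsRequest and
// ListJobsResponse earlier in this file; project and region are placeholders.
func listAllJobs(ctx context.Context, client JobControllerClient) ([]*Job, error) {
	var jobs []*Job
	req := &ListJobsRequest{
		ProjectId: "my-project", // hypothetical
		Region:    "global",     // hypothetical
	}
	for {
		resp, err := client.ListJobs(ctx, req)
		if err != nil {
			return nil, err
		}
		jobs = append(jobs, resp.Jobs...)
		if resp.NextPageToken == "" {
			return jobs, nil // final page reached
		}
		req.PageToken = resp.NextPageToken
	}
}
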
// Server API for JobController service

type JobControllerServer interface {
	// Submits a job to a cluster.
	SubmitJob(context.Context, *SubmitJobRequest) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(context.Context, *GetJobRequest) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/reference/rest/v1/projects.regions.jobs/list) or
	// [regions/{region}/jobs.get](/dataproc/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(context.Context, *CancelJobRequest) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(context.Context, *DeleteJobRequest) (*google_protobuf2.Empty, error)
}

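// stubJobController is an illustrative sketch, not generated code: a minimal
// implementation that satisfies JobControllerServer and is wired up with
// RegisterJobControllerServer below. Only SubmitJob has a body; the other
// methods return placeholders, and the "errors" and "net" imports are assumed
// in addition to those at the top of the file.
type stubJobController struct{}

func (s *stubJobController) SubmitJob(ctx context.Context, req *SubmitJobRequest) (*Job, error) {
	// Echo the job back marked PENDING, roughly what a backend would report
	// immediately after accepting a submission.
	job := req.Job
	if job == nil {
		job = &Job{}
	}
	job.Status = &JobStatus{State: JobStatus_PENDING}
	return job, nil
}

func (s *stubJobController) GetJob(ctx context.Context, req *GetJobRequest) (*Job, error) {
	return nil, errors.New("GetJob: not implemented in this sketch")
}

func (s *stubJobController) ListJobs(ctx context.Context, req *ListJobsRequest) (*ListJobsResponse, error) {
	return &ListJobsResponse{}, nil // empty listing
}

func (s *stubJobController) CancelJob(ctx context.Context, req *CancelJobRequest) (*Job, error) {
	return nil, errors.New("CancelJob: not implemented in this sketch")
}

func (s *stubJobController) DeleteJob(ctx context.Context, req *DeleteJobRequest) (*google_protobuf2.Empty, error) {
	return &google_protobuf2.Empty{}, nil // pretend the job was deleted
}

// serveStub shows the registration step: bind the stub to a grpc.Server and
// serve on the provided listener.
func serveStub(lis net.Listener) error {
	s := grpc.NewServer()
	RegisterJobControllerServer(s, &stubJobController{})
	return s.Serve(lis)
}
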
func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) {
	s.RegisterService(&_JobController_serviceDesc, srv)
}

func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(SubmitJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).SubmitJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/SubmitJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(GetJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).GetJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/GetJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ListJobsRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).ListJobs(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/ListJobs",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(CancelJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).CancelJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/CancelJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(DeleteJobRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(JobControllerServer).DeleteJob(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/google.cloud.dataproc.v1.JobController/DeleteJob",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest))
	}
	return interceptor(ctx, in, info, handler)
}

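// loggingUnaryInterceptor is an illustrative sketch, not generated code: the
// handlers above route every request through a grpc.UnaryServerInterceptor
// when one is configured, passing the UnaryServerInfo they build (note the
// FullMethod strings) plus a closure that invokes the real service method.
// The "log" import is assumed.
func loggingUnaryInterceptor(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	log.Printf("unary call: %s", info.FullMethod)
	resp, err := handler(ctx, req)
	if err != nil {
		log.Printf("unary call %s failed: %v", info.FullMethod, err)
	}
	return resp, err
}

// An interceptor like this would be installed at server construction time,
// e.g. grpc.NewServer(grpc.UnaryInterceptor(loggingUnaryInterceptor)).
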
var _JobController_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.dataproc.v1.JobController",
	HandlerType: (*JobControllerServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "SubmitJob",
			Handler:    _JobController_SubmitJob_Handler,
		},
		{
			MethodName: "GetJob",
			Handler:    _JobController_GetJob_Handler,
		},
		{
			MethodName: "ListJobs",
			Handler:    _JobController_ListJobs_Handler,
		},
		{
			MethodName: "CancelJob",
			Handler:    _JobController_CancelJob_Handler,
		},
		{
			MethodName: "DeleteJob",
			Handler:    _JobController_DeleteJob_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "google/cloud/dataproc/v1/jobs.proto",
}

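// cancelAndConfirm is an illustrative sketch, not generated code: as the
// CancelJob documentation above notes, cancellation is asynchronous, so the
// caller re-reads the job with GetJob to observe its state after the request
// (it may still be CANCEL_PENDING or CANCEL_STARTED before settling on
// CANCELLED). It assumes the generated request fields; project and region are
// hypothetical placeholders.
func cancelAndConfirm(ctx context.Context, client JobControllerClient, jobID string) (*Job, error) {
	if _, err := client.CancelJob(ctx, &CancelJobRequest{
		ProjectId: "my-project", // hypothetical
		Region:    "global",     // hypothetical
		JobId:     jobID,
	}); err != nil {
		return nil, err
	}
	// Re-fetch the job, as the CancelJob comment recommends.
	return client.GetJob(ctx, &GetJobRequest{
		ProjectId: "my-project",
		Region:    "global",
		JobId:     jobID,
	})
}
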
func init() { proto.RegisterFile("google/cloud/dataproc/v1/jobs.proto", fileDescriptor1) }

var fileDescriptor1 = []byte{
	// 1862 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0x5b, 0x6f, 0x23, 0x49,
	0x15, 0x8e, 0xef, 0xee, 0xe3, 0xb1, 0xd3, 0x29, 0x66, 0x17, 0xcb, 0xb3, 0xab, 0xcd, 0xf6, 0xb0,
	0x43, 0x76, 0x10, 0x36, 0xf1, 0xc2, 0xec, 0x90, 0x00, 0xbb, 0x8e, 0xed, 0x8c, 0x13, 0x8c, 0xe3,
	0x6d, 0x3b, 0x83, 0x84, 0x84, 0x7a, 0xda, 0x76, 0xc5, 0x69, 0x4f, 0xbb, 0xab, 0xd3, 0xd5, 0x6d,
	0xe1, 0x19, 0xcd, 0x0b, 0x7f, 0x00, 0x71, 0x11, 0x12, 0x3c, 0xf2, 0x2b, 0x90, 0x10, 0xe2, 0x01,
	0xc4, 0x1f, 0xe0, 0x15, 0xf1, 0xc4, 0x0f, 0x41, 0x55, 0xd5, 0xed, 0xf8, 0x12, 0x5f, 0xb2, 0xc3,
	0xae, 0x76, 0xf7, 0x29, 0xd5, 0xe7, 0x56, 0xa7, 0xea, 0xfb, 0xce, 0xa9, 0x2a, 0x07, 0xee, 0xf7,
	0x09, 0xe9, 0x9b, 0xb8, 0xd0, 0x35, 0x89, 0xd7, 0x2b, 0xf4, 0x74, 0x57, 0xb7, 0x1d, 0xd2, 0x2d,
	0x8c, 0xf6, 0x0b, 0x03, 0xd2, 0xa1, 0x79, 0xdb, 0x21, 0x2e, 0x41, 0x59, 0x61, 0x94, 0xe7, 0x46,
	0xf9, 0xc0, 0x28, 0x3f, 0xda, 0xcf, 0xbd, 0xe5, 0xbb, 0xeb, 0xb6, 0x51, 0xd0, 0x2d, 0x8b, 0xb8,
	0xba, 0x6b, 0x10, 0xcb, 0xf7, 0xcb, 0xdd, 0xf3, 0xb5, 0xfc, 0xab, 0xe3, 0x5d, 0x14, 0xf0, 0xd0,
	0x76, 0xc7, 0xbe, 0xf2, 0x9d, 0x79, 0xa5, 0x6b, 0x0c, 0x31, 0x75, 0xf5, 0xa1, 0x2d, 0x0c, 0x94,
	0xff, 0x84, 0x21, 0x5d, 0x27, 0xfd, 0xbe, 0x61, 0xf5, 0xcb, 0xc4, 0xba, 0x30, 0xfa, 0xe8, 0x12,
	0x76, 0x7a, 0x8e, 0x31, 0xc2, 0x8e, 0x66, 0x92, 0xbe, 0x66, 0xe2, 0x11, 0x36, 0x69, 0x36, 0xbc,
	0x1b, 0xd9, 0x4b, 0x15, 0x7f, 0x90, 0x5f, 0x96, 0x63, 0x7e, 0x26, 0x46, 0xbe, 0xc2, 0x03, 0xd4,
	0x49, 0xbf, 0xce, 0xdd, 0xab, 0x96, 0xeb, 0x8c, 0xd5, 0xed, 0xde, 0xac, 0x34, 0x77, 0x05, 0x77,
	0x6f, 0x32, 0x44, 0x32, 0x44, 0x9e, 0xe3, 0x71, 0x36, 0xb4, 0x1b, 0xda, 0x93, 0x54, 0x36, 0x44,
	0x65, 0x88, 0x8d, 0x74, 0xd3, 0xc3, 0xd9, 0xf0, 0x6e, 0x68, 0x2f, 0x53, 0xfc, 0xf6, 0xa6, 0x79,
	0xf0, 0xa8, 0xaa, 0xf0, 0x3d, 0x08, 0x3f, 0x0e, 0x29, 0x36, 0xc4, 0xb8, 0x0c, 0xbd, 0x01, 0x3b,
	0xf5, 0xea, 0xd3, 0x6a, 0x5d, 0x3b, 0x6f, 0xb4, 0x9a, 0xd5, 0xf2, 0xc9, 0xf1, 0x49, 0xb5, 0x22,
	0x6f, 0xa1, 0x04, 0x44, 0x4a, 0xf5, 0xba, 0x1c, 0x42, 0x12, 0xc4, 0xda, 0x6a, 0xa9, 0x5c, 0x95,
	0xc3, 0x6c, 0x58, 0xa9, 0x1e, 0x9d, 0x3f, 0x91, 0x23, 0x28, 0x09, 0xd1, 0x93, 0xc6, 0xf1, 0x99,
	0x1c, 0x65, 0xa3, 0x9f, 0x96, 0xd4, 0x86, 0x1c, 0x63, 0xea, 0xaa, 0xaa, 0x9e, 0xa9, 0x72, 0x9c,
	0x0d, 0x8f, 0x4b, 0xed, 0x52, 0x5d, 0x4e, 0xb0, 0x40, 0x67, 0xc7, 0xc7, 0x72, 0x52, 0xf9, 0x5b,
	0x04, 0xa4, 0x9a, 0xde, 0x23, 0xc4, 0x3e, 0x25, 0x1d, 0xf4, 0x2d, 0xd8, 0x19, 0xea, 0x86, 0xa5,
	0x0d, 0x74, 0x47, 0xbb, 0x30, 0x4c, 0xac, 0x79, 0x8e, 0x21, 0x16, 0x5a, 0xdb, 0x52, 0x33, 0x4c,
	0x75, 0xaa, 0x3b, 0xc7, 0x86, 0x89, 0xcf, 0x1d, 0x03, 0xbd, 0x03, 0xc0, 0x8d, 0xbb, 0xa6, 0x4e,
	0x29, 0x5f, 0x3a, 0xb3, 0x92, 0x98, 0xac, 0xcc, 0x44, 0x08, 0x41, 0x54, 0x77, 0xfa, 0x34, 0x1b,
	0xd9, 0x8d, 0xec, 0x49, 0x2a, 0x1f, 0x23, 0x05, 0xd2, 0xd3, 0xc1, 0x69, 0x36, 0xca, 0x95, 0xa9,
	0xc1, 0x24, 0x2e, 0x45, 0xf7, 0x40, 0xba, 0xd6, 0xc7, 0xb8, 0x3e, 0x79, 0x11, 0x28, 0xdf, 0x85,
	0x3b, 0xba, 0xd3, 0xbd, 0x34, 0x46, 0xbe, 0x3e, 0x2e, 0xfc, 0x7d, 0x19, 0x37, 0x69, 0x01, 0xd8,
	0x0e, 0xb1, 0xb1, 0xe3, 0x1a, 0x98, 0x66, 0x13, 0x9c, 0x1b, 0x1f, 0x2c, 0xc7, 0x64, 0xb2, 0xfc,
	0x7c, 0x73, 0xe2, 0x25, 0x28, 0x31, 0x15, 0x06, 0x35, 0x20, 0x63, 0x0a, 0xf0, 0xb4, 0x2e, 0x47,
	0x2f, 0x9b, 0xdc, 0x0d, 0xed, 0xa5, 0x8a, 0xdf, 0xdc, 0x10, 0x6c, 0x35, 0x6d, 0x4e, 0x7f, 0xe6,
	0x7e, 0x08, 0xdb, 0x73, 0xd3, 0xdd, 0x40, 0xac, 0xbb, 0xd3, 0xc4, 0x92, 0xa6, 0x98, 0x72, 0x94,
	0x84, 0xb8, 0xe0, 0xab, 0xf2, 0xd7, 0x08, 0x24, 0x5b, 0xb6, 0xee, 0x3c, 0xff, 0xea, 0x00, 0xa8,
	0xde, 0x00, 0x60, 0x71, 0xf9, 0x3e, 0x07, 0xab, 0xff, 0x72, 0xe2, 0xf7, 0x8f, 0x08, 0x40, 0x73,
	0x3c, 0x41, 0xb0, 0x00, 0x77, 0x39, 0x28, 0xf6, 0xd8, 0xbd, 0x24, 0xd6, 0x1c, 0x88, 0x2a, 0x47,
	0xb7, 0xc9, 0x55, 0x01, 0x8a, 0x01, 0x48, 0xe1, 0x29, 0x90, 0xf6, 0x40, 0x9e, 0xf3, 0x0f, 0x40,
	0xcc, 0xd8, 0xd3, 0xce, 0x9f, 0x0f, 0x9c, 0xed, 0x1b, 0xe0, 0xfc, 0xee, 0xf2, 0x6d, 0xbf, 0xde,
	0x8c, 0x2f, 0x11, 0xa0, 0xca, 0x7b, 0x20, 0x7d, 0xe2, 0x61, 0x67, 0x5c, 0x37, 0xa8, 0x8b, 0xb2,
	0x90, 0xb8, 0xf2, 0xb0, 0xc3, 0x96, 0x1b, 0xe2, 0xfb, 0x11, 0x7c, 0x2a, 0xbf, 0x8a, 0x42, 0xa2,
	0x66, 0x8c, 0x30, 0x83, 0xfa, 0x01, 0x64, 0x98, 0x78, 0xbc, 0x58, 0xa9, 0x77, 0xb8, 0x3c, 0x40,
	0xb8, 0x02, 0x20, 0xec, 0x4c, 0x83, 0xba, 0x7c, 0xe6, 0x54, 0xf1, 0xfe, 0xf2, 0x55, 0x4e, 0xd2,
	0x60, 0xc5, 0x7c, 0x35, 0xc9, 0x29, 0x0f, 0x5f, 0xeb, 0x12, 0xcb, 0x35, 0x2c, 0x0f, 0x6b, 0x8c,
	0x18, 0xba, 0x61, 0x7a, 0x0e, 0xce, 0x46, 0x76, 0x43, 0x7b, 0x49, 0x75, 0x27, 0x50, 0x9d, 0x59,
	0xc7, 0x42, 0x81, 0x74, 0x90, 0x69, 0xd7, 0x31, 0x6c, 0x57, 0x1b, 0xe9, 0x8e, 0xa1, 0x77, 0x4c,
	0x2c, 0xc8, 0x91, 0x2a, 0x3e, 0x5a, 0xd1, 0x4b, 0xc5, 0xd2, 0xf2, 0x2d, 0xee, 0xf9, 0x34, 0x70,
	0xf4, 0x4f, 0x58, 0x3a, 0x2b, 0x45, 0x9f, 0xcc, 0x10, 0x23, 0xc6, 0x83, 0xef, 0xaf, 0x0f, 0xbe,
	0x8a, 0x15, 0x0b, 0x7c, 0x8e, 0x2f, 0xf0, 0x39, 0x77, 0x04, 0x77, 0x6f, 0xca, 0xef, 0x36, 0x70,
	0xbf, 0x6e, 0xf9, 0x4b, 0x13, 0x82, 0x28, 0x7f, 0x89, 0x42, 0x8a, 0x13, 0xbe, 0x75, 0x65, 0x7e,
	0xfe, 0xac, 0xc0, 0x37, 0xa0, 0x1c, 0xe1, 0x40, 0x1c, 0xac, 0x69, 0xb8, 0x22, 0xdd, 0x0d, 0x91,
	0x3e, 0x9f, 0x41, 0x5a, 0xd0, 0xe8, 0x7b, 0x9b, 0x4d, 0x70, 0x2b, 0xb4, 0x1f, 0x2f, 0x76, 0xaf,
	0xc5, 0x3e, 0x11, 0x7f, 0xad, 0x3e, 0xf1, 0xc5, 0x62, 0xcf, 0xbf, 0xa3, 0x10, 0x6f, 0x1a, 0xfd,
	0x2f, 0x7e, 0x3b, 0x79, 0xb6, 0xb4, 0x9d, 0xac, 0xe0, 0x81, 0x58, 0xd9, 0x86, 0x1c, 0x6b, 0xde,
	0xd0, 0x4d, 0xbe, 0xb3, 0x36, 0xf6, 0x6b, 0x36, 0x93, 0x1b, 0xe8, 0x95, 0xf8, 0x0a, 0xd1, 0xab,
	0x0d, 0x77, 0x4e, 0x49, 0xa7, 0x69, 0xea, 0x5d, 0x3c, 0xc4, 0x96, 0xcb, 0x4e, 0xfb, 0xae, 0xe9,
	0x51, 0x17, 0x3b, 0x9a, 0xa5, 0x0f, 0xb1, 0x1f, 0x2f, 0xe5, 0xcb, 0x1a, 0xfa, 0x10, 0x4f, 0x9b,
	0x78, 0x9e, 0xd1, 0xf3, 0xc3, 0x07, 0x26, 0xe7, 0x9e, 0xd1, 0x53, 0xfe, 0x1e, 0x06, 0xe9, 0x94,
	0x74, 0x5a, 0xae, 0xee, 0x7a, 0x14, 0x7d, 0x04, 0x31, 0xea, 0xea, 0xae, 0x08, 0x96, 0x29, 0xbe,
	0xbf, 0x7c, 0xe3, 0x26, 0x3e, 0x79, 0xf6, 0x07, 0xab, 0xc2, 0x8f, 0x9d, 0xb6, 0x3d, 0xec, 0xea,
	0x86, 0xe9, 0x5f, 0x62, 0xd5, 0xe0, 0x13, 0x55, 0x40, 0xe6, 0x26, 0x1a, 0x75, 0x75, 0xc7, 0xd5,
	0xd8, 0xeb, 0xd2, 0xaf, 0xfe, 0x5c, 0x30, 0x4b, 0xf0, 0xf4, 0xcc, 0xb7, 0x83, 0xa7, 0xa7, 0x9a,
	0xe1, 0x3e, 0x2d, 0xe6, 0xc2, 0x84, 0xca, 0xef, 0x42, 0x10, 0xe3, 0x13, 0xb2, 0x67, 0x59, 0xab,
	0x5d, 0x6a, 0x57, 0xe7, 0x9e, 0x65, 0x29, 0x48, 0x34, 0xab, 0x8d, 0xca, 0x49, 0xe3, 0x89, 0x1c,
	0x42, 0x19, 0x80, 0x56, 0xb5, 0x7d, 0xde, 0xd4, 0x2a, 0x67, 0x8d, 0xaa, 0x9c, 0x64, 0x4a, 0xf5,
	0xbc, 0xd1, 0x60, 0xca, 0x30, 0x42, 0x90, 0x29, 0x97, 0x1a, 0xe5, 0x6a, 0x5d, 0x0b, 0x1c, 0x22,
	0x53, 0xb2, 0x56, 0xbb, 0xa4, 0xb6, 0xab, 0x15, 0x39, 0x81, 0xd2, 0x20, 0x09, 0x59, 0xbd, 0x5a,
	0x11, 0xcf, 0x39, 0x1e, 0x6d, 0xfa, 0x39, 0xa7, 0x54, 0x38, 0x36, 0x2a, 0xbe, 0xc0, 0x0e, 0xb6,
	0xba, 0x18, 0xbd, 0xcd, 0xf9, 0x3f, 0xc0, 0x5d, 0x57, 0x33, 0x7a, 0x3e, 0x32, 0x92, 0x2f, 0x39,
	0xe9, 0xa1, 0x37, 0x20, 0x3e, 0x20, 0x1d, 0x6d, 0x82, 0x48, 0x6c, 0x40, 0x3a, 0x27, 0x3d, 0xe5,
	0xcf, 0x71, 0x88, 0xb0, 0xee, 0x51, 0x01, 0xc9, 0x09, 0x42, 0x71, 0xe7, 0x54, 0xf1, 0xc1, 0x4a,
	0x24, 0x26, 0x13, 0xab, 0xd7, 0x8e, 0x2c, 0x8a, 0x1d, 0x90, 0xc5, 0x6f, 0x2d, 0xab, 0xa3, 0x4c,
	0xa8, 0xa5, 0x5e, 0x3b, 0xb2, 0x0e, 0x75, 0xc9, 0x1f, 0x65, 0xda, 0x80, 0x74, 0x78, 0x4b, 0x59,
	0xd9, 0xa1, 0x26, 0x0f, 0x38, 0xd6, 0xa1, 0x2e, 0x27, 0x8f, 0xd9, 0x12, 0x48, 0x94, 0x9d, 0x23,
	0x3c, 0x48, 0x94, 0x07, 0x51, 0xd6, 0x3f, 0x22, 0x6a, 0x5b, 0x6a, 0x92, 0x06, 0x97, 0xf1, 0x27,
	0x90, 0xb2, 0xc7, 0xd7, 0x41, 0x62, 0x3c, 0xc8, 0x37, 0x36, 0xb9, 0xba, 0xd6, 0xb6, 0x54, 0xf0,
	0x5d, 0x59, 0xa0, 0x1f, 0x41, 0x92, 0x5f, 0x91, 0x59, 0x14, 0x41, 0xc0, 0x77, 0xd7, 0xde, 0x73,
	0x6a, 0x5b, 0x6a, 0xe2, 0xd2, 0xbf, 0x2a, 0x1e, 0x42, 0xc2, 0x36, 0xfa, 0xdc, 0x5d, 0xb4, 0x97,
	0xdd, 0x75, 0x8d, 0xad, 0xb6, 0xa5, 0xc6, 0x6d, 0x71, 0x30, 0xfc, 0x18, 0xd2, 0x62, 0x0d, 0xf4,
	0xca, 0xe4, 0x21, 0xee, 0xf0, 0x10, 0xef, 0x6d, 0x74, 0xfe, 0xd6, 0xb6, 0xd4, 0x14, 0x9d, 0xba,
	0x9e, 0x1c, 0x42, 0x9c, 0xf2, 0x1a, 0xf4, 0xaf, 0xdb, 0xf7, 0x37, 0x28, 0x57, 0xd5, 0x77, 0x41,
	0xa7, 0x90, 0x11, 0x23, 0xed, 0xd2, 0xa0, 0x2e, 0x71, 0xc6, 0xd9, 0x34, 0x6f, 0xd3, 0x1b, 0x05,
	0x49, 0x0b, 0xd7, 0x9a, 0xf0, 0x44, 0x87, 0x90, 0xf3, 0x7f, 0x08, 0x22, 0x9e, 0x6b, 0x7b, 0xae,
	0xe6, 0x60, 0x4a, 0x3c, 0xa7, 0x2b, 0x8e, 0xbe, 0x1d, 0xce, 0xf1, 0xaf, 0x0b, 0x8b, 0x33, 0x6e,
	0xa0, 0xfa, 0x7a, 0x76, 0x06, 0x7e, 0x08, 0x59, 0xdf, 0x99, 0x9d, 0x54, 0x0e, 0x31, 0x79, 0x93,
	0xa7, 0xdc, 0x75, 0x9b, 0xbb, 0xbe, 0x21, 0xf4, 0x65, 0xa1, 0x66, 0xed, 0x9e, 0x9e, 0x3b, 0xc6,
	0x11, 0x40, 0xd2, 0x1d, 0xdb, 0x1c, 0x48, 0xe5, 0x05, 0xc8, 0x2d, 0xaf, 0x33, 0x34, 0x5c, 0x5e,
	0x0d, 0x57, 0x1e, 0xa6, 0xee, 0xba, 0x22, 0x7c, 0x13, 0xe2, 0x0e, 0xee, 0x1b, 0xc4, 0xe2, 0xac,
	0x96, 0x54, 0xff, 0x0b, 0x15, 0x20, 0xc2, 0x80, 0x11, 0x15, 0xf3, 0xf6, 0xea, 0xba, 0x63, 0x96,
	0xca, 0xcf, 0x21, 0xfd, 0x04, 0xff, 0x1f, 0x26, 0x5e, 0xd2, 0x15, 0xfe, 0x19, 0x86, 0x6d, 0x76,
	0xcc, 0x9f, 0x92, 0x0e, 0xbd, 0xf5, 0x0c, 0xf1, 0x99, 0x19, 0xee, 0x81, 0x64, 0xeb, 0x7d, 0xac,
	0x51, 0xe3, 0x85, 0x38, 0x6b, 0x62, 0x6a, 0x92, 0x09, 0x5a, 0xc6, 0x0b, 0xd1, 0xb3, 0x98, 0xd2,
	0x25, 0xcf, 0x71, 0x90, 0x1a, 0x37, 0x6f, 0x33, 0xc1, 0xc2, 0x71, 0x13, 0x5d, 0x3c, 0x6e, 0x30,
	0xec, 0xb0, 0x05, 0x88, 0x36, 0x3f, 0xd4, 0xdd, 0xee, 0x25, 0x76, 0x78, 0xa1, 0x66, 0x8a, 0xdf,
	0x5f, 0x71, 0x04, 0xcf, 0xae, 0x2d, 0x60, 0x19, 0xfe, 0x89, 0x08, 0xa0, 0x6e, 0x0f, 0x66, 0x05,
	0xca, 0x23, 0xd8, 0x9e, 0xb3, 0x09, 0x7e, 0x8c, 0xdb, 0x42, 0x00, 0xf1, 0x52, 0xb9, 0x7d, 0xf2,
	0xb4, 0x2a, 0xba, 0x7f, 0xe3, 0xac, 0xa1, 0xf9, 0xdf, 0x61, 0x65, 0x08, 0xf2, 0xf5, 0x5c, 0xd4,
	0x26, 0x16, 0xc5, 0x68, 0x1f, 0xa2, 0x03, 0xd2, 0x11, 0x4f, 0xc3, 0xb5, 0x68, 0x73, 0x53, 0xf4,
	0x00, 0xb6, 0x2d, 0xfc, 0x0b, 0x57, 0x9b, 0xda, 0x2c, 0x81, 0x57, 0x9a, 0x89, 0x9b, 0xc1, 0x86,
	0x29, 0xcf, 0x40, 0x2e, 0xeb, 0x56, 0x17, 0x9b, 0x9f, 0x19, 0x33, 0x9e, 0x81, 0x5c, 0xc1, 0x26,
	0x76, 0xf1, 0x67, 0x35, 0x43, 0xf1, 0xf7, 0x71, 0x48, 0x9f, 0x92, 0x8e, 0x5f, 0x79, 0x26, 0x76,
	0xd0, 0x1f, 0x42, 0x20, 0x4d, 0x2a, 0x0d, 0x3d, 0x5c, 0xd1, 0xb7, 0xe6, 0xca, 0x31, 0xb7, 0x7a,
	0x73, 0x95, 0xd2, 0x2f, 0xff, 0xf5, 0xdf, 0xdf, 0x86, 0x0f, 0x95, 0x47, 0x85, 0xd1, 0x7e, 0xc1,
	0x4f, 0x98, 0x16, 0x5e, 0x5e, 0x2f, 0xe6, 0x55, 0x41, 0xe4, 0x4a, 0x0b, 0x2f, 0xc5, 0xe0, 0x15,
	0xff, 0x4d, 0xfc, 0x80, 0xf2, 0x89, 0x0e, 0x42, 0x0f, 0xd1, 0x6f, 0x42, 0x10, 0x17, 0x95, 0x88,
	0x56, 0x5c, 0xf9, 0x66, 0x6a, 0x75, 0x5d, 0x56, 0x1f, 0xf3, 0xac, 0x0e, 0xd0, 0xe3, 0x5b, 0x66,
	0x55, 0x78, 0x29, 0xb6, 0xf3, 0x15, 0xfa, 0x63, 0x08, 0x92, 0x01, 0xed, 0xd0, 0xfb, 0x1b, 0x97,
	0x41, 0xee, 0xe1, 0x26, 0xa6, 0x82, 0xc5, 0xca, 0x87, 0x3c, 0xcb, 0x7d, 0x54, 0xb8, 0x65, 0x96,
	0xe8, 0x4f, 0x21, 0x90, 0x26, 0x24, 0x5d, 0x85, 0xe6, 0x3c, 0x93, 0xd7, 0xed, 0xdb, 0x29, 0xcf,
	0xa8, 0xa2, 0x7c, 0xf4, 0x69, 0xf7, 0xed, 0xa0, 0xcb, 0x67, 0x64, 0xb0, 0xfe, 0x3a, 0x04, 0xd2,
	0x84, 0xe7, 0xab, 0x92, 0x9c, 0x2f, 0x86, 0xdc, 0x9b, 0x0b, 0x37, 0xcb, 0xea, 0xd0, 0x76, 0xc7,
	0x01, 0xaa, 0x0f, 0x3f, 0x35, 0xaa, 0x47, 0x43, 0x78, 0xab, 0x4b, 0x86, 0x4b, 0x53, 0x39, 0x62,
	0x77, 0x6a, 0xda, 0x64, 0xb3, 0x36, 0x43, 0x3f, 0xfb, 0xd8, 0x37, 0xeb, 0x13, 0x53, 0xb7, 0xfa,
	0x79, 0xe2, 0xf4, 0x0b, 0x7d, 0x6c, 0xf1, 0x9c, 0x0a, 0x42, 0xa5, 0xdb, 0x06, 0x5d, 0xfc, 0x9f,
	0xcf, 0x61, 0x30, 0xee, 0xc4, 0xb9, 0xf1, 0x07, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0xaf, 0x59,
	0x9b, 0xa1, 0x1f, 0x1a, 0x00, 0x00,
}