|
- // Code generated by protoc-gen-go. DO NOT EDIT.
- // source: google/cloud/dataproc/v1/jobs.proto
-
- package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
-
- import proto "github.com/golang/protobuf/proto"
- import fmt "fmt"
- import math "math"
- import empty "github.com/golang/protobuf/ptypes/empty"
- import timestamp "github.com/golang/protobuf/ptypes/timestamp"
- import _ "google.golang.org/genproto/googleapis/api/annotations"
- import field_mask "google.golang.org/genproto/protobuf/field_mask"
-
- import (
- context "golang.org/x/net/context"
- grpc "google.golang.org/grpc"
- )
-
- // Reference imports to suppress errors if they are not otherwise used.
- var _ = proto.Marshal
- var _ = fmt.Errorf
- var _ = math.Inf
-
- // This is a compile-time assertion to ensure that this generated file
- // is compatible with the proto package it is being compiled against.
- // A compilation error at this line likely means your copy of the
- // proto package needs to be updated.
- const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
-
- // The Log4j level for job execution. When running an
- // [Apache Hive](http://hive.apache.org/) job, Cloud
- // Dataproc configures the Hive client to an equivalent verbosity level.
- type LoggingConfig_Level int32
-
- const (
- // Level is unspecified. Use default level for log4j.
- LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0
- // Use ALL level for log4j.
- LoggingConfig_ALL LoggingConfig_Level = 1
- // Use TRACE level for log4j.
- LoggingConfig_TRACE LoggingConfig_Level = 2
- // Use DEBUG level for log4j.
- LoggingConfig_DEBUG LoggingConfig_Level = 3
- // Use INFO level for log4j.
- LoggingConfig_INFO LoggingConfig_Level = 4
- // Use WARN level for log4j.
- LoggingConfig_WARN LoggingConfig_Level = 5
- // Use ERROR level for log4j.
- LoggingConfig_ERROR LoggingConfig_Level = 6
- // Use FATAL level for log4j.
- LoggingConfig_FATAL LoggingConfig_Level = 7
- // Turn off log4j.
- LoggingConfig_OFF LoggingConfig_Level = 8
- )
-
- var LoggingConfig_Level_name = map[int32]string{
- 0: "LEVEL_UNSPECIFIED",
- 1: "ALL",
- 2: "TRACE",
- 3: "DEBUG",
- 4: "INFO",
- 5: "WARN",
- 6: "ERROR",
- 7: "FATAL",
- 8: "OFF",
- }
- var LoggingConfig_Level_value = map[string]int32{
- "LEVEL_UNSPECIFIED": 0,
- "ALL": 1,
- "TRACE": 2,
- "DEBUG": 3,
- "INFO": 4,
- "WARN": 5,
- "ERROR": 6,
- "FATAL": 7,
- "OFF": 8,
- }
-
- func (x LoggingConfig_Level) String() string {
- return proto.EnumName(LoggingConfig_Level_name, int32(x))
- }
- func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{0, 0}
- }
-
- // The job state.
- type JobStatus_State int32
-
- const (
- // The job state is unknown.
- JobStatus_STATE_UNSPECIFIED JobStatus_State = 0
- // The job is pending; it has been submitted, but is not yet running.
- JobStatus_PENDING JobStatus_State = 1
- // Job has been received by the service and completed initial setup;
- // it will soon be submitted to the cluster.
- JobStatus_SETUP_DONE JobStatus_State = 8
- // The job is running on the cluster.
- JobStatus_RUNNING JobStatus_State = 2
- // A CancelJob request has been received, but is pending.
- JobStatus_CANCEL_PENDING JobStatus_State = 3
- // Transient in-flight resources have been canceled, and the request to
- // cancel the running job has been issued to the cluster.
- JobStatus_CANCEL_STARTED JobStatus_State = 7
- // The job cancellation was successful.
- JobStatus_CANCELLED JobStatus_State = 4
- // The job has completed successfully.
- JobStatus_DONE JobStatus_State = 5
- // The job has completed, but encountered an error.
- JobStatus_ERROR JobStatus_State = 6
- // Job attempt has failed. The detail field contains failure details for
- // this attempt.
- //
- // Applies to restartable jobs only.
- JobStatus_ATTEMPT_FAILURE JobStatus_State = 9
- )
-
- var JobStatus_State_name = map[int32]string{
- 0: "STATE_UNSPECIFIED",
- 1: "PENDING",
- 8: "SETUP_DONE",
- 2: "RUNNING",
- 3: "CANCEL_PENDING",
- 7: "CANCEL_STARTED",
- 4: "CANCELLED",
- 5: "DONE",
- 6: "ERROR",
- 9: "ATTEMPT_FAILURE",
- }
- var JobStatus_State_value = map[string]int32{
- "STATE_UNSPECIFIED": 0,
- "PENDING": 1,
- "SETUP_DONE": 8,
- "RUNNING": 2,
- "CANCEL_PENDING": 3,
- "CANCEL_STARTED": 7,
- "CANCELLED": 4,
- "DONE": 5,
- "ERROR": 6,
- "ATTEMPT_FAILURE": 9,
- }
-
- func (x JobStatus_State) String() string {
- return proto.EnumName(JobStatus_State_name, int32(x))
- }
- func (JobStatus_State) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{9, 0}
- }
-
- // The job substate.
- type JobStatus_Substate int32
-
- const (
- // The job substate is unknown.
- JobStatus_UNSPECIFIED JobStatus_Substate = 0
- // The Job is submitted to the agent.
- //
- // Applies to RUNNING state.
- JobStatus_SUBMITTED JobStatus_Substate = 1
- // The Job has been received and is awaiting execution (it may be waiting
- // for a condition to be met). See the "details" field for the reason for
- // the delay.
- //
- // Applies to RUNNING state.
- JobStatus_QUEUED JobStatus_Substate = 2
- // The agent-reported status is out of date, which may be caused by a
- // loss of communication between the agent and Cloud Dataproc. If the
- // agent does not send a timely update, the job will fail.
- //
- // Applies to RUNNING state.
- JobStatus_STALE_STATUS JobStatus_Substate = 3
- )
-
- var JobStatus_Substate_name = map[int32]string{
- 0: "UNSPECIFIED",
- 1: "SUBMITTED",
- 2: "QUEUED",
- 3: "STALE_STATUS",
- }
- var JobStatus_Substate_value = map[string]int32{
- "UNSPECIFIED": 0,
- "SUBMITTED": 1,
- "QUEUED": 2,
- "STALE_STATUS": 3,
- }
-
- func (x JobStatus_Substate) String() string {
- return proto.EnumName(JobStatus_Substate_name, int32(x))
- }
- func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{9, 1}
- }
-
- // The application state, corresponding to
- // <code>YarnProtos.YarnApplicationStateProto</code>.
- type YarnApplication_State int32
-
- const (
- // Status is unspecified.
- YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0
- // Status is NEW.
- YarnApplication_NEW YarnApplication_State = 1
- // Status is NEW_SAVING.
- YarnApplication_NEW_SAVING YarnApplication_State = 2
- // Status is SUBMITTED.
- YarnApplication_SUBMITTED YarnApplication_State = 3
- // Status is ACCEPTED.
- YarnApplication_ACCEPTED YarnApplication_State = 4
- // Status is RUNNING.
- YarnApplication_RUNNING YarnApplication_State = 5
- // Status is FINISHED.
- YarnApplication_FINISHED YarnApplication_State = 6
- // Status is FAILED.
- YarnApplication_FAILED YarnApplication_State = 7
- // Status is KILLED.
- YarnApplication_KILLED YarnApplication_State = 8
- )
-
- var YarnApplication_State_name = map[int32]string{
- 0: "STATE_UNSPECIFIED",
- 1: "NEW",
- 2: "NEW_SAVING",
- 3: "SUBMITTED",
- 4: "ACCEPTED",
- 5: "RUNNING",
- 6: "FINISHED",
- 7: "FAILED",
- 8: "KILLED",
- }
- var YarnApplication_State_value = map[string]int32{
- "STATE_UNSPECIFIED": 0,
- "NEW": 1,
- "NEW_SAVING": 2,
- "SUBMITTED": 3,
- "ACCEPTED": 4,
- "RUNNING": 5,
- "FINISHED": 6,
- "FAILED": 7,
- "KILLED": 8,
- }
-
- func (x YarnApplication_State) String() string {
- return proto.EnumName(YarnApplication_State_name, int32(x))
- }
- func (YarnApplication_State) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{11, 0}
- }
-
- // A matcher that specifies categories of job states.
- type ListJobsRequest_JobStateMatcher int32
-
- const (
- // Match all jobs, regardless of state.
- ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0
- // Only match jobs in non-terminal states: PENDING, RUNNING, or
- // CANCEL_PENDING.
- ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1
- // Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
- ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2
- )
-
- var ListJobsRequest_JobStateMatcher_name = map[int32]string{
- 0: "ALL",
- 1: "ACTIVE",
- 2: "NON_ACTIVE",
- }
- var ListJobsRequest_JobStateMatcher_value = map[string]int32{
- "ALL": 0,
- "ACTIVE": 1,
- "NON_ACTIVE": 2,
- }
-
- func (x ListJobsRequest_JobStateMatcher) String() string {
- return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x))
- }
- func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{16, 0}
- }
-
- // The runtime logging config of the job.
- type LoggingConfig struct {
- // The per-package log levels for the driver. This may include
- // "root" package name to configure rootLogger.
- // Examples:
- // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
- DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1.LoggingConfig_Level"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *LoggingConfig) Reset() { *m = LoggingConfig{} }
- func (m *LoggingConfig) String() string { return proto.CompactTextString(m) }
- func (*LoggingConfig) ProtoMessage() {}
- func (*LoggingConfig) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{0}
- }
- func (m *LoggingConfig) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_LoggingConfig.Unmarshal(m, b)
- }
- func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic)
- }
- func (dst *LoggingConfig) XXX_Merge(src proto.Message) {
- xxx_messageInfo_LoggingConfig.Merge(dst, src)
- }
- func (m *LoggingConfig) XXX_Size() int {
- return xxx_messageInfo_LoggingConfig.Size(m)
- }
- func (m *LoggingConfig) XXX_DiscardUnknown() {
- xxx_messageInfo_LoggingConfig.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo
-
- func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level {
- if m != nil {
- return m.DriverLogLevels
- }
- return nil
- }
-
- // A Cloud Dataproc job for running
- // [Apache Hadoop
- // MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
- // jobs on [Apache Hadoop
- // YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
- type HadoopJob struct {
- // Required. Indicates the location of the driver's main class. Specify
- // either the jar file that contains the main class or the main class name.
- // To specify both, add the jar file to `jar_file_uris`, and then specify
- // the main class name in this property.
- //
- // Types that are valid to be assigned to Driver:
- // *HadoopJob_MainJarFileUri
- // *HadoopJob_MainClass
- Driver isHadoopJob_Driver `protobuf_oneof:"driver"`
- // Optional. The arguments to pass to the driver. Do not
- // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as
- // job properties, since a collision may occur that causes an incorrect job
- // submission.
- Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
- // Optional. Jar file URIs to add to the CLASSPATHs of the
- // Hadoop driver and tasks.
- JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied
- // to the working directory of Hadoop drivers and distributed tasks. Useful
- // for naively parallel tasks.
- FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
- // Optional. HCFS URIs of archives to be extracted in the working directory of
- // Hadoop drivers and tasks. Supported file types:
- // .jar, .tar, .tar.gz, .tgz, or .zip.
- ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
- // Optional. A mapping of property names to values, used to configure Hadoop.
- // Properties that conflict with values set by the Cloud Dataproc API may be
- // overwritten. Can include properties set in /etc/hadoop/conf/*-site and
- // classes in user code.
- Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. The runtime log config for job execution.
- LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *HadoopJob) Reset() { *m = HadoopJob{} }
- func (m *HadoopJob) String() string { return proto.CompactTextString(m) }
- func (*HadoopJob) ProtoMessage() {}
- func (*HadoopJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{1}
- }
- func (m *HadoopJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_HadoopJob.Unmarshal(m, b)
- }
- func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic)
- }
- func (dst *HadoopJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_HadoopJob.Merge(dst, src)
- }
- func (m *HadoopJob) XXX_Size() int {
- return xxx_messageInfo_HadoopJob.Size(m)
- }
- func (m *HadoopJob) XXX_DiscardUnknown() {
- xxx_messageInfo_HadoopJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_HadoopJob proto.InternalMessageInfo
-
- type isHadoopJob_Driver interface {
- isHadoopJob_Driver()
- }
-
- type HadoopJob_MainJarFileUri struct {
- MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
- }
-
- type HadoopJob_MainClass struct {
- MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
- }
-
- func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {}
-
- func (*HadoopJob_MainClass) isHadoopJob_Driver() {}
-
- func (m *HadoopJob) GetDriver() isHadoopJob_Driver {
- if m != nil {
- return m.Driver
- }
- return nil
- }
-
- func (m *HadoopJob) GetMainJarFileUri() string {
- if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok {
- return x.MainJarFileUri
- }
- return ""
- }
-
- func (m *HadoopJob) GetMainClass() string {
- if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok {
- return x.MainClass
- }
- return ""
- }
-
- func (m *HadoopJob) GetArgs() []string {
- if m != nil {
- return m.Args
- }
- return nil
- }
-
- func (m *HadoopJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- func (m *HadoopJob) GetFileUris() []string {
- if m != nil {
- return m.FileUris
- }
- return nil
- }
-
- func (m *HadoopJob) GetArchiveUris() []string {
- if m != nil {
- return m.ArchiveUris
- }
- return nil
- }
-
- func (m *HadoopJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *HadoopJob) GetLoggingConfig() *LoggingConfig {
- if m != nil {
- return m.LoggingConfig
- }
- return nil
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{
- (*HadoopJob_MainJarFileUri)(nil),
- (*HadoopJob_MainClass)(nil),
- }
- }
-
- func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*HadoopJob)
- // driver
- switch x := m.Driver.(type) {
- case *HadoopJob_MainJarFileUri:
- b.EncodeVarint(1<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.MainJarFileUri)
- case *HadoopJob_MainClass:
- b.EncodeVarint(2<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.MainClass)
- case nil:
- default:
- return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x)
- }
- return nil
- }
-
- func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*HadoopJob)
- switch tag {
- case 1: // driver.main_jar_file_uri
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Driver = &HadoopJob_MainJarFileUri{x}
- return true, err
- case 2: // driver.main_class
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Driver = &HadoopJob_MainClass{x}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _HadoopJob_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*HadoopJob)
- // driver
- switch x := m.Driver.(type) {
- case *HadoopJob_MainJarFileUri:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
- n += len(x.MainJarFileUri)
- case *HadoopJob_MainClass:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.MainClass)))
- n += len(x.MainClass)
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/)
- // applications on YARN.
- type SparkJob struct {
- // Required. The specification of the main method to call to drive the job.
- // Specify either the jar file that contains the main class or the main class
- // name. To pass both a main jar and a main class in that jar, add the jar to
- // `CommonJob.jar_file_uris`, and then specify the main class name in
- // `main_class`.
- //
- // Types that are valid to be assigned to Driver:
- // *SparkJob_MainJarFileUri
- // *SparkJob_MainClass
- Driver isSparkJob_Driver `protobuf_oneof:"driver"`
- // Optional. The arguments to pass to the driver. Do not include arguments,
- // such as `--conf`, that can be set as job properties, since a collision may
- // occur that causes an incorrect job submission.
- Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
- // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
- // Spark driver and tasks.
- JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- // Optional. HCFS URIs of files to be copied to the working directory of
- // Spark drivers and distributed tasks. Useful for naively parallel tasks.
- FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
- // Optional. HCFS URIs of archives to be extracted in the working directory
- // of Spark drivers and tasks. Supported file types:
- // .jar, .tar, .tar.gz, .tgz, and .zip.
- ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
- // Optional. A mapping of property names to values, used to configure Spark.
- // Properties that conflict with values set by the Cloud Dataproc API may be
- // overwritten. Can include properties set in
- // /etc/spark/conf/spark-defaults.conf and classes in user code.
- Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. The runtime log config for job execution.
- LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *SparkJob) Reset() { *m = SparkJob{} }
- func (m *SparkJob) String() string { return proto.CompactTextString(m) }
- func (*SparkJob) ProtoMessage() {}
- func (*SparkJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{2}
- }
- func (m *SparkJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SparkJob.Unmarshal(m, b)
- }
- func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic)
- }
- func (dst *SparkJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SparkJob.Merge(dst, src)
- }
- func (m *SparkJob) XXX_Size() int {
- return xxx_messageInfo_SparkJob.Size(m)
- }
- func (m *SparkJob) XXX_DiscardUnknown() {
- xxx_messageInfo_SparkJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_SparkJob proto.InternalMessageInfo
-
- type isSparkJob_Driver interface {
- isSparkJob_Driver()
- }
-
- type SparkJob_MainJarFileUri struct {
- MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
- }
-
- type SparkJob_MainClass struct {
- MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
- }
-
- func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {}
-
- func (*SparkJob_MainClass) isSparkJob_Driver() {}
-
- func (m *SparkJob) GetDriver() isSparkJob_Driver {
- if m != nil {
- return m.Driver
- }
- return nil
- }
-
- func (m *SparkJob) GetMainJarFileUri() string {
- if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok {
- return x.MainJarFileUri
- }
- return ""
- }
-
- func (m *SparkJob) GetMainClass() string {
- if x, ok := m.GetDriver().(*SparkJob_MainClass); ok {
- return x.MainClass
- }
- return ""
- }
-
- func (m *SparkJob) GetArgs() []string {
- if m != nil {
- return m.Args
- }
- return nil
- }
-
- func (m *SparkJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- func (m *SparkJob) GetFileUris() []string {
- if m != nil {
- return m.FileUris
- }
- return nil
- }
-
- func (m *SparkJob) GetArchiveUris() []string {
- if m != nil {
- return m.ArchiveUris
- }
- return nil
- }
-
- func (m *SparkJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *SparkJob) GetLoggingConfig() *LoggingConfig {
- if m != nil {
- return m.LoggingConfig
- }
- return nil
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{
- (*SparkJob_MainJarFileUri)(nil),
- (*SparkJob_MainClass)(nil),
- }
- }
-
- func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*SparkJob)
- // driver
- switch x := m.Driver.(type) {
- case *SparkJob_MainJarFileUri:
- b.EncodeVarint(1<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.MainJarFileUri)
- case *SparkJob_MainClass:
- b.EncodeVarint(2<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.MainClass)
- case nil:
- default:
- return fmt.Errorf("SparkJob.Driver has unexpected type %T", x)
- }
- return nil
- }
-
- func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*SparkJob)
- switch tag {
- case 1: // driver.main_jar_file_uri
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Driver = &SparkJob_MainJarFileUri{x}
- return true, err
- case 2: // driver.main_class
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Driver = &SparkJob_MainClass{x}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _SparkJob_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*SparkJob)
- // driver
- switch x := m.Driver.(type) {
- case *SparkJob_MainJarFileUri:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
- n += len(x.MainJarFileUri)
- case *SparkJob_MainClass:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.MainClass)))
- n += len(x.MainClass)
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // A Cloud Dataproc job for running
- // [Apache
- // PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
- // applications on YARN.
- type PySparkJob struct {
- // Required. The HCFS URI of the main Python file to use as the driver. Must
- // be a .py file.
- MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"`
- // Optional. The arguments to pass to the driver. Do not include arguments,
- // such as `--conf`, that can be set as job properties, since a collision may
- // occur that causes an incorrect job submission.
- Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
- // Optional. HCFS file URIs of Python files to pass to the PySpark
- // framework. Supported file types: .py, .egg, and .zip.
- PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"`
- // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
- // Python driver and tasks.
- JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- // Optional. HCFS URIs of files to be copied to the working directory of
- // Python drivers and distributed tasks. Useful for naively parallel tasks.
- FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
- // Optional. HCFS URIs of archives to be extracted in the working directory of
- // .jar, .tar, .tar.gz, .tgz, and .zip.
- ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
- // Optional. A mapping of property names to values, used to configure PySpark.
- // Properties that conflict with values set by the Cloud Dataproc API may be
- // overwritten. Can include properties set in
- // /etc/spark/conf/spark-defaults.conf and classes in user code.
- Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. The runtime log config for job execution.
- LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *PySparkJob) Reset() { *m = PySparkJob{} }
- func (m *PySparkJob) String() string { return proto.CompactTextString(m) }
- func (*PySparkJob) ProtoMessage() {}
- func (*PySparkJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{3}
- }
- func (m *PySparkJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_PySparkJob.Unmarshal(m, b)
- }
- func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic)
- }
- func (dst *PySparkJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_PySparkJob.Merge(dst, src)
- }
- func (m *PySparkJob) XXX_Size() int {
- return xxx_messageInfo_PySparkJob.Size(m)
- }
- func (m *PySparkJob) XXX_DiscardUnknown() {
- xxx_messageInfo_PySparkJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_PySparkJob proto.InternalMessageInfo
-
- func (m *PySparkJob) GetMainPythonFileUri() string {
- if m != nil {
- return m.MainPythonFileUri
- }
- return ""
- }
-
- func (m *PySparkJob) GetArgs() []string {
- if m != nil {
- return m.Args
- }
- return nil
- }
-
- func (m *PySparkJob) GetPythonFileUris() []string {
- if m != nil {
- return m.PythonFileUris
- }
- return nil
- }
-
- func (m *PySparkJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- func (m *PySparkJob) GetFileUris() []string {
- if m != nil {
- return m.FileUris
- }
- return nil
- }
-
- func (m *PySparkJob) GetArchiveUris() []string {
- if m != nil {
- return m.ArchiveUris
- }
- return nil
- }
-
- func (m *PySparkJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *PySparkJob) GetLoggingConfig() *LoggingConfig {
- if m != nil {
- return m.LoggingConfig
- }
- return nil
- }
-
- // A list of queries to run on a cluster.
- type QueryList struct {
- // Required. The queries to execute. You do not need to terminate a query
- // with a semicolon. Multiple queries can be specified in one string
- // by separating each with a semicolon. Here is an example of an Cloud
- // Dataproc API snippet that uses a QueryList to specify a HiveJob:
- //
- // "hiveJob": {
- // "queryList": {
- // "queries": [
- // "query1",
- // "query2",
- // "query3;query4",
- // ]
- // }
- // }
- Queries []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *QueryList) Reset() { *m = QueryList{} }
- func (m *QueryList) String() string { return proto.CompactTextString(m) }
- func (*QueryList) ProtoMessage() {}
- func (*QueryList) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{4}
- }
- func (m *QueryList) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_QueryList.Unmarshal(m, b)
- }
- func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_QueryList.Marshal(b, m, deterministic)
- }
- func (dst *QueryList) XXX_Merge(src proto.Message) {
- xxx_messageInfo_QueryList.Merge(dst, src)
- }
- func (m *QueryList) XXX_Size() int {
- return xxx_messageInfo_QueryList.Size(m)
- }
- func (m *QueryList) XXX_DiscardUnknown() {
- xxx_messageInfo_QueryList.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_QueryList proto.InternalMessageInfo
-
- func (m *QueryList) GetQueries() []string {
- if m != nil {
- return m.Queries
- }
- return nil
- }
-
- // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/)
- // queries on YARN.
- type HiveJob struct {
- // Required. The sequence of Hive queries to execute, specified as either
- // an HCFS file URI or a list of queries.
- //
- // Types that are valid to be assigned to Queries:
- // *HiveJob_QueryFileUri
- // *HiveJob_QueryList
- Queries isHiveJob_Queries `protobuf_oneof:"queries"`
- // Optional. Whether to continue executing queries if a query fails.
- // The default value is `false`. Setting to `true` can be useful when
- // executing independent parallel queries.
- ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
- // Optional. Mapping of query variable names to values (equivalent to the
- // Hive command: `SET name="value";`).
- ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. A mapping of property names and values, used to configure Hive.
- // Properties that conflict with values set by the Cloud Dataproc API may be
- // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
- // /etc/hive/conf/hive-site.xml, and classes in user code.
- Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. HCFS URIs of jar files to add to the CLASSPATH of the
- // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
- // and UDFs.
- JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *HiveJob) Reset() { *m = HiveJob{} }
- func (m *HiveJob) String() string { return proto.CompactTextString(m) }
- func (*HiveJob) ProtoMessage() {}
- func (*HiveJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{5}
- }
- func (m *HiveJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_HiveJob.Unmarshal(m, b)
- }
- func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic)
- }
- func (dst *HiveJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_HiveJob.Merge(dst, src)
- }
- func (m *HiveJob) XXX_Size() int {
- return xxx_messageInfo_HiveJob.Size(m)
- }
- func (m *HiveJob) XXX_DiscardUnknown() {
- xxx_messageInfo_HiveJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_HiveJob proto.InternalMessageInfo
-
- type isHiveJob_Queries interface {
- isHiveJob_Queries()
- }
-
- type HiveJob_QueryFileUri struct {
- QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
- }
-
- type HiveJob_QueryList struct {
- QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
- }
-
- func (*HiveJob_QueryFileUri) isHiveJob_Queries() {}
-
- func (*HiveJob_QueryList) isHiveJob_Queries() {}
-
- func (m *HiveJob) GetQueries() isHiveJob_Queries {
- if m != nil {
- return m.Queries
- }
- return nil
- }
-
- func (m *HiveJob) GetQueryFileUri() string {
- if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok {
- return x.QueryFileUri
- }
- return ""
- }
-
- func (m *HiveJob) GetQueryList() *QueryList {
- if x, ok := m.GetQueries().(*HiveJob_QueryList); ok {
- return x.QueryList
- }
- return nil
- }
-
- func (m *HiveJob) GetContinueOnFailure() bool {
- if m != nil {
- return m.ContinueOnFailure
- }
- return false
- }
-
- func (m *HiveJob) GetScriptVariables() map[string]string {
- if m != nil {
- return m.ScriptVariables
- }
- return nil
- }
-
- func (m *HiveJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *HiveJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{
- (*HiveJob_QueryFileUri)(nil),
- (*HiveJob_QueryList)(nil),
- }
- }
-
- func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*HiveJob)
- // queries
- switch x := m.Queries.(type) {
- case *HiveJob_QueryFileUri:
- b.EncodeVarint(1<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.QueryFileUri)
- case *HiveJob_QueryList:
- b.EncodeVarint(2<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.QueryList); err != nil {
- return err
- }
- case nil:
- default:
- return fmt.Errorf("HiveJob.Queries has unexpected type %T", x)
- }
- return nil
- }
-
- func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*HiveJob)
- switch tag {
- case 1: // queries.query_file_uri
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Queries = &HiveJob_QueryFileUri{x}
- return true, err
- case 2: // queries.query_list
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(QueryList)
- err := b.DecodeMessage(msg)
- m.Queries = &HiveJob_QueryList{msg}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _HiveJob_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*HiveJob)
- // queries
- switch x := m.Queries.(type) {
- case *HiveJob_QueryFileUri:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
- n += len(x.QueryFileUri)
- case *HiveJob_QueryList:
- s := proto.Size(x.QueryList)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // A Cloud Dataproc job for running [Apache Spark
- // SQL](http://spark.apache.org/sql/) queries.
- type SparkSqlJob struct {
- // Required. The sequence of Spark SQL queries to execute, specified as
- // either an HCFS file URI or as a list of queries.
- //
- // Types that are valid to be assigned to Queries:
- // *SparkSqlJob_QueryFileUri
- // *SparkSqlJob_QueryList
- Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"`
- // Optional. Mapping of query variable names to values (equivalent to the
- // Spark SQL command: SET `name="value";`).
- ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. A mapping of property names to values, used to configure
- // Spark SQL's SparkConf. Properties that conflict with values set by the
- // Cloud Dataproc API may be overwritten.
- Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
- JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- // Optional. The runtime log config for job execution.
- LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *SparkSqlJob) Reset() { *m = SparkSqlJob{} }
- func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) }
- func (*SparkSqlJob) ProtoMessage() {}
- func (*SparkSqlJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{6}
- }
- func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b)
- }
- func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic)
- }
- func (dst *SparkSqlJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SparkSqlJob.Merge(dst, src)
- }
- func (m *SparkSqlJob) XXX_Size() int {
- return xxx_messageInfo_SparkSqlJob.Size(m)
- }
- func (m *SparkSqlJob) XXX_DiscardUnknown() {
- xxx_messageInfo_SparkSqlJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo
-
- type isSparkSqlJob_Queries interface {
- isSparkSqlJob_Queries()
- }
-
- type SparkSqlJob_QueryFileUri struct {
- QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
- }
-
- type SparkSqlJob_QueryList struct {
- QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
- }
-
- func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {}
-
- func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries() {}
-
- func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries {
- if m != nil {
- return m.Queries
- }
- return nil
- }
-
- func (m *SparkSqlJob) GetQueryFileUri() string {
- if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok {
- return x.QueryFileUri
- }
- return ""
- }
-
- func (m *SparkSqlJob) GetQueryList() *QueryList {
- if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok {
- return x.QueryList
- }
- return nil
- }
-
- func (m *SparkSqlJob) GetScriptVariables() map[string]string {
- if m != nil {
- return m.ScriptVariables
- }
- return nil
- }
-
- func (m *SparkSqlJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *SparkSqlJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig {
- if m != nil {
- return m.LoggingConfig
- }
- return nil
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{
- (*SparkSqlJob_QueryFileUri)(nil),
- (*SparkSqlJob_QueryList)(nil),
- }
- }
-
- func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*SparkSqlJob)
- // queries
- switch x := m.Queries.(type) {
- case *SparkSqlJob_QueryFileUri:
- b.EncodeVarint(1<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.QueryFileUri)
- case *SparkSqlJob_QueryList:
- b.EncodeVarint(2<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.QueryList); err != nil {
- return err
- }
- case nil:
- default:
- return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x)
- }
- return nil
- }
-
- func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*SparkSqlJob)
- switch tag {
- case 1: // queries.query_file_uri
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Queries = &SparkSqlJob_QueryFileUri{x}
- return true, err
- case 2: // queries.query_list
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(QueryList)
- err := b.DecodeMessage(msg)
- m.Queries = &SparkSqlJob_QueryList{msg}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*SparkSqlJob)
- // queries
- switch x := m.Queries.(type) {
- case *SparkSqlJob_QueryFileUri:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
- n += len(x.QueryFileUri)
- case *SparkSqlJob_QueryList:
- s := proto.Size(x.QueryList)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
- // queries on YARN.
- type PigJob struct {
- // Required. The sequence of Pig queries to execute, specified as an HCFS
- // file URI or a list of queries.
- //
- // Types that are valid to be assigned to Queries:
- // *PigJob_QueryFileUri
- // *PigJob_QueryList
- Queries isPigJob_Queries `protobuf_oneof:"queries"`
- // Optional. Whether to continue executing queries if a query fails.
- // The default value is `false`. Setting to `true` can be useful when
- // executing independent parallel queries.
- ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
- // Optional. Mapping of query variable names to values (equivalent to the Pig
- // command: `name=[value]`).
- ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. A mapping of property names to values, used to configure Pig.
- // Properties that conflict with values set by the Cloud Dataproc API may be
- // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
- // /etc/pig/conf/pig.properties, and classes in user code.
- Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. HCFS URIs of jar files to add to the CLASSPATH of
- // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
- // Optional. The runtime log config for job execution.
- LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *PigJob) Reset() { *m = PigJob{} }
- func (m *PigJob) String() string { return proto.CompactTextString(m) }
- func (*PigJob) ProtoMessage() {}
- func (*PigJob) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{7}
- }
- func (m *PigJob) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_PigJob.Unmarshal(m, b)
- }
- func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_PigJob.Marshal(b, m, deterministic)
- }
- func (dst *PigJob) XXX_Merge(src proto.Message) {
- xxx_messageInfo_PigJob.Merge(dst, src)
- }
- func (m *PigJob) XXX_Size() int {
- return xxx_messageInfo_PigJob.Size(m)
- }
- func (m *PigJob) XXX_DiscardUnknown() {
- xxx_messageInfo_PigJob.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_PigJob proto.InternalMessageInfo
-
- type isPigJob_Queries interface {
- isPigJob_Queries()
- }
-
- type PigJob_QueryFileUri struct {
- QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
- }
-
- type PigJob_QueryList struct {
- QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
- }
-
- func (*PigJob_QueryFileUri) isPigJob_Queries() {}
-
- func (*PigJob_QueryList) isPigJob_Queries() {}
-
- func (m *PigJob) GetQueries() isPigJob_Queries {
- if m != nil {
- return m.Queries
- }
- return nil
- }
-
- func (m *PigJob) GetQueryFileUri() string {
- if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok {
- return x.QueryFileUri
- }
- return ""
- }
-
- func (m *PigJob) GetQueryList() *QueryList {
- if x, ok := m.GetQueries().(*PigJob_QueryList); ok {
- return x.QueryList
- }
- return nil
- }
-
- func (m *PigJob) GetContinueOnFailure() bool {
- if m != nil {
- return m.ContinueOnFailure
- }
- return false
- }
-
- func (m *PigJob) GetScriptVariables() map[string]string {
- if m != nil {
- return m.ScriptVariables
- }
- return nil
- }
-
- func (m *PigJob) GetProperties() map[string]string {
- if m != nil {
- return m.Properties
- }
- return nil
- }
-
- func (m *PigJob) GetJarFileUris() []string {
- if m != nil {
- return m.JarFileUris
- }
- return nil
- }
-
- func (m *PigJob) GetLoggingConfig() *LoggingConfig {
- if m != nil {
- return m.LoggingConfig
- }
- return nil
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{
- (*PigJob_QueryFileUri)(nil),
- (*PigJob_QueryList)(nil),
- }
- }
-
- func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*PigJob)
- // queries
- switch x := m.Queries.(type) {
- case *PigJob_QueryFileUri:
- b.EncodeVarint(1<<3 | proto.WireBytes)
- b.EncodeStringBytes(x.QueryFileUri)
- case *PigJob_QueryList:
- b.EncodeVarint(2<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.QueryList); err != nil {
- return err
- }
- case nil:
- default:
- return fmt.Errorf("PigJob.Queries has unexpected type %T", x)
- }
- return nil
- }
-
- func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*PigJob)
- switch tag {
- case 1: // queries.query_file_uri
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- x, err := b.DecodeStringBytes()
- m.Queries = &PigJob_QueryFileUri{x}
- return true, err
- case 2: // queries.query_list
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(QueryList)
- err := b.DecodeMessage(msg)
- m.Queries = &PigJob_QueryList{msg}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _PigJob_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*PigJob)
- // queries
- switch x := m.Queries.(type) {
- case *PigJob_QueryFileUri:
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
- n += len(x.QueryFileUri)
- case *PigJob_QueryList:
- s := proto.Size(x.QueryList)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // Cloud Dataproc job config.
- type JobPlacement struct {
- // Required. The name of the cluster where the job will be submitted.
- ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
- // Output only. A cluster UUID generated by the Cloud Dataproc service when
- // the job is submitted.
- ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *JobPlacement) Reset() { *m = JobPlacement{} }
- func (m *JobPlacement) String() string { return proto.CompactTextString(m) }
- func (*JobPlacement) ProtoMessage() {}
- func (*JobPlacement) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{8}
- }
- func (m *JobPlacement) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_JobPlacement.Unmarshal(m, b)
- }
- func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic)
- }
- func (dst *JobPlacement) XXX_Merge(src proto.Message) {
- xxx_messageInfo_JobPlacement.Merge(dst, src)
- }
- func (m *JobPlacement) XXX_Size() int {
- return xxx_messageInfo_JobPlacement.Size(m)
- }
- func (m *JobPlacement) XXX_DiscardUnknown() {
- xxx_messageInfo_JobPlacement.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_JobPlacement proto.InternalMessageInfo
-
- func (m *JobPlacement) GetClusterName() string {
- if m != nil {
- return m.ClusterName
- }
- return ""
- }
-
- func (m *JobPlacement) GetClusterUuid() string {
- if m != nil {
- return m.ClusterUuid
- }
- return ""
- }
-
- // Cloud Dataproc job status.
- type JobStatus struct {
- // Output only. A state message specifying the overall job state.
- State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.JobStatus_State" json:"state,omitempty"`
- // Output only. Optional job state details, such as an error
- // description if the state is <code>ERROR</code>.
- Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"`
- // Output only. The time when this state was entered.
- StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"`
- // Output only. Additional state information, which includes
- // status reported by the agent.
- Substate JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1.JobStatus_Substate" json:"substate,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *JobStatus) Reset() { *m = JobStatus{} }
- func (m *JobStatus) String() string { return proto.CompactTextString(m) }
- func (*JobStatus) ProtoMessage() {}
- func (*JobStatus) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{9}
- }
- func (m *JobStatus) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_JobStatus.Unmarshal(m, b)
- }
- func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic)
- }
- func (dst *JobStatus) XXX_Merge(src proto.Message) {
- xxx_messageInfo_JobStatus.Merge(dst, src)
- }
- func (m *JobStatus) XXX_Size() int {
- return xxx_messageInfo_JobStatus.Size(m)
- }
- func (m *JobStatus) XXX_DiscardUnknown() {
- xxx_messageInfo_JobStatus.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_JobStatus proto.InternalMessageInfo
-
- func (m *JobStatus) GetState() JobStatus_State {
- if m != nil {
- return m.State
- }
- return JobStatus_STATE_UNSPECIFIED
- }
-
- func (m *JobStatus) GetDetails() string {
- if m != nil {
- return m.Details
- }
- return ""
- }
-
- func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp {
- if m != nil {
- return m.StateStartTime
- }
- return nil
- }
-
- func (m *JobStatus) GetSubstate() JobStatus_Substate {
- if m != nil {
- return m.Substate
- }
- return JobStatus_UNSPECIFIED
- }
-
- // Encapsulates the full scoping used to reference a job.
- type JobReference struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Optional. The job ID, which must be unique within the project. The job ID
- // is generated by the server upon job submission or provided by the user as a
- // means to perform retries without creating duplicate jobs. The ID must
- // contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
- // hyphens (-). The maximum length is 100 characters.
- JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *JobReference) Reset() { *m = JobReference{} }
- func (m *JobReference) String() string { return proto.CompactTextString(m) }
- func (*JobReference) ProtoMessage() {}
- func (*JobReference) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{10}
- }
- func (m *JobReference) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_JobReference.Unmarshal(m, b)
- }
- func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_JobReference.Marshal(b, m, deterministic)
- }
- func (dst *JobReference) XXX_Merge(src proto.Message) {
- xxx_messageInfo_JobReference.Merge(dst, src)
- }
- func (m *JobReference) XXX_Size() int {
- return xxx_messageInfo_JobReference.Size(m)
- }
- func (m *JobReference) XXX_DiscardUnknown() {
- xxx_messageInfo_JobReference.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_JobReference proto.InternalMessageInfo
-
- func (m *JobReference) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *JobReference) GetJobId() string {
- if m != nil {
- return m.JobId
- }
- return ""
- }
-
- // A YARN application created by a job. Application information is a subset of
- // <code>org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto</code>.
- //
- // **Beta Feature**: This report is available for testing purposes only. It may
- // be changed before final release.
- type YarnApplication struct {
- // Required. The application name.
- Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
- // Required. The application state.
- State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1.YarnApplication_State" json:"state,omitempty"`
- // Required. The numerical progress of the application, from 1 to 100.
- Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"`
- // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or
- // TimelineServer that provides application-specific information. The URL uses
- // the internal hostname, and requires a proxy server for resolution and,
- // possibly, access.
- TrackingUrl string `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *YarnApplication) Reset() { *m = YarnApplication{} }
- func (m *YarnApplication) String() string { return proto.CompactTextString(m) }
- func (*YarnApplication) ProtoMessage() {}
- func (*YarnApplication) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{11}
- }
- func (m *YarnApplication) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_YarnApplication.Unmarshal(m, b)
- }
- func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic)
- }
- func (dst *YarnApplication) XXX_Merge(src proto.Message) {
- xxx_messageInfo_YarnApplication.Merge(dst, src)
- }
- func (m *YarnApplication) XXX_Size() int {
- return xxx_messageInfo_YarnApplication.Size(m)
- }
- func (m *YarnApplication) XXX_DiscardUnknown() {
- xxx_messageInfo_YarnApplication.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_YarnApplication proto.InternalMessageInfo
-
- func (m *YarnApplication) GetName() string {
- if m != nil {
- return m.Name
- }
- return ""
- }
-
- func (m *YarnApplication) GetState() YarnApplication_State {
- if m != nil {
- return m.State
- }
- return YarnApplication_STATE_UNSPECIFIED
- }
-
- func (m *YarnApplication) GetProgress() float32 {
- if m != nil {
- return m.Progress
- }
- return 0
- }
-
- func (m *YarnApplication) GetTrackingUrl() string {
- if m != nil {
- return m.TrackingUrl
- }
- return ""
- }
-
- // A Cloud Dataproc job resource.
- type Job struct {
- // Optional. The fully qualified reference to the job, which can be used to
- // obtain the equivalent REST path of the job resource. If this property
- // is not specified when a job is created, the server generates a
- // <code>job_id</code>.
- Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"`
- // Required. Job information, including how, when, and where to
- // run the job.
- Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"`
- // Required. The application/framework-specific portion of the job.
- //
- // Types that are valid to be assigned to TypeJob:
- // *Job_HadoopJob
- // *Job_SparkJob
- // *Job_PysparkJob
- // *Job_HiveJob
- // *Job_PigJob
- // *Job_SparkSqlJob
- TypeJob isJob_TypeJob `protobuf_oneof:"type_job"`
- // Output only. The job status. Additional application-specific
- // status information may be contained in the <code>type_job</code>
- // and <code>yarn_applications</code> fields.
- Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"`
- // Output only. The previous job status.
- StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"`
- // Output only. The collection of YARN applications spun up by this job.
- //
- // **Beta** Feature: This report is available for testing purposes only. It
- // may be changed before final release.
- YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"`
- // Output only. A URI pointing to the location of the stdout of the job's
- // driver program.
- DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"`
- // Output only. If present, the location of miscellaneous control files
- // which may be used as part of job setup and handling. If not present,
- // control files may be placed in the same location as `driver_output_uri`.
- DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"`
- // Optional. The labels to associate with this job.
- // Label **keys** must contain 1 to 63 characters, and must conform to
- // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
- // Label **values** may be empty, but, if present, must contain 1 to 63
- // characters, and must conform to [RFC
- // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
- // associated with a job.
- Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
- // Optional. Job scheduling configuration.
- Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"`
- // Output only. A UUID that uniquely identifies a job within the project
- // over time. This is in contrast to a user-settable reference.job_id that
- // may be reused over time.
- JobUuid string `protobuf:"bytes,22,opt,name=job_uuid,json=jobUuid,proto3" json:"job_uuid,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *Job) Reset() { *m = Job{} }
- func (m *Job) String() string { return proto.CompactTextString(m) }
- func (*Job) ProtoMessage() {}
- func (*Job) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{12}
- }
- func (m *Job) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_Job.Unmarshal(m, b)
- }
- func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_Job.Marshal(b, m, deterministic)
- }
- func (dst *Job) XXX_Merge(src proto.Message) {
- xxx_messageInfo_Job.Merge(dst, src)
- }
- func (m *Job) XXX_Size() int {
- return xxx_messageInfo_Job.Size(m)
- }
- func (m *Job) XXX_DiscardUnknown() {
- xxx_messageInfo_Job.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_Job proto.InternalMessageInfo
-
- func (m *Job) GetReference() *JobReference {
- if m != nil {
- return m.Reference
- }
- return nil
- }
-
- func (m *Job) GetPlacement() *JobPlacement {
- if m != nil {
- return m.Placement
- }
- return nil
- }
-
- type isJob_TypeJob interface {
- isJob_TypeJob()
- }
-
- type Job_HadoopJob struct {
- HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"`
- }
-
- type Job_SparkJob struct {
- SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"`
- }
-
- type Job_PysparkJob struct {
- PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"`
- }
-
- type Job_HiveJob struct {
- HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"`
- }
-
- type Job_PigJob struct {
- PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"`
- }
-
- type Job_SparkSqlJob struct {
- SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"`
- }
-
- func (*Job_HadoopJob) isJob_TypeJob() {}
-
- func (*Job_SparkJob) isJob_TypeJob() {}
-
- func (*Job_PysparkJob) isJob_TypeJob() {}
-
- func (*Job_HiveJob) isJob_TypeJob() {}
-
- func (*Job_PigJob) isJob_TypeJob() {}
-
- func (*Job_SparkSqlJob) isJob_TypeJob() {}
-
- func (m *Job) GetTypeJob() isJob_TypeJob {
- if m != nil {
- return m.TypeJob
- }
- return nil
- }
-
- func (m *Job) GetHadoopJob() *HadoopJob {
- if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok {
- return x.HadoopJob
- }
- return nil
- }
-
- func (m *Job) GetSparkJob() *SparkJob {
- if x, ok := m.GetTypeJob().(*Job_SparkJob); ok {
- return x.SparkJob
- }
- return nil
- }
-
- func (m *Job) GetPysparkJob() *PySparkJob {
- if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok {
- return x.PysparkJob
- }
- return nil
- }
-
- func (m *Job) GetHiveJob() *HiveJob {
- if x, ok := m.GetTypeJob().(*Job_HiveJob); ok {
- return x.HiveJob
- }
- return nil
- }
-
- func (m *Job) GetPigJob() *PigJob {
- if x, ok := m.GetTypeJob().(*Job_PigJob); ok {
- return x.PigJob
- }
- return nil
- }
-
- func (m *Job) GetSparkSqlJob() *SparkSqlJob {
- if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok {
- return x.SparkSqlJob
- }
- return nil
- }
-
- func (m *Job) GetStatus() *JobStatus {
- if m != nil {
- return m.Status
- }
- return nil
- }
-
- func (m *Job) GetStatusHistory() []*JobStatus {
- if m != nil {
- return m.StatusHistory
- }
- return nil
- }
-
- func (m *Job) GetYarnApplications() []*YarnApplication {
- if m != nil {
- return m.YarnApplications
- }
- return nil
- }
-
- func (m *Job) GetDriverOutputResourceUri() string {
- if m != nil {
- return m.DriverOutputResourceUri
- }
- return ""
- }
-
- func (m *Job) GetDriverControlFilesUri() string {
- if m != nil {
- return m.DriverControlFilesUri
- }
- return ""
- }
-
- func (m *Job) GetLabels() map[string]string {
- if m != nil {
- return m.Labels
- }
- return nil
- }
-
- func (m *Job) GetScheduling() *JobScheduling {
- if m != nil {
- return m.Scheduling
- }
- return nil
- }
-
- func (m *Job) GetJobUuid() string {
- if m != nil {
- return m.JobUuid
- }
- return ""
- }
-
- // XXX_OneofFuncs is for the internal use of the proto package.
- func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
- return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{
- (*Job_HadoopJob)(nil),
- (*Job_SparkJob)(nil),
- (*Job_PysparkJob)(nil),
- (*Job_HiveJob)(nil),
- (*Job_PigJob)(nil),
- (*Job_SparkSqlJob)(nil),
- }
- }
-
- func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
- m := msg.(*Job)
- // type_job
- switch x := m.TypeJob.(type) {
- case *Job_HadoopJob:
- b.EncodeVarint(3<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.HadoopJob); err != nil {
- return err
- }
- case *Job_SparkJob:
- b.EncodeVarint(4<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.SparkJob); err != nil {
- return err
- }
- case *Job_PysparkJob:
- b.EncodeVarint(5<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.PysparkJob); err != nil {
- return err
- }
- case *Job_HiveJob:
- b.EncodeVarint(6<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.HiveJob); err != nil {
- return err
- }
- case *Job_PigJob:
- b.EncodeVarint(7<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.PigJob); err != nil {
- return err
- }
- case *Job_SparkSqlJob:
- b.EncodeVarint(12<<3 | proto.WireBytes)
- if err := b.EncodeMessage(x.SparkSqlJob); err != nil {
- return err
- }
- case nil:
- default:
- return fmt.Errorf("Job.TypeJob has unexpected type %T", x)
- }
- return nil
- }
-
- func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
- m := msg.(*Job)
- switch tag {
- case 3: // type_job.hadoop_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(HadoopJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_HadoopJob{msg}
- return true, err
- case 4: // type_job.spark_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(SparkJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_SparkJob{msg}
- return true, err
- case 5: // type_job.pyspark_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(PySparkJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_PysparkJob{msg}
- return true, err
- case 6: // type_job.hive_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(HiveJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_HiveJob{msg}
- return true, err
- case 7: // type_job.pig_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(PigJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_PigJob{msg}
- return true, err
- case 12: // type_job.spark_sql_job
- if wire != proto.WireBytes {
- return true, proto.ErrInternalBadWireType
- }
- msg := new(SparkSqlJob)
- err := b.DecodeMessage(msg)
- m.TypeJob = &Job_SparkSqlJob{msg}
- return true, err
- default:
- return false, nil
- }
- }
-
- func _Job_OneofSizer(msg proto.Message) (n int) {
- m := msg.(*Job)
- // type_job
- switch x := m.TypeJob.(type) {
- case *Job_HadoopJob:
- s := proto.Size(x.HadoopJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case *Job_SparkJob:
- s := proto.Size(x.SparkJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case *Job_PysparkJob:
- s := proto.Size(x.PysparkJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case *Job_HiveJob:
- s := proto.Size(x.HiveJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case *Job_PigJob:
- s := proto.Size(x.PigJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case *Job_SparkSqlJob:
- s := proto.Size(x.SparkSqlJob)
- n += 1 // tag and wire
- n += proto.SizeVarint(uint64(s))
- n += s
- case nil:
- default:
- panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
- }
- return n
- }
-
- // Job scheduling options.
- type JobScheduling struct {
- // Optional. Maximum number of times per hour a driver may be restarted as
- // a result of driver terminating with non-zero code before job is
- // reported failed.
- //
- // A job may be reported as thrashing if driver exits with non-zero code
- // 4 times within 10 minute window.
- //
- // Maximum value is 10.
- MaxFailuresPerHour int32 `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *JobScheduling) Reset() { *m = JobScheduling{} }
- func (m *JobScheduling) String() string { return proto.CompactTextString(m) }
- func (*JobScheduling) ProtoMessage() {}
- func (*JobScheduling) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{13}
- }
- func (m *JobScheduling) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_JobScheduling.Unmarshal(m, b)
- }
- func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic)
- }
- func (dst *JobScheduling) XXX_Merge(src proto.Message) {
- xxx_messageInfo_JobScheduling.Merge(dst, src)
- }
- func (m *JobScheduling) XXX_Size() int {
- return xxx_messageInfo_JobScheduling.Size(m)
- }
- func (m *JobScheduling) XXX_DiscardUnknown() {
- xxx_messageInfo_JobScheduling.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_JobScheduling proto.InternalMessageInfo
-
- func (m *JobScheduling) GetMaxFailuresPerHour() int32 {
- if m != nil {
- return m.MaxFailuresPerHour
- }
- return 0
- }
-
- // A request to submit a job.
- type SubmitJobRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
- // Required. The job resource.
- Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"`
- // Optional. A unique id used to identify the request. If the server
- // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest]
- // requests with the same id, then the second request will be ignored and the
- // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
- // is returned.
- //
- // It is recommended to always set this value to a
- // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
- //
- // The id must contain only letters (a-z, A-Z), numbers (0-9),
- // underscores (_), and hyphens (-). The maximum length is 40 characters.
- RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *SubmitJobRequest) Reset() { *m = SubmitJobRequest{} }
- func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) }
- func (*SubmitJobRequest) ProtoMessage() {}
- func (*SubmitJobRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{14}
- }
- func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b)
- }
- func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic)
- }
- func (dst *SubmitJobRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_SubmitJobRequest.Merge(dst, src)
- }
- func (m *SubmitJobRequest) XXX_Size() int {
- return xxx_messageInfo_SubmitJobRequest.Size(m)
- }
- func (m *SubmitJobRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo
-
- func (m *SubmitJobRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *SubmitJobRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *SubmitJobRequest) GetJob() *Job {
- if m != nil {
- return m.Job
- }
- return nil
- }
-
- func (m *SubmitJobRequest) GetRequestId() string {
- if m != nil {
- return m.RequestId
- }
- return ""
- }
-
- // A request to get the resource representation for a job in a project.
- type GetJobRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
- // Required. The job ID.
- JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *GetJobRequest) Reset() { *m = GetJobRequest{} }
- func (m *GetJobRequest) String() string { return proto.CompactTextString(m) }
- func (*GetJobRequest) ProtoMessage() {}
- func (*GetJobRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{15}
- }
- func (m *GetJobRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_GetJobRequest.Unmarshal(m, b)
- }
- func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic)
- }
- func (dst *GetJobRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_GetJobRequest.Merge(dst, src)
- }
- func (m *GetJobRequest) XXX_Size() int {
- return xxx_messageInfo_GetJobRequest.Size(m)
- }
- func (m *GetJobRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_GetJobRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo
-
- func (m *GetJobRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *GetJobRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *GetJobRequest) GetJobId() string {
- if m != nil {
- return m.JobId
- }
- return ""
- }
-
- // A request to list jobs in a project.
- type ListJobsRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"`
- // Optional. The number of results to return in each response.
- PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
- // Optional. The page token, returned by a previous call, to request the
- // next page of results.
- PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
- // Optional. If set, the returned jobs list includes only jobs that were
- // submitted to the named cluster.
- ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
- // Optional. Specifies enumerated categories of jobs to list.
- // (default = match ALL jobs).
- //
- // If `filter` is provided, `jobStateMatcher` will be ignored.
- JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"`
- // Optional. A filter constraining the jobs to list. Filters are
- // case-sensitive and have the following syntax:
- //
- // [field = value] AND [field [= value]] ...
- //
- // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label
- // key. **value** can be `*` to match all values.
- // `status.state` can be either `ACTIVE` or `NON_ACTIVE`.
- // Only the logical `AND` operator is supported; space-separated items are
- // treated as having an implicit `AND` operator.
- //
- // Example filter:
- //
- // status.state = ACTIVE AND labels.env = staging AND labels.starred = *
- Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} }
- func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) }
- func (*ListJobsRequest) ProtoMessage() {}
- func (*ListJobsRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{16}
- }
- func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b)
- }
- func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic)
- }
- func (dst *ListJobsRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ListJobsRequest.Merge(dst, src)
- }
- func (m *ListJobsRequest) XXX_Size() int {
- return xxx_messageInfo_ListJobsRequest.Size(m)
- }
- func (m *ListJobsRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_ListJobsRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo
-
- func (m *ListJobsRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *ListJobsRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *ListJobsRequest) GetPageSize() int32 {
- if m != nil {
- return m.PageSize
- }
- return 0
- }
-
- func (m *ListJobsRequest) GetPageToken() string {
- if m != nil {
- return m.PageToken
- }
- return ""
- }
-
- func (m *ListJobsRequest) GetClusterName() string {
- if m != nil {
- return m.ClusterName
- }
- return ""
- }
-
- func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher {
- if m != nil {
- return m.JobStateMatcher
- }
- return ListJobsRequest_ALL
- }
-
- func (m *ListJobsRequest) GetFilter() string {
- if m != nil {
- return m.Filter
- }
- return ""
- }
-
- // A request to update a job.
- type UpdateJobRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"`
- // Required. The job ID.
- JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
- // Required. The changes to the job.
- Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"`
- // Required. Specifies the path, relative to <code>Job</code>, of
- // the field to update. For example, to update the labels of a Job the
- // <code>update_mask</code> parameter would be specified as
- // <code>labels</code>, and the `PATCH` request body would specify the new
- // value. <strong>Note:</strong> Currently, <code>labels</code> is the only
- // field that can be updated.
- UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} }
- func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) }
- func (*UpdateJobRequest) ProtoMessage() {}
- func (*UpdateJobRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{17}
- }
- func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b)
- }
- func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic)
- }
- func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_UpdateJobRequest.Merge(dst, src)
- }
- func (m *UpdateJobRequest) XXX_Size() int {
- return xxx_messageInfo_UpdateJobRequest.Size(m)
- }
- func (m *UpdateJobRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo
-
- func (m *UpdateJobRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *UpdateJobRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *UpdateJobRequest) GetJobId() string {
- if m != nil {
- return m.JobId
- }
- return ""
- }
-
- func (m *UpdateJobRequest) GetJob() *Job {
- if m != nil {
- return m.Job
- }
- return nil
- }
-
- func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask {
- if m != nil {
- return m.UpdateMask
- }
- return nil
- }
-
- // A list of jobs in a project.
- type ListJobsResponse struct {
- // Output only. Jobs list.
- Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"`
- // Optional. This token is included in the response if there are more results
- // to fetch. To fetch additional results, provide this value as the
- // `page_token` in a subsequent <code>ListJobsRequest</code>.
- NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} }
- func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) }
- func (*ListJobsResponse) ProtoMessage() {}
- func (*ListJobsResponse) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{18}
- }
- func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b)
- }
- func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic)
- }
- func (dst *ListJobsResponse) XXX_Merge(src proto.Message) {
- xxx_messageInfo_ListJobsResponse.Merge(dst, src)
- }
- func (m *ListJobsResponse) XXX_Size() int {
- return xxx_messageInfo_ListJobsResponse.Size(m)
- }
- func (m *ListJobsResponse) XXX_DiscardUnknown() {
- xxx_messageInfo_ListJobsResponse.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo
-
- func (m *ListJobsResponse) GetJobs() []*Job {
- if m != nil {
- return m.Jobs
- }
- return nil
- }
-
- func (m *ListJobsResponse) GetNextPageToken() string {
- if m != nil {
- return m.NextPageToken
- }
- return ""
- }
-
- // A request to cancel a job.
- type CancelJobRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
- // Required. The job ID.
- JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *CancelJobRequest) Reset() { *m = CancelJobRequest{} }
- func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) }
- func (*CancelJobRequest) ProtoMessage() {}
- func (*CancelJobRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{19}
- }
- func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b)
- }
- func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic)
- }
- func (dst *CancelJobRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_CancelJobRequest.Merge(dst, src)
- }
- func (m *CancelJobRequest) XXX_Size() int {
- return xxx_messageInfo_CancelJobRequest.Size(m)
- }
- func (m *CancelJobRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_CancelJobRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo
-
- func (m *CancelJobRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *CancelJobRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *CancelJobRequest) GetJobId() string {
- if m != nil {
- return m.JobId
- }
- return ""
- }
-
- // A request to delete a job.
- type DeleteJobRequest struct {
- // Required. The ID of the Google Cloud Platform project that the job
- // belongs to.
- ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
- // Required. The Cloud Dataproc region in which to handle the request.
- Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
- // Required. The job ID.
- JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
- XXX_NoUnkeyedLiteral struct{} `json:"-"`
- XXX_unrecognized []byte `json:"-"`
- XXX_sizecache int32 `json:"-"`
- }
-
- func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} }
- func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) }
- func (*DeleteJobRequest) ProtoMessage() {}
- func (*DeleteJobRequest) Descriptor() ([]byte, []int) {
- return fileDescriptor_jobs_695072de36c8e540, []int{20}
- }
- func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error {
- return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b)
- }
- func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
- return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic)
- }
- func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) {
- xxx_messageInfo_DeleteJobRequest.Merge(dst, src)
- }
- func (m *DeleteJobRequest) XXX_Size() int {
- return xxx_messageInfo_DeleteJobRequest.Size(m)
- }
- func (m *DeleteJobRequest) XXX_DiscardUnknown() {
- xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m)
- }
-
- var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo
-
- func (m *DeleteJobRequest) GetProjectId() string {
- if m != nil {
- return m.ProjectId
- }
- return ""
- }
-
- func (m *DeleteJobRequest) GetRegion() string {
- if m != nil {
- return m.Region
- }
- return ""
- }
-
- func (m *DeleteJobRequest) GetJobId() string {
- if m != nil {
- return m.JobId
- }
- return ""
- }
-
- func init() {
- proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1.LoggingConfig")
- proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry")
- proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1.HadoopJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HadoopJob.PropertiesEntry")
- proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1.SparkJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkJob.PropertiesEntry")
- proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1.PySparkJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PySparkJob.PropertiesEntry")
- proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1.QueryList")
- proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1.HiveJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.PropertiesEntry")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry")
- proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1.SparkSqlJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry")
- proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1.PigJob")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.PropertiesEntry")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry")
- proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1.JobPlacement")
- proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1.JobStatus")
- proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1.JobReference")
- proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1.YarnApplication")
- proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1.Job")
- proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.Job.LabelsEntry")
- proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1.JobScheduling")
- proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1.SubmitJobRequest")
- proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1.GetJobRequest")
- proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1.ListJobsRequest")
- proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1.UpdateJobRequest")
- proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1.ListJobsResponse")
- proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1.CancelJobRequest")
- proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1.DeleteJobRequest")
- proto.RegisterEnum("google.cloud.dataproc.v1.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value)
- proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_State", JobStatus_State_name, JobStatus_State_value)
- proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value)
- proto.RegisterEnum("google.cloud.dataproc.v1.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value)
- proto.RegisterEnum("google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value)
- }
-
- // Reference imports to suppress errors if they are not otherwise used.
- var _ context.Context
- var _ grpc.ClientConn
-
- // This is a compile-time assertion to ensure that this generated file
- // is compatible with the grpc package it is being compiled against.
- const _ = grpc.SupportPackageIsVersion4
-
- // JobControllerClient is the client API for JobController service.
- //
- // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
- type JobControllerClient interface {
- // Submits a job to a cluster.
- SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error)
- // Gets the resource representation for a job in a project.
- GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error)
- // Lists regions/{region}/jobs in a project.
- ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error)
- // Updates a job in a project.
- UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error)
- // Starts a job cancellation request. To access the job resource
- // after cancellation, call
- // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
- // or
- // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
- CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error)
- // Deletes the job from the project. If the job is active, the delete fails,
- // and the response returns `FAILED_PRECONDITION`.
- DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error)
- }
-
- type jobControllerClient struct {
- cc *grpc.ClientConn
- }
-
- func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient {
- return &jobControllerClient{cc}
- }
-
- func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) {
- out := new(Job)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/SubmitJob", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) {
- out := new(Job)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/GetJob", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) {
- out := new(ListJobsResponse)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/ListJobs", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) {
- out := new(Job)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/UpdateJob", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) {
- out := new(Job)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/CancelJob", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
- out := new(empty.Empty)
- err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/DeleteJob", in, out, opts...)
- if err != nil {
- return nil, err
- }
- return out, nil
- }
-
- // JobControllerServer is the server API for JobController service.
- // Implementations are attached to a *grpc.Server via
- // RegisterJobControllerServer; each method corresponds to one unary RPC
- // in google/cloud/dataproc/v1/jobs.proto.
- type JobControllerServer interface {
- // Submits a job to a cluster.
- SubmitJob(context.Context, *SubmitJobRequest) (*Job, error)
- // Gets the resource representation for a job in a project.
- GetJob(context.Context, *GetJobRequest) (*Job, error)
- // Lists regions/{region}/jobs in a project.
- ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error)
- // Updates a job in a project.
- UpdateJob(context.Context, *UpdateJobRequest) (*Job, error)
- // Starts a job cancellation request. To access the job resource
- // after cancellation, call
- // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
- // or
- // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
- CancelJob(context.Context, *CancelJobRequest) (*Job, error)
- // Deletes the job from the project. If the job is active, the delete fails,
- // and the response returns `FAILED_PRECONDITION`.
- DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error)
- }
-
- // RegisterJobControllerServer registers srv's method handlers with the
- // given gRPC server under the JobController service descriptor, making
- // the service callable by remote clients.
- func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) {
- s.RegisterService(&_JobController_serviceDesc, srv)
- }
-
- // _JobController_SubmitJob_Handler adapts JobControllerServer.SubmitJob to
- // the grpc runtime: it populates a SubmitJobRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(SubmitJobRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).SubmitJob(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/SubmitJob",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_GetJob_Handler adapts JobControllerServer.GetJob to the
- // grpc runtime: it populates a GetJobRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(GetJobRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).GetJob(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/GetJob",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_ListJobs_Handler adapts JobControllerServer.ListJobs to
- // the grpc runtime: it populates a ListJobsRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(ListJobsRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).ListJobs(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/ListJobs",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_UpdateJob_Handler adapts JobControllerServer.UpdateJob to
- // the grpc runtime: it populates an UpdateJobRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(UpdateJobRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).UpdateJob(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/UpdateJob",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_CancelJob_Handler adapts JobControllerServer.CancelJob to
- // the grpc runtime: it populates a CancelJobRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(CancelJobRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).CancelJob(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/CancelJob",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_DeleteJob_Handler adapts JobControllerServer.DeleteJob to
- // the grpc runtime: it populates a DeleteJobRequest via the supplied dec
- // function, then calls the implementation directly, or routes the call
- // through the server's unary interceptor when one is configured.
- func _JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
- in := new(DeleteJobRequest)
- if err := dec(in); err != nil {
- return nil, err
- }
- if interceptor == nil {
- return srv.(JobControllerServer).DeleteJob(ctx, in)
- }
- info := &grpc.UnaryServerInfo{
- Server: srv,
- FullMethod: "/google.cloud.dataproc.v1.JobController/DeleteJob",
- }
- handler := func(ctx context.Context, req interface{}) (interface{}, error) {
- return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest))
- }
- return interceptor(ctx, in, info, handler)
- }
-
- // _JobController_serviceDesc describes the JobController service to the
- // grpc runtime, mapping each unary method name to its handler function.
- // The service declares no streaming methods.
- var _JobController_serviceDesc = grpc.ServiceDesc{
- ServiceName: "google.cloud.dataproc.v1.JobController",
- HandlerType: (*JobControllerServer)(nil),
- Methods: []grpc.MethodDesc{
- {
- MethodName: "SubmitJob",
- Handler: _JobController_SubmitJob_Handler,
- },
- {
- MethodName: "GetJob",
- Handler: _JobController_GetJob_Handler,
- },
- {
- MethodName: "ListJobs",
- Handler: _JobController_ListJobs_Handler,
- },
- {
- MethodName: "UpdateJob",
- Handler: _JobController_UpdateJob_Handler,
- },
- {
- MethodName: "CancelJob",
- Handler: _JobController_CancelJob_Handler,
- },
- {
- MethodName: "DeleteJob",
- Handler: _JobController_DeleteJob_Handler,
- },
- },
- Streams: []grpc.StreamDesc{},
- Metadata: "google/cloud/dataproc/v1/jobs.proto",
- }
-
- // init registers the gzipped file descriptor for
- // google/cloud/dataproc/v1/jobs.proto with the proto registry at
- // package load time.
- func init() {
- proto.RegisterFile("google/cloud/dataproc/v1/jobs.proto", fileDescriptor_jobs_695072de36c8e540)
- }
-
- var fileDescriptor_jobs_695072de36c8e540 = []byte{
- // 2320 bytes of a gzipped FileDescriptorProto
- 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0xcf, 0x73, 0x23, 0x47,
- 0xf5, 0xb7, 0x7e, 0x6b, 0x9e, 0x6c, 0x79, 0xdc, 0xd9, 0xcd, 0x57, 0x5f, 0x25, 0xa9, 0x38, 0xb3,
- 0x64, 0xf1, 0x2e, 0x20, 0x61, 0x05, 0x36, 0x1b, 0x1b, 0xd8, 0xc8, 0xd2, 0x78, 0x25, 0x47, 0x2b,
- 0x6b, 0x47, 0xd2, 0x6e, 0x41, 0x15, 0x35, 0x3b, 0x92, 0xda, 0xf2, 0xd8, 0xa3, 0x99, 0xf1, 0xf4,
- 0x8c, 0x6b, 0x95, 0xad, 0xbd, 0x70, 0xe1, 0x48, 0x01, 0xa7, 0x50, 0xc5, 0x85, 0x1b, 0x7f, 0x00,
- 0x5c, 0x28, 0x8a, 0x0b, 0x67, 0x2e, 0x5c, 0xa9, 0x9c, 0x38, 0x72, 0xe2, 0x2f, 0xa0, 0xba, 0x7b,
- 0x46, 0x96, 0x64, 0xeb, 0x87, 0x77, 0x21, 0x95, 0xe4, 0xe4, 0x9e, 0x7e, 0x3f, 0xfa, 0x75, 0x7f,
- 0x3e, 0xfd, 0xde, 0x6b, 0x19, 0x6e, 0xf5, 0x2d, 0xab, 0x6f, 0xe0, 0x7c, 0xd7, 0xb0, 0xbc, 0x5e,
- 0xbe, 0xa7, 0xb9, 0x9a, 0xed, 0x58, 0xdd, 0xfc, 0xf9, 0x76, 0xfe, 0xc4, 0xea, 0x90, 0x9c, 0xed,
- 0x58, 0xae, 0x85, 0x32, 0x5c, 0x29, 0xc7, 0x94, 0x72, 0x81, 0x52, 0xee, 0x7c, 0x3b, 0xfb, 0xb6,
- 0x6f, 0xae, 0xd9, 0x7a, 0x5e, 0x33, 0x4d, 0xcb, 0xd5, 0x5c, 0xdd, 0x32, 0x7d, 0xbb, 0xec, 0x5b,
- 0xbe, 0x94, 0x7d, 0x75, 0xbc, 0xa3, 0x3c, 0x1e, 0xd8, 0xee, 0xd0, 0x17, 0x6e, 0x4e, 0x0b, 0x8f,
- 0x74, 0x6c, 0xf4, 0xd4, 0x81, 0x46, 0x4e, 0x7d, 0x8d, 0x77, 0xa7, 0x35, 0x5c, 0x7d, 0x80, 0x89,
- 0xab, 0x0d, 0x6c, 0xae, 0x20, 0x7d, 0x1e, 0x86, 0xb5, 0x9a, 0xd5, 0xef, 0xeb, 0x66, 0xbf, 0x64,
- 0x99, 0x47, 0x7a, 0x1f, 0x1d, 0xc3, 0x46, 0xcf, 0xd1, 0xcf, 0xb1, 0xa3, 0x1a, 0x56, 0x5f, 0x35,
- 0xf0, 0x39, 0x36, 0x48, 0x26, 0xbc, 0x19, 0xd9, 0x4a, 0x15, 0x7e, 0x90, 0x9b, 0xb5, 0x8b, 0xdc,
- 0x84, 0x8f, 0x5c, 0x99, 0x39, 0xa8, 0x59, 0xfd, 0x1a, 0x33, 0x97, 0x4d, 0xd7, 0x19, 0x2a, 0xeb,
- 0xbd, 0xc9, 0xd9, 0xec, 0x19, 0xdc, 0xb8, 0x4a, 0x11, 0x89, 0x10, 0x39, 0xc5, 0xc3, 0x4c, 0x68,
- 0x33, 0xb4, 0x25, 0x28, 0x74, 0x88, 0x4a, 0x10, 0x3b, 0xd7, 0x0c, 0x0f, 0x67, 0xc2, 0x9b, 0xa1,
- 0xad, 0x74, 0xe1, 0x3b, 0xcb, 0xc6, 0xc1, 0xbc, 0x2a, 0xdc, 0x76, 0x27, 0x7c, 0x3f, 0x24, 0xd9,
- 0x10, 0x63, 0x73, 0xe8, 0x26, 0x6c, 0xd4, 0xe4, 0x27, 0x72, 0x4d, 0x6d, 0xd7, 0x9b, 0x0d, 0xb9,
- 0x54, 0xdd, 0xaf, 0xca, 0x65, 0x71, 0x05, 0x25, 0x20, 0x52, 0xac, 0xd5, 0xc4, 0x10, 0x12, 0x20,
- 0xd6, 0x52, 0x8a, 0x25, 0x59, 0x0c, 0xd3, 0x61, 0x59, 0xde, 0x6b, 0x3f, 0x14, 0x23, 0x28, 0x09,
- 0xd1, 0x6a, 0x7d, 0xff, 0x50, 0x8c, 0xd2, 0xd1, 0xd3, 0xa2, 0x52, 0x17, 0x63, 0x54, 0x2c, 0x2b,
- 0xca, 0xa1, 0x22, 0xc6, 0xe9, 0x70, 0xbf, 0xd8, 0x2a, 0xd6, 0xc4, 0x04, 0x75, 0x74, 0xb8, 0xbf,
- 0x2f, 0x26, 0xa5, 0xbf, 0x44, 0x40, 0xa8, 0x68, 0x3d, 0xcb, 0xb2, 0x0f, 0xac, 0x0e, 0xfa, 0x16,
- 0x6c, 0x0c, 0x34, 0xdd, 0x54, 0x4f, 0x34, 0x47, 0x3d, 0xd2, 0x0d, 0xac, 0x7a, 0x8e, 0xce, 0x37,
- 0x5a, 0x59, 0x51, 0xd2, 0x54, 0x74, 0xa0, 0x39, 0xfb, 0xba, 0x81, 0xdb, 0x8e, 0x8e, 0xde, 0x05,
- 0x60, 0xca, 0x5d, 0x43, 0x23, 0x84, 0x6d, 0x9d, 0x6a, 0x09, 0x74, 0xae, 0x44, 0xa7, 0x10, 0x82,
- 0xa8, 0xe6, 0xf4, 0x49, 0x26, 0xb2, 0x19, 0xd9, 0x12, 0x14, 0x36, 0x46, 0x12, 0xac, 0x8d, 0x3b,
- 0x27, 0x99, 0x28, 0x13, 0xa6, 0x4e, 0x46, 0x7e, 0x09, 0x7a, 0x0b, 0x84, 0x0b, 0x79, 0x8c, 0xc9,
- 0x93, 0x47, 0x81, 0xf0, 0x3d, 0x58, 0xd5, 0x9c, 0xee, 0xb1, 0x7e, 0xee, 0xcb, 0xe3, 0xdc, 0xde,
- 0x9f, 0x63, 0x2a, 0x4d, 0x00, 0xdb, 0xb1, 0x6c, 0xec, 0xb8, 0x3a, 0x26, 0x99, 0x04, 0xe3, 0xc6,
- 0x07, 0xb3, 0x31, 0x19, 0x6d, 0x3f, 0xd7, 0x18, 0x59, 0x71, 0x4a, 0x8c, 0xb9, 0x41, 0x75, 0x48,
- 0x1b, 0x1c, 0x3c, 0xb5, 0xcb, 0xd0, 0xcb, 0x24, 0x37, 0x43, 0x5b, 0xa9, 0xc2, 0x37, 0x97, 0x04,
- 0x5b, 0x59, 0x33, 0xc6, 0x3f, 0xb3, 0x3f, 0x84, 0xf5, 0xa9, 0xe5, 0xae, 0x20, 0xd6, 0x8d, 0x71,
- 0x62, 0x09, 0x63, 0x4c, 0xd9, 0x4b, 0x42, 0x9c, 0xf3, 0x55, 0xfa, 0x73, 0x04, 0x92, 0x4d, 0x5b,
- 0x73, 0x4e, 0xbf, 0x3e, 0x00, 0x2a, 0x57, 0x00, 0x58, 0x98, 0x7d, 0xce, 0xc1, 0xee, 0xbf, 0x9a,
- 0xf8, 0xfd, 0x35, 0x02, 0xd0, 0x18, 0x8e, 0x10, 0xcc, 0xc3, 0x0d, 0x06, 0x8a, 0x3d, 0x74, 0x8f,
- 0x2d, 0x73, 0x0a, 0x44, 0x85, 0xa1, 0xdb, 0x60, 0xa2, 0x00, 0xc5, 0x00, 0xa4, 0xf0, 0x18, 0x48,
- 0x5b, 0x20, 0x4e, 0xd9, 0x07, 0x20, 0xa6, 0xed, 0x71, 0xe3, 0x2f, 0x06, 0xce, 0xd6, 0x15, 0x70,
- 0x7e, 0x6f, 0xf6, 0xb1, 0x5f, 0x1c, 0xc6, 0x57, 0x08, 0x50, 0xe9, 0x7d, 0x10, 0x1e, 0x7b, 0xd8,
- 0x19, 0xd6, 0x74, 0xe2, 0xa2, 0x0c, 0x24, 0xce, 0x3c, 0xec, 0xd0, 0xed, 0x86, 0xd8, 0x79, 0x04,
- 0x9f, 0xd2, 0x2f, 0xa2, 0x90, 0xa8, 0xe8, 0xe7, 0x98, 0x42, 0x7d, 0x1b, 0xd2, 0x74, 0x7a, 0x78,
- 0xf9, 0xa6, 0xae, 0xb2, 0xf9, 0x00, 0xe1, 0x32, 0x00, 0xd7, 0x33, 0x74, 0xe2, 0xb2, 0x95, 0x53,
- 0x85, 0x5b, 0xb3, 0x77, 0x39, 0x0a, 0x83, 0x5e, 0xe6, 0xb3, 0x51, 0x4c, 0x39, 0x78, 0xa3, 0x6b,
- 0x99, 0xae, 0x6e, 0x7a, 0x58, 0xa5, 0xc4, 0xd0, 0x74, 0xc3, 0x73, 0x70, 0x26, 0xb2, 0x19, 0xda,
- 0x4a, 0x2a, 0x1b, 0x81, 0xe8, 0xd0, 0xdc, 0xe7, 0x02, 0xa4, 0x81, 0x48, 0xba, 0x8e, 0x6e, 0xbb,
- 0xea, 0xb9, 0xe6, 0xe8, 0x5a, 0xc7, 0xc0, 0x9c, 0x1c, 0xa9, 0xc2, 0xbd, 0x39, 0xb9, 0x94, 0x6f,
- 0x2d, 0xd7, 0x64, 0x96, 0x4f, 0x02, 0x43, 0xbf, 0xc2, 0x92, 0xc9, 0x59, 0xf4, 0x78, 0x82, 0x18,
- 0x31, 0xe6, 0x7c, 0x7b, 0xb1, 0xf3, 0x79, 0xac, 0xb8, 0xc4, 0xe7, 0xf8, 0x25, 0x3e, 0x67, 0xf7,
- 0xe0, 0xc6, 0x55, 0xf1, 0x5d, 0x07, 0xee, 0xd7, 0xbd, 0xfe, 0xc2, 0x88, 0x20, 0xd2, 0x9f, 0xa2,
- 0x90, 0x62, 0x84, 0x6f, 0x9e, 0x19, 0x5f, 0x3c, 0x2b, 0xf0, 0x15, 0x28, 0x47, 0x18, 0x10, 0x3b,
- 0x0b, 0x12, 0x2e, 0x0f, 0x77, 0x49, 0xa4, 0xdb, 0x13, 0x48, 0x73, 0x1a, 0x7d, 0x7f, 0xb9, 0x05,
- 0xae, 0x85, 0xf6, 0xfd, 0xcb, 0xd9, 0xeb, 0x72, 0x9e, 0x88, 0xbf, 0x56, 0x9e, 0xf8, 0x72, 0xb1,
- 0xe7, 0x1f, 0x51, 0x88, 0x37, 0xf4, 0xfe, 0x97, 0x3f, 0x9d, 0x3c, 0x9b, 0x99, 0x4e, 0xe6, 0xf0,
- 0x80, 0xef, 0x6c, 0x49, 0x8e, 0x35, 0xae, 0xc8, 0x26, 0xdf, 0x5d, 0xe8, 0xfb, 0x35, 0x93, 0xc9,
- 0x15, 0xf4, 0x4a, 0x7c, 0x8d, 0xe8, 0xd5, 0x82, 0xd5, 0x03, 0xab, 0xd3, 0x30, 0xb4, 0x2e, 0x1e,
- 0x60, 0xd3, 0xa5, 0xd5, 0xbe, 0x6b, 0x78, 0xc4, 0xc5, 0x8e, 0x6a, 0x6a, 0x03, 0xec, 0xfb, 0x4b,
- 0xf9, 0x73, 0x75, 0x6d, 0x80, 0xc7, 0x55, 0x3c, 0x4f, 0xef, 0xf9, 0xee, 0x03, 0x95, 0xb6, 0xa7,
- 0xf7, 0xa4, 0x7f, 0x45, 0x40, 0x38, 0xb0, 0x3a, 0x4d, 0x57, 0x73, 0x3d, 0x82, 0x1e, 0x40, 0x8c,
- 0xb8, 0x9a, 0xcb, 0x9d, 0xa5, 0x0b, 0x77, 0x66, 0x1f, 0xdc, 0xc8, 0x26, 0x47, 0xff, 0x60, 0x85,
- 0xdb, 0xd1, 0x6a, 0xdb, 0xc3, 0xae, 0xa6, 0x1b, 0x7e, 0x13, 0xab, 0x04, 0x9f, 0xa8, 0x0c, 0x22,
- 0x53, 0x51, 0x89, 0xab, 0x39, 0xae, 0x4a, 0x5f, 0x97, 0xfe, 0xed, 0xcf, 0x06, 0xab, 0x04, 0x4f,
- 0xcf, 0x5c, 0x2b, 0x78, 0x7a, 0x2a, 0x69, 0x66, 0xd3, 0xa4, 0x26, 0x74, 0x12, 0x55, 0x20, 0x49,
- 0xbc, 0x0e, 0x8f, 0x31, 0xc1, 0x62, 0xfc, 0xf6, 0x52, 0x31, 0xfa, 0x36, 0xca, 0xc8, 0x5a, 0xfa,
- 0x7d, 0x08, 0x62, 0x2c, 0x74, 0xfa, 0xc0, 0x6b, 0xb6, 0x8a, 0x2d, 0x79, 0xea, 0x81, 0x97, 0x82,
- 0x44, 0x43, 0xae, 0x97, 0xab, 0xf5, 0x87, 0x62, 0x08, 0xa5, 0x01, 0x9a, 0x72, 0xab, 0xdd, 0x50,
- 0xcb, 0x87, 0x75, 0x59, 0x4c, 0x52, 0xa1, 0xd2, 0xae, 0xd7, 0xa9, 0x30, 0x8c, 0x10, 0xa4, 0x4b,
- 0xc5, 0x7a, 0x49, 0xae, 0xa9, 0x81, 0x41, 0x64, 0x6c, 0xae, 0xd9, 0x2a, 0x2a, 0x2d, 0xb9, 0x2c,
- 0x26, 0xd0, 0x1a, 0x08, 0x7c, 0xae, 0x26, 0x97, 0xf9, 0xc3, 0x90, 0x79, 0x9b, 0x78, 0x18, 0xbe,
- 0x01, 0xeb, 0xc5, 0x56, 0x4b, 0x7e, 0xd4, 0x68, 0xa9, 0xfb, 0xc5, 0x6a, 0xad, 0xad, 0xc8, 0xa2,
- 0x20, 0x55, 0x20, 0x19, 0xec, 0x00, 0xad, 0x43, 0x6a, 0x32, 0xce, 0x35, 0x10, 0x9a, 0xed, 0xbd,
- 0x47, 0xd5, 0x16, 0x5d, 0x24, 0x84, 0x00, 0xe2, 0x8f, 0xdb, 0x72, 0x5b, 0x2e, 0x8b, 0x61, 0x24,
- 0xc2, 0x6a, 0xb3, 0x55, 0xac, 0xc9, 0x34, 0x86, 0x56, 0xbb, 0x29, 0x46, 0xa4, 0x32, 0x23, 0x91,
- 0x82, 0x8f, 0xb0, 0x83, 0xcd, 0x2e, 0x46, 0xef, 0xb0, 0x8b, 0x7a, 0x82, 0xbb, 0xae, 0xaa, 0xf7,
- 0x7c, 0x0a, 0x09, 0xfe, 0x4c, 0xb5, 0x87, 0x6e, 0x42, 0xfc, 0xc4, 0xea, 0xa8, 0x23, 0xea, 0xc4,
- 0x4e, 0xac, 0x4e, 0xb5, 0x27, 0xfd, 0x21, 0x0c, 0xeb, 0x3f, 0xd6, 0x1c, 0xb3, 0x68, 0xdb, 0x86,
- 0xde, 0x65, 0xbf, 0x42, 0xd0, 0xde, 0x77, 0x8c, 0x86, 0x6c, 0x8c, 0xe4, 0x80, 0x4e, 0xfc, 0x31,
- 0x9e, 0x9f, 0x0d, 0xd5, 0x94, 0xb7, 0x49, 0x52, 0x65, 0x21, 0x69, 0x3b, 0x56, 0xdf, 0xc1, 0x84,
- 0xb0, 0xa4, 0x16, 0x56, 0x46, 0xdf, 0x94, 0xe2, 0xae, 0xa3, 0x75, 0x4f, 0xe9, 0xa5, 0xf7, 0x1c,
- 0x23, 0x13, 0xe5, 0x14, 0x0f, 0xe6, 0xda, 0x8e, 0x21, 0xfd, 0x7c, 0x11, 0xd2, 0x09, 0x88, 0xd4,
- 0xe5, 0xa7, 0x1c, 0xe5, 0xba, 0xfc, 0x54, 0x6d, 0x16, 0x9f, 0x70, 0x60, 0x27, 0x8e, 0x36, 0x82,
- 0x56, 0x21, 0x59, 0x2c, 0x95, 0xe4, 0x46, 0x8b, 0xc1, 0x37, 0x46, 0x81, 0x18, 0x15, 0xed, 0x57,
- 0xeb, 0xd5, 0x66, 0x45, 0x2e, 0x8b, 0x71, 0x8a, 0x01, 0x05, 0x8f, 0x81, 0x0e, 0x10, 0xff, 0xa4,
- 0xca, 0x10, 0x4f, 0x4a, 0xff, 0x4e, 0x42, 0x84, 0x96, 0x87, 0x32, 0x08, 0x4e, 0x00, 0x01, 0x3b,
- 0xb0, 0x54, 0xe1, 0xf6, 0x5c, 0x1a, 0x8f, 0x00, 0x53, 0x2e, 0x0c, 0xa9, 0x17, 0x3b, 0xc8, 0x06,
- 0x7e, 0xed, 0x98, 0xef, 0x65, 0x94, 0x3b, 0x94, 0x0b, 0x43, 0x5a, 0x82, 0x8e, 0xd9, 0xab, 0x5b,
- 0x3d, 0xb1, 0x3a, 0xec, 0x78, 0xe7, 0x96, 0xa0, 0xd1, 0x0b, 0x9d, 0x96, 0xa0, 0xe3, 0xd1, 0xaf,
- 0x15, 0x45, 0x10, 0x08, 0x6d, 0x14, 0x98, 0x93, 0x28, 0x73, 0x22, 0x2d, 0x7e, 0x25, 0x56, 0x56,
- 0x94, 0x24, 0x09, 0x5e, 0x5b, 0x0f, 0x21, 0x65, 0x0f, 0x2f, 0x9c, 0xc4, 0x98, 0x93, 0x6f, 0x2c,
- 0xf3, 0x36, 0xa9, 0xac, 0x28, 0xe0, 0x9b, 0x52, 0x47, 0x3f, 0x82, 0x24, 0x7b, 0x03, 0x51, 0x2f,
- 0x3c, 0xc3, 0xbc, 0xb7, 0xb0, 0x91, 0xad, 0xac, 0x28, 0x89, 0x63, 0xff, 0x2d, 0xb0, 0x0b, 0x09,
- 0x5b, 0xef, 0x33, 0x73, 0x5e, 0x3f, 0x36, 0x17, 0x55, 0xae, 0xca, 0x8a, 0x12, 0xb7, 0x79, 0xe5,
- 0xff, 0x04, 0xd6, 0xf8, 0x1e, 0xc8, 0x99, 0xc1, 0x5c, 0xac, 0x32, 0x17, 0xef, 0x2f, 0xd5, 0x60,
- 0x55, 0x56, 0x94, 0x14, 0x19, 0xeb, 0x3f, 0x77, 0x21, 0x4e, 0x58, 0x02, 0xf3, 0xdf, 0x53, 0xb7,
- 0x96, 0xc8, 0x75, 0x8a, 0x6f, 0x82, 0x0e, 0x20, 0xcd, 0x47, 0xea, 0xb1, 0x4e, 0x5c, 0xcb, 0x19,
- 0x66, 0xd6, 0x58, 0x1d, 0x5e, 0xca, 0xc9, 0x1a, 0x37, 0xad, 0x70, 0x4b, 0xf4, 0x04, 0x36, 0x86,
- 0x9a, 0x63, 0xaa, 0xda, 0xc5, 0x15, 0x25, 0x19, 0x81, 0xb9, 0xbb, 0xb3, 0xf4, 0xa5, 0x56, 0xc4,
- 0xe1, 0xe4, 0x04, 0x41, 0xbb, 0x90, 0xf5, 0x7f, 0x41, 0xb4, 0x3c, 0xd7, 0xf6, 0x5c, 0xd5, 0xc1,
- 0xc4, 0xf2, 0x9c, 0x2e, 0xef, 0x99, 0x36, 0xd8, 0x5d, 0xfe, 0x3f, 0xae, 0x71, 0xc8, 0x14, 0x14,
- 0x5f, 0x4e, 0x9b, 0xa7, 0x0f, 0x21, 0xe3, 0x1b, 0xd3, 0x16, 0xc7, 0xb1, 0x0c, 0xd6, 0x1d, 0x10,
- 0x66, 0xba, 0xce, 0x4c, 0x6f, 0x72, 0x79, 0x89, 0x8b, 0x69, 0x9f, 0x40, 0xa8, 0x61, 0x11, 0xe2,
- 0x86, 0xd6, 0xc1, 0x06, 0xc9, 0xa0, 0x45, 0x5b, 0xa0, 0x6d, 0x49, 0x8d, 0xe9, 0xf2, 0x96, 0xc4,
- 0x37, 0x44, 0x0f, 0x01, 0x48, 0xf7, 0x18, 0xf7, 0x3c, 0x43, 0x37, 0xfb, 0x99, 0x1b, 0x8b, 0xda,
- 0x0c, 0x7a, 0xb0, 0x23, 0x75, 0x65, 0xcc, 0x14, 0xfd, 0x3f, 0x24, 0x69, 0x86, 0x65, 0xe5, 0xf9,
- 0x4d, 0x5e, 0x31, 0x4f, 0xac, 0x0e, 0x2d, 0xcd, 0xd9, 0x8f, 0x20, 0x35, 0xb6, 0xf4, 0xb5, 0xda,
- 0x06, 0x80, 0xa4, 0x3b, 0xb4, 0xd9, 0x15, 0x90, 0xf6, 0x60, 0x6d, 0x62, 0x79, 0xb4, 0x0d, 0x37,
- 0x07, 0xda, 0xf3, 0xa0, 0x4d, 0x24, 0xaa, 0x8d, 0x1d, 0xf5, 0xd8, 0xf2, 0x1c, 0xe6, 0x3a, 0xa6,
- 0xa0, 0x81, 0xf6, 0xdc, 0xef, 0x14, 0x49, 0x03, 0x3b, 0x15, 0xcb, 0x73, 0xa4, 0xcf, 0x42, 0x20,
- 0x36, 0xbd, 0xce, 0x40, 0x77, 0x59, 0x32, 0x3a, 0xf3, 0x30, 0x71, 0x17, 0xd5, 0x8e, 0x37, 0x21,
- 0xee, 0xe0, 0xbe, 0x6e, 0x99, 0x2c, 0xa9, 0x08, 0x8a, 0xff, 0x85, 0xf2, 0x10, 0xa1, 0xf7, 0x82,
- 0x27, 0xac, 0x77, 0xe6, 0xa7, 0x3d, 0xaa, 0x49, 0xd7, 0x71, 0xf8, 0x92, 0x74, 0x1d, 0x9e, 0xe0,
- 0x05, 0x7f, 0xa6, 0xda, 0x93, 0x7e, 0x0a, 0x6b, 0x0f, 0xf1, 0x7f, 0x21, 0xae, 0x19, 0xb5, 0xee,
- 0xf3, 0x30, 0xac, 0xd3, 0x2e, 0xfb, 0xc0, 0xea, 0x90, 0x6b, 0xaf, 0x10, 0x9f, 0x58, 0xe1, 0x2d,
- 0x10, 0x6c, 0xad, 0x8f, 0x55, 0xa2, 0x7f, 0xca, 0x31, 0x8b, 0x29, 0x49, 0x3a, 0xd1, 0xd4, 0x3f,
- 0xe5, 0x95, 0x98, 0x0a, 0x5d, 0xeb, 0x14, 0x07, 0xa1, 0x31, 0xf5, 0x16, 0x9d, 0xb8, 0xd4, 0xed,
- 0x45, 0x2f, 0x77, 0x7b, 0x18, 0x36, 0xe8, 0x06, 0x78, 0x97, 0x35, 0xd0, 0xdc, 0xee, 0x31, 0x76,
- 0x58, 0x1a, 0x4d, 0x17, 0x3e, 0x9a, 0xd3, 0x01, 0x4f, 0xee, 0x2d, 0xc8, 0x01, 0xf8, 0x11, 0x77,
- 0xa0, 0xac, 0x9f, 0x4c, 0x4e, 0xd0, 0xdd, 0x1d, 0xe9, 0x86, 0x8b, 0x1d, 0x96, 0x1d, 0x05, 0xc5,
- 0xff, 0x92, 0xee, 0xc1, 0xfa, 0x94, 0x6d, 0xf0, 0x1b, 0xf9, 0x0a, 0x2d, 0x82, 0xc5, 0x52, 0xab,
- 0xfa, 0x44, 0xf6, 0x8b, 0xec, 0x61, 0x5d, 0xf5, 0xbf, 0xc3, 0xd2, 0xdf, 0x42, 0x20, 0xb6, 0xed,
- 0x9e, 0xe6, 0xe2, 0x57, 0xc1, 0x30, 0x3c, 0x03, 0xc3, 0xc8, 0x18, 0x86, 0x01, 0xe5, 0xa2, 0x4b,
- 0x53, 0x6e, 0x17, 0x52, 0x1e, 0x0b, 0x89, 0xfd, 0x87, 0xc4, 0xaf, 0x45, 0x97, 0xfb, 0xd4, 0x7d,
- 0x1d, 0x1b, 0xbd, 0x47, 0x1a, 0x39, 0x55, 0x80, 0xab, 0xd3, 0xb1, 0x34, 0x00, 0xf1, 0xe2, 0x50,
- 0x89, 0x6d, 0x99, 0x04, 0xa3, 0x6d, 0x88, 0x9e, 0x58, 0x1d, 0xfe, 0x13, 0xd4, 0xc2, 0x10, 0x98,
- 0x2a, 0xba, 0x0d, 0xeb, 0x26, 0x7e, 0xee, 0xaa, 0x63, 0xac, 0xe0, 0x9b, 0x5d, 0xa3, 0xd3, 0x8d,
- 0x80, 0x19, 0xd2, 0x33, 0x10, 0x4b, 0x9a, 0xd9, 0xc5, 0xc6, 0xff, 0xec, 0x0a, 0x3c, 0x03, 0xb1,
- 0x8c, 0x0d, 0xfc, 0x6a, 0x00, 0x2d, 0xb3, 0x42, 0xe1, 0x8f, 0x09, 0x96, 0xa4, 0xfc, 0x44, 0x6d,
- 0x60, 0x07, 0x7d, 0x16, 0x02, 0x61, 0x94, 0x71, 0xd0, 0xdd, 0x39, 0xe5, 0x73, 0x2a, 0x2d, 0x65,
- 0xe7, 0x1f, 0xae, 0x54, 0xfc, 0xd9, 0xdf, 0xff, 0xf9, 0xeb, 0xf0, 0xae, 0x74, 0x2f, 0x7f, 0xbe,
- 0x9d, 0xf7, 0x03, 0x26, 0xf9, 0x17, 0x17, 0x9b, 0x79, 0x99, 0xe7, 0xb1, 0x92, 0xfc, 0x0b, 0x3e,
- 0x78, 0xc9, 0xfe, 0x3b, 0xb7, 0x43, 0xd8, 0x42, 0x3b, 0xa1, 0xbb, 0xe8, 0x57, 0x21, 0x88, 0xf3,
- 0x94, 0x83, 0xe6, 0xe4, 0xfc, 0x89, 0xa4, 0xb4, 0x28, 0xaa, 0x8f, 0x59, 0x54, 0x3b, 0xe8, 0xfe,
- 0x35, 0xa3, 0xca, 0xbf, 0xe0, 0xc7, 0xf9, 0x12, 0xfd, 0x26, 0x04, 0xc9, 0x80, 0x76, 0xe8, 0xce,
- 0xd2, 0xf7, 0x3d, 0x7b, 0x77, 0x19, 0x55, 0xce, 0x62, 0xe9, 0x43, 0x16, 0xe5, 0x36, 0xca, 0x5f,
- 0x33, 0x4a, 0xf4, 0xdb, 0x10, 0x08, 0xa3, 0x3b, 0x3e, 0x0f, 0xcd, 0xe9, 0x44, 0xb0, 0xe8, 0xdc,
- 0x64, 0x16, 0xd1, 0x83, 0xc2, 0x2b, 0x9f, 0xdb, 0x0e, 0xbb, 0xef, 0xbf, 0x0b, 0x81, 0x30, 0xba,
- 0x44, 0xf3, 0xe2, 0x9b, 0xbe, 0x69, 0x8b, 0xe2, 0x3b, 0x60, 0xf1, 0x95, 0xa5, 0x07, 0xaf, 0x1c,
- 0x5f, 0x97, 0xad, 0x48, 0x69, 0xf7, 0xcb, 0x10, 0x08, 0xa3, 0x7b, 0x38, 0x2f, 0xc8, 0xe9, 0xcb,
- 0x9a, 0x7d, 0xf3, 0x52, 0xe6, 0x92, 0x07, 0xb6, 0x3b, 0x0c, 0x58, 0x77, 0xf7, 0x95, 0x4f, 0x6f,
- 0x6f, 0x00, 0x6f, 0x77, 0xad, 0xc1, 0xcc, 0x50, 0xf6, 0x04, 0xca, 0x9f, 0x06, 0x5d, 0xb5, 0x11,
- 0xfa, 0xc9, 0xc7, 0xbe, 0x5a, 0xdf, 0x32, 0x34, 0xb3, 0x9f, 0xb3, 0x9c, 0x7e, 0xbe, 0x8f, 0x4d,
- 0x16, 0x53, 0x9e, 0x8b, 0x34, 0x5b, 0x27, 0x97, 0xff, 0x3b, 0xbe, 0x1b, 0x8c, 0x3b, 0x71, 0xa6,
- 0xfc, 0xc1, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa1, 0x58, 0x75, 0xc0, 0x49, 0x1f, 0x00, 0x00,
- }
|