You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 

3120 lines
111 KiB

  1. // Code generated by protoc-gen-go. DO NOT EDIT.
  2. // source: google/cloud/dataproc/v1/jobs.proto
  3. package dataproc // import "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
  4. import proto "github.com/golang/protobuf/proto"
  5. import fmt "fmt"
  6. import math "math"
  7. import empty "github.com/golang/protobuf/ptypes/empty"
  8. import timestamp "github.com/golang/protobuf/ptypes/timestamp"
  9. import _ "google.golang.org/genproto/googleapis/api/annotations"
  10. import field_mask "google.golang.org/genproto/protobuf/field_mask"
  11. import (
  12. context "golang.org/x/net/context"
  13. grpc "google.golang.org/grpc"
  14. )
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// The Log4j level for job execution. When running an
// [Apache Hive](http://hive.apache.org/) job, Cloud
// Dataproc configures the Hive client to an equivalent verbosity level.
type LoggingConfig_Level int32

const (
	// Level is unspecified. Use default level for log4j.
	LoggingConfig_LEVEL_UNSPECIFIED LoggingConfig_Level = 0
	// Use ALL level for log4j.
	LoggingConfig_ALL LoggingConfig_Level = 1
	// Use TRACE level for log4j.
	LoggingConfig_TRACE LoggingConfig_Level = 2
	// Use DEBUG level for log4j.
	LoggingConfig_DEBUG LoggingConfig_Level = 3
	// Use INFO level for log4j.
	LoggingConfig_INFO LoggingConfig_Level = 4
	// Use WARN level for log4j.
	LoggingConfig_WARN LoggingConfig_Level = 5
	// Use ERROR level for log4j.
	LoggingConfig_ERROR LoggingConfig_Level = 6
	// Use FATAL level for log4j.
	LoggingConfig_FATAL LoggingConfig_Level = 7
	// Turn off log4j.
	LoggingConfig_OFF LoggingConfig_Level = 8
)

// LoggingConfig_Level_name maps enum numbers to their proto names.
var LoggingConfig_Level_name = map[int32]string{
	0: "LEVEL_UNSPECIFIED",
	1: "ALL",
	2: "TRACE",
	3: "DEBUG",
	4: "INFO",
	5: "WARN",
	6: "ERROR",
	7: "FATAL",
	8: "OFF",
}

// LoggingConfig_Level_value maps proto names back to enum numbers.
var LoggingConfig_Level_value = map[string]int32{
	"LEVEL_UNSPECIFIED": 0,
	"ALL":               1,
	"TRACE":             2,
	"DEBUG":             3,
	"INFO":              4,
	"WARN":              5,
	"ERROR":             6,
	"FATAL":             7,
	"OFF":               8,
}

// String returns the proto name of the level (e.g. "DEBUG").
func (x LoggingConfig_Level) String() string {
	return proto.EnumName(LoggingConfig_Level_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{0, 0}) locating this enum within that descriptor.
func (LoggingConfig_Level) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{0, 0}
}
// The job state.
type JobStatus_State int32

const (
	// The job state is unknown.
	JobStatus_STATE_UNSPECIFIED JobStatus_State = 0
	// The job is pending; it has been submitted, but is not yet running.
	JobStatus_PENDING JobStatus_State = 1
	// Job has been received by the service and completed initial setup;
	// it will soon be submitted to the cluster.
	JobStatus_SETUP_DONE JobStatus_State = 8
	// The job is running on the cluster.
	JobStatus_RUNNING JobStatus_State = 2
	// A CancelJob request has been received, but is pending.
	JobStatus_CANCEL_PENDING JobStatus_State = 3
	// Transient in-flight resources have been canceled, and the request to
	// cancel the running job has been issued to the cluster.
	JobStatus_CANCEL_STARTED JobStatus_State = 7
	// The job cancellation was successful.
	JobStatus_CANCELLED JobStatus_State = 4
	// The job has completed successfully.
	JobStatus_DONE JobStatus_State = 5
	// The job has completed, but encountered an error.
	JobStatus_ERROR JobStatus_State = 6
	// Job attempt has failed. The detail field contains failure details for
	// this attempt.
	//
	// Applies to restartable jobs only.
	JobStatus_ATTEMPT_FAILURE JobStatus_State = 9
)

// JobStatus_State_name maps enum numbers to their proto names.
// Note the numbers are not contiguous with declaration order (e.g.
// SETUP_DONE is 8); they match the .proto field numbers.
var JobStatus_State_name = map[int32]string{
	0: "STATE_UNSPECIFIED",
	1: "PENDING",
	8: "SETUP_DONE",
	2: "RUNNING",
	3: "CANCEL_PENDING",
	7: "CANCEL_STARTED",
	4: "CANCELLED",
	5: "DONE",
	6: "ERROR",
	9: "ATTEMPT_FAILURE",
}

// JobStatus_State_value maps proto names back to enum numbers.
var JobStatus_State_value = map[string]int32{
	"STATE_UNSPECIFIED": 0,
	"PENDING":           1,
	"SETUP_DONE":        8,
	"RUNNING":           2,
	"CANCEL_PENDING":    3,
	"CANCEL_STARTED":    7,
	"CANCELLED":         4,
	"DONE":              5,
	"ERROR":             6,
	"ATTEMPT_FAILURE":   9,
}

// String returns the proto name of the state (e.g. "RUNNING").
func (x JobStatus_State) String() string {
	return proto.EnumName(JobStatus_State_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{9, 0}) locating this enum within that descriptor.
func (JobStatus_State) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{9, 0}
}
// The job substate.
type JobStatus_Substate int32

const (
	// The job substate is unknown.
	JobStatus_UNSPECIFIED JobStatus_Substate = 0
	// The Job is submitted to the agent.
	//
	// Applies to RUNNING state.
	JobStatus_SUBMITTED JobStatus_Substate = 1
	// The Job has been received and is awaiting execution (it may be waiting
	// for a condition to be met). See the "details" field for the reason for
	// the delay.
	//
	// Applies to RUNNING state.
	JobStatus_QUEUED JobStatus_Substate = 2
	// The agent-reported status is out of date, which may be caused by a
	// loss of communication between the agent and Cloud Dataproc. If the
	// agent does not send a timely update, the job will fail.
	//
	// Applies to RUNNING state.
	JobStatus_STALE_STATUS JobStatus_Substate = 3
)

// JobStatus_Substate_name maps enum numbers to their proto names.
var JobStatus_Substate_name = map[int32]string{
	0: "UNSPECIFIED",
	1: "SUBMITTED",
	2: "QUEUED",
	3: "STALE_STATUS",
}

// JobStatus_Substate_value maps proto names back to enum numbers.
var JobStatus_Substate_value = map[string]int32{
	"UNSPECIFIED":  0,
	"SUBMITTED":    1,
	"QUEUED":       2,
	"STALE_STATUS": 3,
}

// String returns the proto name of the substate (e.g. "QUEUED").
func (x JobStatus_Substate) String() string {
	return proto.EnumName(JobStatus_Substate_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{9, 1}) locating this enum within that descriptor.
func (JobStatus_Substate) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{9, 1}
}
// The application state, corresponding to
// <code>YarnProtos.YarnApplicationStateProto</code>.
type YarnApplication_State int32

const (
	// Status is unspecified.
	YarnApplication_STATE_UNSPECIFIED YarnApplication_State = 0
	// Status is NEW.
	YarnApplication_NEW YarnApplication_State = 1
	// Status is NEW_SAVING.
	YarnApplication_NEW_SAVING YarnApplication_State = 2
	// Status is SUBMITTED.
	YarnApplication_SUBMITTED YarnApplication_State = 3
	// Status is ACCEPTED.
	YarnApplication_ACCEPTED YarnApplication_State = 4
	// Status is RUNNING.
	YarnApplication_RUNNING YarnApplication_State = 5
	// Status is FINISHED.
	YarnApplication_FINISHED YarnApplication_State = 6
	// Status is FAILED.
	YarnApplication_FAILED YarnApplication_State = 7
	// Status is KILLED.
	YarnApplication_KILLED YarnApplication_State = 8
)

// YarnApplication_State_name maps enum numbers to their proto names.
var YarnApplication_State_name = map[int32]string{
	0: "STATE_UNSPECIFIED",
	1: "NEW",
	2: "NEW_SAVING",
	3: "SUBMITTED",
	4: "ACCEPTED",
	5: "RUNNING",
	6: "FINISHED",
	7: "FAILED",
	8: "KILLED",
}

// YarnApplication_State_value maps proto names back to enum numbers.
var YarnApplication_State_value = map[string]int32{
	"STATE_UNSPECIFIED": 0,
	"NEW":               1,
	"NEW_SAVING":        2,
	"SUBMITTED":         3,
	"ACCEPTED":          4,
	"RUNNING":           5,
	"FINISHED":          6,
	"FAILED":            7,
	"KILLED":            8,
}

// String returns the proto name of the state (e.g. "ACCEPTED").
func (x YarnApplication_State) String() string {
	return proto.EnumName(YarnApplication_State_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{11, 0}) locating this enum within that descriptor.
func (YarnApplication_State) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{11, 0}
}
// A matcher that specifies categories of job states.
type ListJobsRequest_JobStateMatcher int32

const (
	// Match all jobs, regardless of state.
	ListJobsRequest_ALL ListJobsRequest_JobStateMatcher = 0
	// Only match jobs in non-terminal states: PENDING, RUNNING, or
	// CANCEL_PENDING.
	ListJobsRequest_ACTIVE ListJobsRequest_JobStateMatcher = 1
	// Only match jobs in terminal states: CANCELLED, DONE, or ERROR.
	ListJobsRequest_NON_ACTIVE ListJobsRequest_JobStateMatcher = 2
)

// ListJobsRequest_JobStateMatcher_name maps enum numbers to their proto names.
var ListJobsRequest_JobStateMatcher_name = map[int32]string{
	0: "ALL",
	1: "ACTIVE",
	2: "NON_ACTIVE",
}

// ListJobsRequest_JobStateMatcher_value maps proto names back to enum numbers.
var ListJobsRequest_JobStateMatcher_value = map[string]int32{
	"ALL":        0,
	"ACTIVE":     1,
	"NON_ACTIVE": 2,
}

// String returns the proto name of the matcher (e.g. "ACTIVE").
func (x ListJobsRequest_JobStateMatcher) String() string {
	return proto.EnumName(ListJobsRequest_JobStateMatcher_name, int32(x))
}

// EnumDescriptor returns the compressed file descriptor bytes and the
// path ([]int{16, 0}) locating this enum within that descriptor.
func (ListJobsRequest_JobStateMatcher) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{16, 0}
}
// The runtime logging config of the job.
type LoggingConfig struct {
	// The per-package log levels for the driver. This may include
	// "root" package name to configure rootLogger.
	// Examples:
	// 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
	DriverLogLevels map[string]LoggingConfig_Level `protobuf:"bytes,2,rep,name=driver_log_levels,json=driverLogLevels,proto3" json:"driver_log_levels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3,enum=google.cloud.dataproc.v1.LoggingConfig_Level"`
	// XXX_* fields are internal bookkeeping for the proto runtime
	// (unknown-field preservation and size caching); do not use directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores the message to its zero value.
func (m *LoggingConfig) Reset() { *m = LoggingConfig{} }

// String renders the message in the proto compact text format.
func (m *LoggingConfig) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks LoggingConfig as a proto message.
func (*LoggingConfig) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and the
// path ([]int{0}) locating this message within that descriptor.
func (*LoggingConfig) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{0}
}

// XXX_* methods below delegate wire marshaling to the proto runtime;
// they exist for internal use by the proto package.
func (m *LoggingConfig) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_LoggingConfig.Unmarshal(m, b)
}
func (m *LoggingConfig) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_LoggingConfig.Marshal(b, m, deterministic)
}
func (dst *LoggingConfig) XXX_Merge(src proto.Message) {
	xxx_messageInfo_LoggingConfig.Merge(dst, src)
}
func (m *LoggingConfig) XXX_Size() int {
	return xxx_messageInfo_LoggingConfig.Size(m)
}
func (m *LoggingConfig) XXX_DiscardUnknown() {
	xxx_messageInfo_LoggingConfig.DiscardUnknown(m)
}

var xxx_messageInfo_LoggingConfig proto.InternalMessageInfo
  286. func (m *LoggingConfig) GetDriverLogLevels() map[string]LoggingConfig_Level {
  287. if m != nil {
  288. return m.DriverLogLevels
  289. }
  290. return nil
  291. }
// A Cloud Dataproc job for running
// [Apache Hadoop
// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
// jobs on [Apache Hadoop
// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
type HadoopJob struct {
	// Required. Indicates the location of the driver's main class. Specify
	// either the jar file that contains the main class or the main class name.
	// To specify both, add the jar file to `jar_file_uris`, and then specify
	// the main class name in this property.
	//
	// Types that are valid to be assigned to Driver:
	//	*HadoopJob_MainJarFileUri
	//	*HadoopJob_MainClass
	Driver isHadoopJob_Driver `protobuf_oneof:"driver"`
	// Optional. The arguments to pass to the driver. Do not
	// include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as
	// job properties, since a collision may occur that causes an incorrect job
	// submission.
	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. Jar file URIs to add to the CLASSPATHs of the
	// Hadoop driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied
	// to the working directory of Hadoop drivers and distributed tasks. Useful
	// for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory of
	// Hadoop drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, or .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure Hadoop.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in /etc/hadoop/conf/*-site and
	// classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// XXX_* fields are internal proto-runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores the message to its zero value.
func (m *HadoopJob) Reset() { *m = HadoopJob{} }

// String renders the message in the proto compact text format.
func (m *HadoopJob) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks HadoopJob as a proto message.
func (*HadoopJob) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and the
// path ([]int{1}) locating this message within that descriptor.
func (*HadoopJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{1}
}

// XXX_* methods below delegate wire marshaling to the proto runtime;
// they exist for internal use by the proto package.
func (m *HadoopJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_HadoopJob.Unmarshal(m, b)
}
func (m *HadoopJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_HadoopJob.Marshal(b, m, deterministic)
}
func (dst *HadoopJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_HadoopJob.Merge(dst, src)
}
func (m *HadoopJob) XXX_Size() int {
	return xxx_messageInfo_HadoopJob.Size(m)
}
func (m *HadoopJob) XXX_DiscardUnknown() {
	xxx_messageInfo_HadoopJob.DiscardUnknown(m)
}

var xxx_messageInfo_HadoopJob proto.InternalMessageInfo
// isHadoopJob_Driver is the sealed interface implemented by every
// wrapper type of the HadoopJob "driver" oneof.
type isHadoopJob_Driver interface {
	isHadoopJob_Driver()
}

// HadoopJob_MainJarFileUri wraps the main_jar_file_uri oneof variant (field 1).
type HadoopJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
}

// HadoopJob_MainClass wraps the main_class oneof variant (field 2).
type HadoopJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
}

func (*HadoopJob_MainJarFileUri) isHadoopJob_Driver() {}

func (*HadoopJob_MainClass) isHadoopJob_Driver() {}
  367. func (m *HadoopJob) GetDriver() isHadoopJob_Driver {
  368. if m != nil {
  369. return m.Driver
  370. }
  371. return nil
  372. }
  373. func (m *HadoopJob) GetMainJarFileUri() string {
  374. if x, ok := m.GetDriver().(*HadoopJob_MainJarFileUri); ok {
  375. return x.MainJarFileUri
  376. }
  377. return ""
  378. }
  379. func (m *HadoopJob) GetMainClass() string {
  380. if x, ok := m.GetDriver().(*HadoopJob_MainClass); ok {
  381. return x.MainClass
  382. }
  383. return ""
  384. }
  385. func (m *HadoopJob) GetArgs() []string {
  386. if m != nil {
  387. return m.Args
  388. }
  389. return nil
  390. }
  391. func (m *HadoopJob) GetJarFileUris() []string {
  392. if m != nil {
  393. return m.JarFileUris
  394. }
  395. return nil
  396. }
  397. func (m *HadoopJob) GetFileUris() []string {
  398. if m != nil {
  399. return m.FileUris
  400. }
  401. return nil
  402. }
  403. func (m *HadoopJob) GetArchiveUris() []string {
  404. if m != nil {
  405. return m.ArchiveUris
  406. }
  407. return nil
  408. }
  409. func (m *HadoopJob) GetProperties() map[string]string {
  410. if m != nil {
  411. return m.Properties
  412. }
  413. return nil
  414. }
  415. func (m *HadoopJob) GetLoggingConfig() *LoggingConfig {
  416. if m != nil {
  417. return m.LoggingConfig
  418. }
  419. return nil
  420. }
// XXX_OneofFuncs is for the internal use of the proto package.
// It returns the marshaler, unmarshaler, sizer, and the wrapper types of
// the "driver" oneof so the runtime can handle it reflectively.
func (*HadoopJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _HadoopJob_OneofMarshaler, _HadoopJob_OneofUnmarshaler, _HadoopJob_OneofSizer, []interface{}{
		(*HadoopJob_MainJarFileUri)(nil),
		(*HadoopJob_MainClass)(nil),
	}
}

// _HadoopJob_OneofMarshaler encodes whichever "driver" variant is set as
// field 1 or 2 with the bytes wire type; an unset oneof writes nothing.
func _HadoopJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("HadoopJob.Driver has unexpected type %T", x)
	}
	return nil
}

// _HadoopJob_OneofUnmarshaler decodes fields 1 and 2 into the "driver"
// oneof. It reports (true, err) when the tag belongs to the oneof and
// (false, nil) when the runtime should handle the field itself.
func _HadoopJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*HadoopJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &HadoopJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

// _HadoopJob_OneofSizer returns the encoded size in bytes of the set
// "driver" variant: one byte of tag+wire, a varint length, and the data.
func _HadoopJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*HadoopJob)
	// driver
	switch x := m.Driver.(type) {
	case *HadoopJob_MainJarFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *HadoopJob_MainClass:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/)
// applications on YARN.
type SparkJob struct {
	// Required. The specification of the main method to call to drive the job.
	// Specify either the jar file that contains the main class or the main class
	// name. To pass both a main jar and a main class in that jar, add the jar to
	// `CommonJob.jar_file_uris`, and then specify the main class name in
	// `main_class`.
	//
	// Types that are valid to be assigned to Driver:
	//	*SparkJob_MainJarFileUri
	//	*SparkJob_MainClass
	Driver isSparkJob_Driver `protobuf_oneof:"driver"`
	// Optional. The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,3,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
	// Spark driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS URIs of files to be copied to the working directory of
	// Spark drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory
	// of Spark drivers and tasks. Supported file types:
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure Spark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// XXX_* fields are internal proto-runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores the message to its zero value.
func (m *SparkJob) Reset() { *m = SparkJob{} }

// String renders the message in the proto compact text format.
func (m *SparkJob) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks SparkJob as a proto message.
func (*SparkJob) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and the
// path ([]int{2}) locating this message within that descriptor.
func (*SparkJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{2}
}

// XXX_* methods below delegate wire marshaling to the proto runtime;
// they exist for internal use by the proto package.
func (m *SparkJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SparkJob.Unmarshal(m, b)
}
func (m *SparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SparkJob.Marshal(b, m, deterministic)
}
func (dst *SparkJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SparkJob.Merge(dst, src)
}
func (m *SparkJob) XXX_Size() int {
	return xxx_messageInfo_SparkJob.Size(m)
}
func (m *SparkJob) XXX_DiscardUnknown() {
	xxx_messageInfo_SparkJob.DiscardUnknown(m)
}

var xxx_messageInfo_SparkJob proto.InternalMessageInfo
// isSparkJob_Driver is the sealed interface implemented by every
// wrapper type of the SparkJob "driver" oneof.
type isSparkJob_Driver interface {
	isSparkJob_Driver()
}

// SparkJob_MainJarFileUri wraps the main_jar_file_uri oneof variant (field 1).
type SparkJob_MainJarFileUri struct {
	MainJarFileUri string `protobuf:"bytes,1,opt,name=main_jar_file_uri,json=mainJarFileUri,proto3,oneof"`
}

// SparkJob_MainClass wraps the main_class oneof variant (field 2).
type SparkJob_MainClass struct {
	MainClass string `protobuf:"bytes,2,opt,name=main_class,json=mainClass,proto3,oneof"`
}

func (*SparkJob_MainJarFileUri) isSparkJob_Driver() {}

func (*SparkJob_MainClass) isSparkJob_Driver() {}
  554. func (m *SparkJob) GetDriver() isSparkJob_Driver {
  555. if m != nil {
  556. return m.Driver
  557. }
  558. return nil
  559. }
  560. func (m *SparkJob) GetMainJarFileUri() string {
  561. if x, ok := m.GetDriver().(*SparkJob_MainJarFileUri); ok {
  562. return x.MainJarFileUri
  563. }
  564. return ""
  565. }
  566. func (m *SparkJob) GetMainClass() string {
  567. if x, ok := m.GetDriver().(*SparkJob_MainClass); ok {
  568. return x.MainClass
  569. }
  570. return ""
  571. }
  572. func (m *SparkJob) GetArgs() []string {
  573. if m != nil {
  574. return m.Args
  575. }
  576. return nil
  577. }
  578. func (m *SparkJob) GetJarFileUris() []string {
  579. if m != nil {
  580. return m.JarFileUris
  581. }
  582. return nil
  583. }
  584. func (m *SparkJob) GetFileUris() []string {
  585. if m != nil {
  586. return m.FileUris
  587. }
  588. return nil
  589. }
  590. func (m *SparkJob) GetArchiveUris() []string {
  591. if m != nil {
  592. return m.ArchiveUris
  593. }
  594. return nil
  595. }
  596. func (m *SparkJob) GetProperties() map[string]string {
  597. if m != nil {
  598. return m.Properties
  599. }
  600. return nil
  601. }
  602. func (m *SparkJob) GetLoggingConfig() *LoggingConfig {
  603. if m != nil {
  604. return m.LoggingConfig
  605. }
  606. return nil
  607. }
// XXX_OneofFuncs is for the internal use of the proto package.
// It returns the marshaler, unmarshaler, sizer, and the wrapper types of
// the "driver" oneof so the runtime can handle it reflectively.
func (*SparkJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _SparkJob_OneofMarshaler, _SparkJob_OneofUnmarshaler, _SparkJob_OneofSizer, []interface{}{
		(*SparkJob_MainJarFileUri)(nil),
		(*SparkJob_MainClass)(nil),
	}
}

// _SparkJob_OneofMarshaler encodes whichever "driver" variant is set as
// field 1 or 2 with the bytes wire type; an unset oneof writes nothing.
func _SparkJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		b.EncodeVarint(1<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainJarFileUri)
	case *SparkJob_MainClass:
		b.EncodeVarint(2<<3 | proto.WireBytes)
		b.EncodeStringBytes(x.MainClass)
	case nil:
	default:
		return fmt.Errorf("SparkJob.Driver has unexpected type %T", x)
	}
	return nil
}

// _SparkJob_OneofUnmarshaler decodes fields 1 and 2 into the "driver"
// oneof. It reports (true, err) when the tag belongs to the oneof and
// (false, nil) when the runtime should handle the field itself.
func _SparkJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*SparkJob)
	switch tag {
	case 1: // driver.main_jar_file_uri
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainJarFileUri{x}
		return true, err
	case 2: // driver.main_class
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		x, err := b.DecodeStringBytes()
		m.Driver = &SparkJob_MainClass{x}
		return true, err
	default:
		return false, nil
	}
}

// _SparkJob_OneofSizer returns the encoded size in bytes of the set
// "driver" variant: one byte of tag+wire, a varint length, and the data.
func _SparkJob_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*SparkJob)
	// driver
	switch x := m.Driver.(type) {
	case *SparkJob_MainJarFileUri:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainJarFileUri)))
		n += len(x.MainJarFileUri)
	case *SparkJob_MainClass:
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(len(x.MainClass)))
		n += len(x.MainClass)
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// A Cloud Dataproc job for running
// [Apache
// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
// applications on YARN.
type PySparkJob struct {
	// Required. The HCFS URI of the main Python file to use as the driver. Must
	// be a .py file.
	MainPythonFileUri string `protobuf:"bytes,1,opt,name=main_python_file_uri,json=mainPythonFileUri,proto3" json:"main_python_file_uri,omitempty"`
	// Optional. The arguments to pass to the driver. Do not include arguments,
	// such as `--conf`, that can be set as job properties, since a collision may
	// occur that causes an incorrect job submission.
	Args []string `protobuf:"bytes,2,rep,name=args,proto3" json:"args,omitempty"`
	// Optional. HCFS file URIs of Python files to pass to the PySpark
	// framework. Supported file types: .py, .egg, and .zip.
	PythonFileUris []string `protobuf:"bytes,3,rep,name=python_file_uris,json=pythonFileUris,proto3" json:"python_file_uris,omitempty"`
	// Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
	// Python driver and tasks.
	JarFileUris []string `protobuf:"bytes,4,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
	// Optional. HCFS URIs of files to be copied to the working directory of
	// Python drivers and distributed tasks. Useful for naively parallel tasks.
	FileUris []string `protobuf:"bytes,5,rep,name=file_uris,json=fileUris,proto3" json:"file_uris,omitempty"`
	// Optional. HCFS URIs of archives to be extracted in the working directory of
	// .jar, .tar, .tar.gz, .tgz, and .zip.
	ArchiveUris []string `protobuf:"bytes,6,rep,name=archive_uris,json=archiveUris,proto3" json:"archive_uris,omitempty"`
	// Optional. A mapping of property names to values, used to configure PySpark.
	// Properties that conflict with values set by the Cloud Dataproc API may be
	// overwritten. Can include properties set in
	// /etc/spark/conf/spark-defaults.conf and classes in user code.
	Properties map[string]string `protobuf:"bytes,7,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. The runtime log config for job execution.
	LoggingConfig *LoggingConfig `protobuf:"bytes,8,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
	// XXX_* fields are internal proto-runtime bookkeeping; do not use directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset restores the message to its zero value.
func (m *PySparkJob) Reset() { *m = PySparkJob{} }

// String renders the message in the proto compact text format.
func (m *PySparkJob) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks PySparkJob as a proto message.
func (*PySparkJob) ProtoMessage() {}

// Descriptor returns the compressed file descriptor bytes and the
// path ([]int{3}) locating this message within that descriptor.
func (*PySparkJob) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{3}
}

// XXX_* methods below delegate wire marshaling to the proto runtime;
// they exist for internal use by the proto package.
func (m *PySparkJob) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_PySparkJob.Unmarshal(m, b)
}
func (m *PySparkJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_PySparkJob.Marshal(b, m, deterministic)
}
func (dst *PySparkJob) XXX_Merge(src proto.Message) {
	xxx_messageInfo_PySparkJob.Merge(dst, src)
}
func (m *PySparkJob) XXX_Size() int {
	return xxx_messageInfo_PySparkJob.Size(m)
}
func (m *PySparkJob) XXX_DiscardUnknown() {
	xxx_messageInfo_PySparkJob.DiscardUnknown(m)
}

var xxx_messageInfo_PySparkJob proto.InternalMessageInfo
  727. func (m *PySparkJob) GetMainPythonFileUri() string {
  728. if m != nil {
  729. return m.MainPythonFileUri
  730. }
  731. return ""
  732. }
  733. func (m *PySparkJob) GetArgs() []string {
  734. if m != nil {
  735. return m.Args
  736. }
  737. return nil
  738. }
  739. func (m *PySparkJob) GetPythonFileUris() []string {
  740. if m != nil {
  741. return m.PythonFileUris
  742. }
  743. return nil
  744. }
  745. func (m *PySparkJob) GetJarFileUris() []string {
  746. if m != nil {
  747. return m.JarFileUris
  748. }
  749. return nil
  750. }
  751. func (m *PySparkJob) GetFileUris() []string {
  752. if m != nil {
  753. return m.FileUris
  754. }
  755. return nil
  756. }
  757. func (m *PySparkJob) GetArchiveUris() []string {
  758. if m != nil {
  759. return m.ArchiveUris
  760. }
  761. return nil
  762. }
  763. func (m *PySparkJob) GetProperties() map[string]string {
  764. if m != nil {
  765. return m.Properties
  766. }
  767. return nil
  768. }
  769. func (m *PySparkJob) GetLoggingConfig() *LoggingConfig {
  770. if m != nil {
  771. return m.LoggingConfig
  772. }
  773. return nil
  774. }
  775. // A list of queries to run on a cluster.
  776. type QueryList struct {
  777. // Required. The queries to execute. You do not need to terminate a query
  778. // with a semicolon. Multiple queries can be specified in one string
  779. // by separating each with a semicolon. Here is an example of an Cloud
  780. // Dataproc API snippet that uses a QueryList to specify a HiveJob:
  781. //
  782. // "hiveJob": {
  783. // "queryList": {
  784. // "queries": [
  785. // "query1",
  786. // "query2",
  787. // "query3;query4",
  788. // ]
  789. // }
  790. // }
  791. Queries []string `protobuf:"bytes,1,rep,name=queries,proto3" json:"queries,omitempty"`
  792. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  793. XXX_unrecognized []byte `json:"-"`
  794. XXX_sizecache int32 `json:"-"`
  795. }
  796. func (m *QueryList) Reset() { *m = QueryList{} }
  797. func (m *QueryList) String() string { return proto.CompactTextString(m) }
  798. func (*QueryList) ProtoMessage() {}
  799. func (*QueryList) Descriptor() ([]byte, []int) {
  800. return fileDescriptor_jobs_695072de36c8e540, []int{4}
  801. }
  802. func (m *QueryList) XXX_Unmarshal(b []byte) error {
  803. return xxx_messageInfo_QueryList.Unmarshal(m, b)
  804. }
  805. func (m *QueryList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  806. return xxx_messageInfo_QueryList.Marshal(b, m, deterministic)
  807. }
  808. func (dst *QueryList) XXX_Merge(src proto.Message) {
  809. xxx_messageInfo_QueryList.Merge(dst, src)
  810. }
  811. func (m *QueryList) XXX_Size() int {
  812. return xxx_messageInfo_QueryList.Size(m)
  813. }
  814. func (m *QueryList) XXX_DiscardUnknown() {
  815. xxx_messageInfo_QueryList.DiscardUnknown(m)
  816. }
  817. var xxx_messageInfo_QueryList proto.InternalMessageInfo
  818. func (m *QueryList) GetQueries() []string {
  819. if m != nil {
  820. return m.Queries
  821. }
  822. return nil
  823. }
  824. // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/)
  825. // queries on YARN.
  826. type HiveJob struct {
  827. // Required. The sequence of Hive queries to execute, specified as either
  828. // an HCFS file URI or a list of queries.
  829. //
  830. // Types that are valid to be assigned to Queries:
  831. // *HiveJob_QueryFileUri
  832. // *HiveJob_QueryList
  833. Queries isHiveJob_Queries `protobuf_oneof:"queries"`
  834. // Optional. Whether to continue executing queries if a query fails.
  835. // The default value is `false`. Setting to `true` can be useful when
  836. // executing independent parallel queries.
  837. ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
  838. // Optional. Mapping of query variable names to values (equivalent to the
  839. // Hive command: `SET name="value";`).
  840. ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  841. // Optional. A mapping of property names and values, used to configure Hive.
  842. // Properties that conflict with values set by the Cloud Dataproc API may be
  843. // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
  844. // /etc/hive/conf/hive-site.xml, and classes in user code.
  845. Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  846. // Optional. HCFS URIs of jar files to add to the CLASSPATH of the
  847. // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
  848. // and UDFs.
  849. JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
  850. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  851. XXX_unrecognized []byte `json:"-"`
  852. XXX_sizecache int32 `json:"-"`
  853. }
  854. func (m *HiveJob) Reset() { *m = HiveJob{} }
  855. func (m *HiveJob) String() string { return proto.CompactTextString(m) }
  856. func (*HiveJob) ProtoMessage() {}
  857. func (*HiveJob) Descriptor() ([]byte, []int) {
  858. return fileDescriptor_jobs_695072de36c8e540, []int{5}
  859. }
  860. func (m *HiveJob) XXX_Unmarshal(b []byte) error {
  861. return xxx_messageInfo_HiveJob.Unmarshal(m, b)
  862. }
  863. func (m *HiveJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  864. return xxx_messageInfo_HiveJob.Marshal(b, m, deterministic)
  865. }
  866. func (dst *HiveJob) XXX_Merge(src proto.Message) {
  867. xxx_messageInfo_HiveJob.Merge(dst, src)
  868. }
  869. func (m *HiveJob) XXX_Size() int {
  870. return xxx_messageInfo_HiveJob.Size(m)
  871. }
  872. func (m *HiveJob) XXX_DiscardUnknown() {
  873. xxx_messageInfo_HiveJob.DiscardUnknown(m)
  874. }
  875. var xxx_messageInfo_HiveJob proto.InternalMessageInfo
  876. type isHiveJob_Queries interface {
  877. isHiveJob_Queries()
  878. }
  879. type HiveJob_QueryFileUri struct {
  880. QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
  881. }
  882. type HiveJob_QueryList struct {
  883. QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
  884. }
  885. func (*HiveJob_QueryFileUri) isHiveJob_Queries() {}
  886. func (*HiveJob_QueryList) isHiveJob_Queries() {}
  887. func (m *HiveJob) GetQueries() isHiveJob_Queries {
  888. if m != nil {
  889. return m.Queries
  890. }
  891. return nil
  892. }
  893. func (m *HiveJob) GetQueryFileUri() string {
  894. if x, ok := m.GetQueries().(*HiveJob_QueryFileUri); ok {
  895. return x.QueryFileUri
  896. }
  897. return ""
  898. }
  899. func (m *HiveJob) GetQueryList() *QueryList {
  900. if x, ok := m.GetQueries().(*HiveJob_QueryList); ok {
  901. return x.QueryList
  902. }
  903. return nil
  904. }
  905. func (m *HiveJob) GetContinueOnFailure() bool {
  906. if m != nil {
  907. return m.ContinueOnFailure
  908. }
  909. return false
  910. }
  911. func (m *HiveJob) GetScriptVariables() map[string]string {
  912. if m != nil {
  913. return m.ScriptVariables
  914. }
  915. return nil
  916. }
  917. func (m *HiveJob) GetProperties() map[string]string {
  918. if m != nil {
  919. return m.Properties
  920. }
  921. return nil
  922. }
  923. func (m *HiveJob) GetJarFileUris() []string {
  924. if m != nil {
  925. return m.JarFileUris
  926. }
  927. return nil
  928. }
  929. // XXX_OneofFuncs is for the internal use of the proto package.
  930. func (*HiveJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
  931. return _HiveJob_OneofMarshaler, _HiveJob_OneofUnmarshaler, _HiveJob_OneofSizer, []interface{}{
  932. (*HiveJob_QueryFileUri)(nil),
  933. (*HiveJob_QueryList)(nil),
  934. }
  935. }
  936. func _HiveJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
  937. m := msg.(*HiveJob)
  938. // queries
  939. switch x := m.Queries.(type) {
  940. case *HiveJob_QueryFileUri:
  941. b.EncodeVarint(1<<3 | proto.WireBytes)
  942. b.EncodeStringBytes(x.QueryFileUri)
  943. case *HiveJob_QueryList:
  944. b.EncodeVarint(2<<3 | proto.WireBytes)
  945. if err := b.EncodeMessage(x.QueryList); err != nil {
  946. return err
  947. }
  948. case nil:
  949. default:
  950. return fmt.Errorf("HiveJob.Queries has unexpected type %T", x)
  951. }
  952. return nil
  953. }
  954. func _HiveJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
  955. m := msg.(*HiveJob)
  956. switch tag {
  957. case 1: // queries.query_file_uri
  958. if wire != proto.WireBytes {
  959. return true, proto.ErrInternalBadWireType
  960. }
  961. x, err := b.DecodeStringBytes()
  962. m.Queries = &HiveJob_QueryFileUri{x}
  963. return true, err
  964. case 2: // queries.query_list
  965. if wire != proto.WireBytes {
  966. return true, proto.ErrInternalBadWireType
  967. }
  968. msg := new(QueryList)
  969. err := b.DecodeMessage(msg)
  970. m.Queries = &HiveJob_QueryList{msg}
  971. return true, err
  972. default:
  973. return false, nil
  974. }
  975. }
  976. func _HiveJob_OneofSizer(msg proto.Message) (n int) {
  977. m := msg.(*HiveJob)
  978. // queries
  979. switch x := m.Queries.(type) {
  980. case *HiveJob_QueryFileUri:
  981. n += 1 // tag and wire
  982. n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
  983. n += len(x.QueryFileUri)
  984. case *HiveJob_QueryList:
  985. s := proto.Size(x.QueryList)
  986. n += 1 // tag and wire
  987. n += proto.SizeVarint(uint64(s))
  988. n += s
  989. case nil:
  990. default:
  991. panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
  992. }
  993. return n
  994. }
  995. // A Cloud Dataproc job for running [Apache Spark
  996. // SQL](http://spark.apache.org/sql/) queries.
  997. type SparkSqlJob struct {
  998. // Required. The sequence of Spark SQL queries to execute, specified as
  999. // either an HCFS file URI or as a list of queries.
  1000. //
  1001. // Types that are valid to be assigned to Queries:
  1002. // *SparkSqlJob_QueryFileUri
  1003. // *SparkSqlJob_QueryList
  1004. Queries isSparkSqlJob_Queries `protobuf_oneof:"queries"`
  1005. // Optional. Mapping of query variable names to values (equivalent to the
  1006. // Spark SQL command: SET `name="value";`).
  1007. ScriptVariables map[string]string `protobuf:"bytes,3,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  1008. // Optional. A mapping of property names to values, used to configure
  1009. // Spark SQL's SparkConf. Properties that conflict with values set by the
  1010. // Cloud Dataproc API may be overwritten.
  1011. Properties map[string]string `protobuf:"bytes,4,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  1012. // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
  1013. JarFileUris []string `protobuf:"bytes,56,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
  1014. // Optional. The runtime log config for job execution.
  1015. LoggingConfig *LoggingConfig `protobuf:"bytes,6,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
  1016. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  1017. XXX_unrecognized []byte `json:"-"`
  1018. XXX_sizecache int32 `json:"-"`
  1019. }
  1020. func (m *SparkSqlJob) Reset() { *m = SparkSqlJob{} }
  1021. func (m *SparkSqlJob) String() string { return proto.CompactTextString(m) }
  1022. func (*SparkSqlJob) ProtoMessage() {}
  1023. func (*SparkSqlJob) Descriptor() ([]byte, []int) {
  1024. return fileDescriptor_jobs_695072de36c8e540, []int{6}
  1025. }
  1026. func (m *SparkSqlJob) XXX_Unmarshal(b []byte) error {
  1027. return xxx_messageInfo_SparkSqlJob.Unmarshal(m, b)
  1028. }
  1029. func (m *SparkSqlJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  1030. return xxx_messageInfo_SparkSqlJob.Marshal(b, m, deterministic)
  1031. }
  1032. func (dst *SparkSqlJob) XXX_Merge(src proto.Message) {
  1033. xxx_messageInfo_SparkSqlJob.Merge(dst, src)
  1034. }
  1035. func (m *SparkSqlJob) XXX_Size() int {
  1036. return xxx_messageInfo_SparkSqlJob.Size(m)
  1037. }
  1038. func (m *SparkSqlJob) XXX_DiscardUnknown() {
  1039. xxx_messageInfo_SparkSqlJob.DiscardUnknown(m)
  1040. }
  1041. var xxx_messageInfo_SparkSqlJob proto.InternalMessageInfo
  1042. type isSparkSqlJob_Queries interface {
  1043. isSparkSqlJob_Queries()
  1044. }
  1045. type SparkSqlJob_QueryFileUri struct {
  1046. QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
  1047. }
  1048. type SparkSqlJob_QueryList struct {
  1049. QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
  1050. }
  1051. func (*SparkSqlJob_QueryFileUri) isSparkSqlJob_Queries() {}
  1052. func (*SparkSqlJob_QueryList) isSparkSqlJob_Queries() {}
  1053. func (m *SparkSqlJob) GetQueries() isSparkSqlJob_Queries {
  1054. if m != nil {
  1055. return m.Queries
  1056. }
  1057. return nil
  1058. }
  1059. func (m *SparkSqlJob) GetQueryFileUri() string {
  1060. if x, ok := m.GetQueries().(*SparkSqlJob_QueryFileUri); ok {
  1061. return x.QueryFileUri
  1062. }
  1063. return ""
  1064. }
  1065. func (m *SparkSqlJob) GetQueryList() *QueryList {
  1066. if x, ok := m.GetQueries().(*SparkSqlJob_QueryList); ok {
  1067. return x.QueryList
  1068. }
  1069. return nil
  1070. }
  1071. func (m *SparkSqlJob) GetScriptVariables() map[string]string {
  1072. if m != nil {
  1073. return m.ScriptVariables
  1074. }
  1075. return nil
  1076. }
  1077. func (m *SparkSqlJob) GetProperties() map[string]string {
  1078. if m != nil {
  1079. return m.Properties
  1080. }
  1081. return nil
  1082. }
  1083. func (m *SparkSqlJob) GetJarFileUris() []string {
  1084. if m != nil {
  1085. return m.JarFileUris
  1086. }
  1087. return nil
  1088. }
  1089. func (m *SparkSqlJob) GetLoggingConfig() *LoggingConfig {
  1090. if m != nil {
  1091. return m.LoggingConfig
  1092. }
  1093. return nil
  1094. }
  1095. // XXX_OneofFuncs is for the internal use of the proto package.
  1096. func (*SparkSqlJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
  1097. return _SparkSqlJob_OneofMarshaler, _SparkSqlJob_OneofUnmarshaler, _SparkSqlJob_OneofSizer, []interface{}{
  1098. (*SparkSqlJob_QueryFileUri)(nil),
  1099. (*SparkSqlJob_QueryList)(nil),
  1100. }
  1101. }
  1102. func _SparkSqlJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
  1103. m := msg.(*SparkSqlJob)
  1104. // queries
  1105. switch x := m.Queries.(type) {
  1106. case *SparkSqlJob_QueryFileUri:
  1107. b.EncodeVarint(1<<3 | proto.WireBytes)
  1108. b.EncodeStringBytes(x.QueryFileUri)
  1109. case *SparkSqlJob_QueryList:
  1110. b.EncodeVarint(2<<3 | proto.WireBytes)
  1111. if err := b.EncodeMessage(x.QueryList); err != nil {
  1112. return err
  1113. }
  1114. case nil:
  1115. default:
  1116. return fmt.Errorf("SparkSqlJob.Queries has unexpected type %T", x)
  1117. }
  1118. return nil
  1119. }
  1120. func _SparkSqlJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
  1121. m := msg.(*SparkSqlJob)
  1122. switch tag {
  1123. case 1: // queries.query_file_uri
  1124. if wire != proto.WireBytes {
  1125. return true, proto.ErrInternalBadWireType
  1126. }
  1127. x, err := b.DecodeStringBytes()
  1128. m.Queries = &SparkSqlJob_QueryFileUri{x}
  1129. return true, err
  1130. case 2: // queries.query_list
  1131. if wire != proto.WireBytes {
  1132. return true, proto.ErrInternalBadWireType
  1133. }
  1134. msg := new(QueryList)
  1135. err := b.DecodeMessage(msg)
  1136. m.Queries = &SparkSqlJob_QueryList{msg}
  1137. return true, err
  1138. default:
  1139. return false, nil
  1140. }
  1141. }
  1142. func _SparkSqlJob_OneofSizer(msg proto.Message) (n int) {
  1143. m := msg.(*SparkSqlJob)
  1144. // queries
  1145. switch x := m.Queries.(type) {
  1146. case *SparkSqlJob_QueryFileUri:
  1147. n += 1 // tag and wire
  1148. n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
  1149. n += len(x.QueryFileUri)
  1150. case *SparkSqlJob_QueryList:
  1151. s := proto.Size(x.QueryList)
  1152. n += 1 // tag and wire
  1153. n += proto.SizeVarint(uint64(s))
  1154. n += s
  1155. case nil:
  1156. default:
  1157. panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
  1158. }
  1159. return n
  1160. }
  1161. // A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/)
  1162. // queries on YARN.
  1163. type PigJob struct {
  1164. // Required. The sequence of Pig queries to execute, specified as an HCFS
  1165. // file URI or a list of queries.
  1166. //
  1167. // Types that are valid to be assigned to Queries:
  1168. // *PigJob_QueryFileUri
  1169. // *PigJob_QueryList
  1170. Queries isPigJob_Queries `protobuf_oneof:"queries"`
  1171. // Optional. Whether to continue executing queries if a query fails.
  1172. // The default value is `false`. Setting to `true` can be useful when
  1173. // executing independent parallel queries.
  1174. ContinueOnFailure bool `protobuf:"varint,3,opt,name=continue_on_failure,json=continueOnFailure,proto3" json:"continue_on_failure,omitempty"`
  1175. // Optional. Mapping of query variable names to values (equivalent to the Pig
  1176. // command: `name=[value]`).
  1177. ScriptVariables map[string]string `protobuf:"bytes,4,rep,name=script_variables,json=scriptVariables,proto3" json:"script_variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  1178. // Optional. A mapping of property names to values, used to configure Pig.
  1179. // Properties that conflict with values set by the Cloud Dataproc API may be
  1180. // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
  1181. // /etc/pig/conf/pig.properties, and classes in user code.
  1182. Properties map[string]string `protobuf:"bytes,5,rep,name=properties,proto3" json:"properties,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
  1183. // Optional. HCFS URIs of jar files to add to the CLASSPATH of
  1184. // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
  1185. JarFileUris []string `protobuf:"bytes,6,rep,name=jar_file_uris,json=jarFileUris,proto3" json:"jar_file_uris,omitempty"`
  1186. // Optional. The runtime log config for job execution.
  1187. LoggingConfig *LoggingConfig `protobuf:"bytes,7,opt,name=logging_config,json=loggingConfig,proto3" json:"logging_config,omitempty"`
  1188. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  1189. XXX_unrecognized []byte `json:"-"`
  1190. XXX_sizecache int32 `json:"-"`
  1191. }
  1192. func (m *PigJob) Reset() { *m = PigJob{} }
  1193. func (m *PigJob) String() string { return proto.CompactTextString(m) }
  1194. func (*PigJob) ProtoMessage() {}
  1195. func (*PigJob) Descriptor() ([]byte, []int) {
  1196. return fileDescriptor_jobs_695072de36c8e540, []int{7}
  1197. }
  1198. func (m *PigJob) XXX_Unmarshal(b []byte) error {
  1199. return xxx_messageInfo_PigJob.Unmarshal(m, b)
  1200. }
  1201. func (m *PigJob) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  1202. return xxx_messageInfo_PigJob.Marshal(b, m, deterministic)
  1203. }
  1204. func (dst *PigJob) XXX_Merge(src proto.Message) {
  1205. xxx_messageInfo_PigJob.Merge(dst, src)
  1206. }
  1207. func (m *PigJob) XXX_Size() int {
  1208. return xxx_messageInfo_PigJob.Size(m)
  1209. }
  1210. func (m *PigJob) XXX_DiscardUnknown() {
  1211. xxx_messageInfo_PigJob.DiscardUnknown(m)
  1212. }
  1213. var xxx_messageInfo_PigJob proto.InternalMessageInfo
  1214. type isPigJob_Queries interface {
  1215. isPigJob_Queries()
  1216. }
  1217. type PigJob_QueryFileUri struct {
  1218. QueryFileUri string `protobuf:"bytes,1,opt,name=query_file_uri,json=queryFileUri,proto3,oneof"`
  1219. }
  1220. type PigJob_QueryList struct {
  1221. QueryList *QueryList `protobuf:"bytes,2,opt,name=query_list,json=queryList,proto3,oneof"`
  1222. }
  1223. func (*PigJob_QueryFileUri) isPigJob_Queries() {}
  1224. func (*PigJob_QueryList) isPigJob_Queries() {}
  1225. func (m *PigJob) GetQueries() isPigJob_Queries {
  1226. if m != nil {
  1227. return m.Queries
  1228. }
  1229. return nil
  1230. }
  1231. func (m *PigJob) GetQueryFileUri() string {
  1232. if x, ok := m.GetQueries().(*PigJob_QueryFileUri); ok {
  1233. return x.QueryFileUri
  1234. }
  1235. return ""
  1236. }
  1237. func (m *PigJob) GetQueryList() *QueryList {
  1238. if x, ok := m.GetQueries().(*PigJob_QueryList); ok {
  1239. return x.QueryList
  1240. }
  1241. return nil
  1242. }
  1243. func (m *PigJob) GetContinueOnFailure() bool {
  1244. if m != nil {
  1245. return m.ContinueOnFailure
  1246. }
  1247. return false
  1248. }
  1249. func (m *PigJob) GetScriptVariables() map[string]string {
  1250. if m != nil {
  1251. return m.ScriptVariables
  1252. }
  1253. return nil
  1254. }
  1255. func (m *PigJob) GetProperties() map[string]string {
  1256. if m != nil {
  1257. return m.Properties
  1258. }
  1259. return nil
  1260. }
  1261. func (m *PigJob) GetJarFileUris() []string {
  1262. if m != nil {
  1263. return m.JarFileUris
  1264. }
  1265. return nil
  1266. }
  1267. func (m *PigJob) GetLoggingConfig() *LoggingConfig {
  1268. if m != nil {
  1269. return m.LoggingConfig
  1270. }
  1271. return nil
  1272. }
  1273. // XXX_OneofFuncs is for the internal use of the proto package.
  1274. func (*PigJob) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
  1275. return _PigJob_OneofMarshaler, _PigJob_OneofUnmarshaler, _PigJob_OneofSizer, []interface{}{
  1276. (*PigJob_QueryFileUri)(nil),
  1277. (*PigJob_QueryList)(nil),
  1278. }
  1279. }
  1280. func _PigJob_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
  1281. m := msg.(*PigJob)
  1282. // queries
  1283. switch x := m.Queries.(type) {
  1284. case *PigJob_QueryFileUri:
  1285. b.EncodeVarint(1<<3 | proto.WireBytes)
  1286. b.EncodeStringBytes(x.QueryFileUri)
  1287. case *PigJob_QueryList:
  1288. b.EncodeVarint(2<<3 | proto.WireBytes)
  1289. if err := b.EncodeMessage(x.QueryList); err != nil {
  1290. return err
  1291. }
  1292. case nil:
  1293. default:
  1294. return fmt.Errorf("PigJob.Queries has unexpected type %T", x)
  1295. }
  1296. return nil
  1297. }
  1298. func _PigJob_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
  1299. m := msg.(*PigJob)
  1300. switch tag {
  1301. case 1: // queries.query_file_uri
  1302. if wire != proto.WireBytes {
  1303. return true, proto.ErrInternalBadWireType
  1304. }
  1305. x, err := b.DecodeStringBytes()
  1306. m.Queries = &PigJob_QueryFileUri{x}
  1307. return true, err
  1308. case 2: // queries.query_list
  1309. if wire != proto.WireBytes {
  1310. return true, proto.ErrInternalBadWireType
  1311. }
  1312. msg := new(QueryList)
  1313. err := b.DecodeMessage(msg)
  1314. m.Queries = &PigJob_QueryList{msg}
  1315. return true, err
  1316. default:
  1317. return false, nil
  1318. }
  1319. }
  1320. func _PigJob_OneofSizer(msg proto.Message) (n int) {
  1321. m := msg.(*PigJob)
  1322. // queries
  1323. switch x := m.Queries.(type) {
  1324. case *PigJob_QueryFileUri:
  1325. n += 1 // tag and wire
  1326. n += proto.SizeVarint(uint64(len(x.QueryFileUri)))
  1327. n += len(x.QueryFileUri)
  1328. case *PigJob_QueryList:
  1329. s := proto.Size(x.QueryList)
  1330. n += 1 // tag and wire
  1331. n += proto.SizeVarint(uint64(s))
  1332. n += s
  1333. case nil:
  1334. default:
  1335. panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
  1336. }
  1337. return n
  1338. }
  1339. // Cloud Dataproc job config.
  1340. type JobPlacement struct {
  1341. // Required. The name of the cluster where the job will be submitted.
  1342. ClusterName string `protobuf:"bytes,1,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
  1343. // Output only. A cluster UUID generated by the Cloud Dataproc service when
  1344. // the job is submitted.
  1345. ClusterUuid string `protobuf:"bytes,2,opt,name=cluster_uuid,json=clusterUuid,proto3" json:"cluster_uuid,omitempty"`
  1346. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  1347. XXX_unrecognized []byte `json:"-"`
  1348. XXX_sizecache int32 `json:"-"`
  1349. }
  1350. func (m *JobPlacement) Reset() { *m = JobPlacement{} }
  1351. func (m *JobPlacement) String() string { return proto.CompactTextString(m) }
  1352. func (*JobPlacement) ProtoMessage() {}
  1353. func (*JobPlacement) Descriptor() ([]byte, []int) {
  1354. return fileDescriptor_jobs_695072de36c8e540, []int{8}
  1355. }
  1356. func (m *JobPlacement) XXX_Unmarshal(b []byte) error {
  1357. return xxx_messageInfo_JobPlacement.Unmarshal(m, b)
  1358. }
  1359. func (m *JobPlacement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  1360. return xxx_messageInfo_JobPlacement.Marshal(b, m, deterministic)
  1361. }
  1362. func (dst *JobPlacement) XXX_Merge(src proto.Message) {
  1363. xxx_messageInfo_JobPlacement.Merge(dst, src)
  1364. }
  1365. func (m *JobPlacement) XXX_Size() int {
  1366. return xxx_messageInfo_JobPlacement.Size(m)
  1367. }
  1368. func (m *JobPlacement) XXX_DiscardUnknown() {
  1369. xxx_messageInfo_JobPlacement.DiscardUnknown(m)
  1370. }
  1371. var xxx_messageInfo_JobPlacement proto.InternalMessageInfo
  1372. func (m *JobPlacement) GetClusterName() string {
  1373. if m != nil {
  1374. return m.ClusterName
  1375. }
  1376. return ""
  1377. }
  1378. func (m *JobPlacement) GetClusterUuid() string {
  1379. if m != nil {
  1380. return m.ClusterUuid
  1381. }
  1382. return ""
  1383. }
  1384. // Cloud Dataproc job status.
  1385. type JobStatus struct {
  1386. // Output only. A state message specifying the overall job state.
  1387. State JobStatus_State `protobuf:"varint,1,opt,name=state,proto3,enum=google.cloud.dataproc.v1.JobStatus_State" json:"state,omitempty"`
  1388. // Output only. Optional job state details, such as an error
  1389. // description if the state is <code>ERROR</code>.
  1390. Details string `protobuf:"bytes,2,opt,name=details,proto3" json:"details,omitempty"`
  1391. // Output only. The time when this state was entered.
  1392. StateStartTime *timestamp.Timestamp `protobuf:"bytes,6,opt,name=state_start_time,json=stateStartTime,proto3" json:"state_start_time,omitempty"`
  1393. // Output only. Additional state information, which includes
  1394. // status reported by the agent.
  1395. Substate JobStatus_Substate `protobuf:"varint,7,opt,name=substate,proto3,enum=google.cloud.dataproc.v1.JobStatus_Substate" json:"substate,omitempty"`
  1396. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  1397. XXX_unrecognized []byte `json:"-"`
  1398. XXX_sizecache int32 `json:"-"`
  1399. }
  1400. func (m *JobStatus) Reset() { *m = JobStatus{} }
  1401. func (m *JobStatus) String() string { return proto.CompactTextString(m) }
  1402. func (*JobStatus) ProtoMessage() {}
  1403. func (*JobStatus) Descriptor() ([]byte, []int) {
  1404. return fileDescriptor_jobs_695072de36c8e540, []int{9}
  1405. }
  1406. func (m *JobStatus) XXX_Unmarshal(b []byte) error {
  1407. return xxx_messageInfo_JobStatus.Unmarshal(m, b)
  1408. }
  1409. func (m *JobStatus) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  1410. return xxx_messageInfo_JobStatus.Marshal(b, m, deterministic)
  1411. }
  1412. func (dst *JobStatus) XXX_Merge(src proto.Message) {
  1413. xxx_messageInfo_JobStatus.Merge(dst, src)
  1414. }
  1415. func (m *JobStatus) XXX_Size() int {
  1416. return xxx_messageInfo_JobStatus.Size(m)
  1417. }
  1418. func (m *JobStatus) XXX_DiscardUnknown() {
  1419. xxx_messageInfo_JobStatus.DiscardUnknown(m)
  1420. }
  1421. var xxx_messageInfo_JobStatus proto.InternalMessageInfo
  1422. func (m *JobStatus) GetState() JobStatus_State {
  1423. if m != nil {
  1424. return m.State
  1425. }
  1426. return JobStatus_STATE_UNSPECIFIED
  1427. }
  1428. func (m *JobStatus) GetDetails() string {
  1429. if m != nil {
  1430. return m.Details
  1431. }
  1432. return ""
  1433. }
  1434. func (m *JobStatus) GetStateStartTime() *timestamp.Timestamp {
  1435. if m != nil {
  1436. return m.StateStartTime
  1437. }
  1438. return nil
  1439. }
  1440. func (m *JobStatus) GetSubstate() JobStatus_Substate {
  1441. if m != nil {
  1442. return m.Substate
  1443. }
  1444. return JobStatus_UNSPECIFIED
  1445. }
  1446. // Encapsulates the full scoping used to reference a job.
  1447. type JobReference struct {
  1448. // Required. The ID of the Google Cloud Platform project that the job
  1449. // belongs to.
  1450. ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
  1451. // Optional. The job ID, which must be unique within the project. The job ID
  1452. // is generated by the server upon job submission or provided by the user as a
  1453. // means to perform retries without creating duplicate jobs. The ID must
  1454. // contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
  1455. // hyphens (-). The maximum length is 100 characters.
  1456. JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
  1457. XXX_NoUnkeyedLiteral struct{} `json:"-"`
  1458. XXX_unrecognized []byte `json:"-"`
  1459. XXX_sizecache int32 `json:"-"`
  1460. }
  1461. func (m *JobReference) Reset() { *m = JobReference{} }
  1462. func (m *JobReference) String() string { return proto.CompactTextString(m) }
  1463. func (*JobReference) ProtoMessage() {}
  1464. func (*JobReference) Descriptor() ([]byte, []int) {
  1465. return fileDescriptor_jobs_695072de36c8e540, []int{10}
  1466. }
  1467. func (m *JobReference) XXX_Unmarshal(b []byte) error {
  1468. return xxx_messageInfo_JobReference.Unmarshal(m, b)
  1469. }
  1470. func (m *JobReference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
  1471. return xxx_messageInfo_JobReference.Marshal(b, m, deterministic)
  1472. }
  1473. func (dst *JobReference) XXX_Merge(src proto.Message) {
  1474. xxx_messageInfo_JobReference.Merge(dst, src)
  1475. }
  1476. func (m *JobReference) XXX_Size() int {
  1477. return xxx_messageInfo_JobReference.Size(m)
  1478. }
  1479. func (m *JobReference) XXX_DiscardUnknown() {
  1480. xxx_messageInfo_JobReference.DiscardUnknown(m)
  1481. }
  1482. var xxx_messageInfo_JobReference proto.InternalMessageInfo
  1483. func (m *JobReference) GetProjectId() string {
  1484. if m != nil {
  1485. return m.ProjectId
  1486. }
  1487. return ""
  1488. }
  1489. func (m *JobReference) GetJobId() string {
  1490. if m != nil {
  1491. return m.JobId
  1492. }
  1493. return ""
  1494. }
// A YARN application created by a job. Application information is a subset of
// <code>org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto</code>.
//
// **Beta Feature**: This report is available for testing purposes only. It may
// be changed before final release.
type YarnApplication struct {
	// Required. The application name.
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Required. The application state.
	State YarnApplication_State `protobuf:"varint,2,opt,name=state,proto3,enum=google.cloud.dataproc.v1.YarnApplication_State" json:"state,omitempty"`
	// Required. The numerical progress of the application, from 1 to 100.
	Progress float32 `protobuf:"fixed32,3,opt,name=progress,proto3" json:"progress,omitempty"`
	// Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or
	// TimelineServer that provides application-specific information. The URL uses
	// the internal hostname, and requires a proxy server for resolution and,
	// possibly, access.
	TrackingUrl string `protobuf:"bytes,4,opt,name=tracking_url,json=trackingUrl,proto3" json:"tracking_url,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *YarnApplication) Reset() { *m = YarnApplication{} }

// String returns the compact text representation of the message.
func (m *YarnApplication) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks YarnApplication as a protobuf message.
func (*YarnApplication) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*YarnApplication) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{11}
}

// XXX_Unmarshal decodes b into m.
func (m *YarnApplication) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_YarnApplication.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *YarnApplication) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_YarnApplication.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *YarnApplication) XXX_Merge(src proto.Message) {
	xxx_messageInfo_YarnApplication.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *YarnApplication) XXX_Size() int {
	return xxx_messageInfo_YarnApplication.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *YarnApplication) XXX_DiscardUnknown() {
	xxx_messageInfo_YarnApplication.DiscardUnknown(m)
}

// xxx_messageInfo_YarnApplication caches reflection info for YarnApplication.
var xxx_messageInfo_YarnApplication proto.InternalMessageInfo

// GetName returns the application name; safe to call on a nil receiver.
func (m *YarnApplication) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}

// GetState returns the application state, or STATE_UNSPECIFIED for a nil receiver.
func (m *YarnApplication) GetState() YarnApplication_State {
	if m != nil {
		return m.State
	}
	return YarnApplication_STATE_UNSPECIFIED
}

// GetProgress returns the numerical progress; safe to call on a nil receiver.
func (m *YarnApplication) GetProgress() float32 {
	if m != nil {
		return m.Progress
	}
	return 0
}

// GetTrackingUrl returns the tracking URL; safe to call on a nil receiver.
func (m *YarnApplication) GetTrackingUrl() string {
	if m != nil {
		return m.TrackingUrl
	}
	return ""
}
// A Cloud Dataproc job resource.
type Job struct {
	// Optional. The fully qualified reference to the job, which can be used to
	// obtain the equivalent REST path of the job resource. If this property
	// is not specified when a job is created, the server generates a
	// <code>job_id</code>.
	Reference *JobReference `protobuf:"bytes,1,opt,name=reference,proto3" json:"reference,omitempty"`
	// Required. Job information, including how, when, and where to
	// run the job.
	Placement *JobPlacement `protobuf:"bytes,2,opt,name=placement,proto3" json:"placement,omitempty"`
	// Required. The application/framework-specific portion of the job.
	//
	// Types that are valid to be assigned to TypeJob:
	//	*Job_HadoopJob
	//	*Job_SparkJob
	//	*Job_PysparkJob
	//	*Job_HiveJob
	//	*Job_PigJob
	//	*Job_SparkSqlJob
	TypeJob isJob_TypeJob `protobuf_oneof:"type_job"`
	// Output only. The job status. Additional application-specific
	// status information may be contained in the <code>type_job</code>
	// and <code>yarn_applications</code> fields.
	Status *JobStatus `protobuf:"bytes,8,opt,name=status,proto3" json:"status,omitempty"`
	// Output only. The previous job status.
	StatusHistory []*JobStatus `protobuf:"bytes,13,rep,name=status_history,json=statusHistory,proto3" json:"status_history,omitempty"`
	// Output only. The collection of YARN applications spun up by this job.
	//
	// **Beta** Feature: This report is available for testing purposes only. It
	// may be changed before final release.
	YarnApplications []*YarnApplication `protobuf:"bytes,9,rep,name=yarn_applications,json=yarnApplications,proto3" json:"yarn_applications,omitempty"`
	// Output only. A URI pointing to the location of the stdout of the job's
	// driver program.
	DriverOutputResourceUri string `protobuf:"bytes,17,opt,name=driver_output_resource_uri,json=driverOutputResourceUri,proto3" json:"driver_output_resource_uri,omitempty"`
	// Output only. If present, the location of miscellaneous control files
	// which may be used as part of job setup and handling. If not present,
	// control files may be placed in the same location as `driver_output_uri`.
	DriverControlFilesUri string `protobuf:"bytes,15,opt,name=driver_control_files_uri,json=driverControlFilesUri,proto3" json:"driver_control_files_uri,omitempty"`
	// Optional. The labels to associate with this job.
	// Label **keys** must contain 1 to 63 characters, and must conform to
	// [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
	// Label **values** may be empty, but, if present, must contain 1 to 63
	// characters, and must conform to [RFC
	// 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
	// associated with a job.
	Labels map[string]string `protobuf:"bytes,18,rep,name=labels,proto3" json:"labels,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// Optional. Job scheduling configuration.
	Scheduling *JobScheduling `protobuf:"bytes,20,opt,name=scheduling,proto3" json:"scheduling,omitempty"`
	// Output only. A UUID that uniquely identifies a job within the project
	// over time. This is in contrast to a user-settable reference.job_id that
	// may be reused over time.
	JobUuid string `protobuf:"bytes,22,opt,name=job_uuid,json=jobUuid,proto3" json:"job_uuid,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *Job) Reset() { *m = Job{} }

// String returns the compact text representation of the message.
func (m *Job) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Job as a protobuf message.
func (*Job) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*Job) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{12}
}

// XXX_Unmarshal decodes b into m.
func (m *Job) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Job.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *Job) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Job.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *Job) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Job.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *Job) XXX_Size() int {
	return xxx_messageInfo_Job.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *Job) XXX_DiscardUnknown() {
	xxx_messageInfo_Job.DiscardUnknown(m)
}

// xxx_messageInfo_Job caches reflection info for Job.
var xxx_messageInfo_Job proto.InternalMessageInfo

// GetReference returns the job reference; safe to call on a nil receiver.
func (m *Job) GetReference() *JobReference {
	if m != nil {
		return m.Reference
	}
	return nil
}

// GetPlacement returns the job placement; safe to call on a nil receiver.
func (m *Job) GetPlacement() *JobPlacement {
	if m != nil {
		return m.Placement
	}
	return nil
}

// isJob_TypeJob is the interface satisfied by the type_job oneof wrappers.
type isJob_TypeJob interface {
	isJob_TypeJob()
}

// Job_HadoopJob wraps a HadoopJob in the type_job oneof (field 3).
type Job_HadoopJob struct {
	HadoopJob *HadoopJob `protobuf:"bytes,3,opt,name=hadoop_job,json=hadoopJob,proto3,oneof"`
}

// Job_SparkJob wraps a SparkJob in the type_job oneof (field 4).
type Job_SparkJob struct {
	SparkJob *SparkJob `protobuf:"bytes,4,opt,name=spark_job,json=sparkJob,proto3,oneof"`
}

// Job_PysparkJob wraps a PySparkJob in the type_job oneof (field 5).
type Job_PysparkJob struct {
	PysparkJob *PySparkJob `protobuf:"bytes,5,opt,name=pyspark_job,json=pysparkJob,proto3,oneof"`
}

// Job_HiveJob wraps a HiveJob in the type_job oneof (field 6).
type Job_HiveJob struct {
	HiveJob *HiveJob `protobuf:"bytes,6,opt,name=hive_job,json=hiveJob,proto3,oneof"`
}

// Job_PigJob wraps a PigJob in the type_job oneof (field 7).
type Job_PigJob struct {
	PigJob *PigJob `protobuf:"bytes,7,opt,name=pig_job,json=pigJob,proto3,oneof"`
}

// Job_SparkSqlJob wraps a SparkSqlJob in the type_job oneof (field 12).
type Job_SparkSqlJob struct {
	SparkSqlJob *SparkSqlJob `protobuf:"bytes,12,opt,name=spark_sql_job,json=sparkSqlJob,proto3,oneof"`
}

func (*Job_HadoopJob) isJob_TypeJob() {}

func (*Job_SparkJob) isJob_TypeJob() {}

func (*Job_PysparkJob) isJob_TypeJob() {}

func (*Job_HiveJob) isJob_TypeJob() {}

func (*Job_PigJob) isJob_TypeJob() {}

func (*Job_SparkSqlJob) isJob_TypeJob() {}

// GetTypeJob returns the populated type_job oneof wrapper, or nil.
func (m *Job) GetTypeJob() isJob_TypeJob {
	if m != nil {
		return m.TypeJob
	}
	return nil
}

// GetHadoopJob returns the HadoopJob if the oneof holds one, else nil.
func (m *Job) GetHadoopJob() *HadoopJob {
	if x, ok := m.GetTypeJob().(*Job_HadoopJob); ok {
		return x.HadoopJob
	}
	return nil
}

// GetSparkJob returns the SparkJob if the oneof holds one, else nil.
func (m *Job) GetSparkJob() *SparkJob {
	if x, ok := m.GetTypeJob().(*Job_SparkJob); ok {
		return x.SparkJob
	}
	return nil
}

// GetPysparkJob returns the PySparkJob if the oneof holds one, else nil.
func (m *Job) GetPysparkJob() *PySparkJob {
	if x, ok := m.GetTypeJob().(*Job_PysparkJob); ok {
		return x.PysparkJob
	}
	return nil
}

// GetHiveJob returns the HiveJob if the oneof holds one, else nil.
func (m *Job) GetHiveJob() *HiveJob {
	if x, ok := m.GetTypeJob().(*Job_HiveJob); ok {
		return x.HiveJob
	}
	return nil
}

// GetPigJob returns the PigJob if the oneof holds one, else nil.
func (m *Job) GetPigJob() *PigJob {
	if x, ok := m.GetTypeJob().(*Job_PigJob); ok {
		return x.PigJob
	}
	return nil
}

// GetSparkSqlJob returns the SparkSqlJob if the oneof holds one, else nil.
func (m *Job) GetSparkSqlJob() *SparkSqlJob {
	if x, ok := m.GetTypeJob().(*Job_SparkSqlJob); ok {
		return x.SparkSqlJob
	}
	return nil
}

// GetStatus returns the job status; safe to call on a nil receiver.
func (m *Job) GetStatus() *JobStatus {
	if m != nil {
		return m.Status
	}
	return nil
}

// GetStatusHistory returns the previous job statuses; safe to call on a nil receiver.
func (m *Job) GetStatusHistory() []*JobStatus {
	if m != nil {
		return m.StatusHistory
	}
	return nil
}

// GetYarnApplications returns the YARN applications; safe to call on a nil receiver.
func (m *Job) GetYarnApplications() []*YarnApplication {
	if m != nil {
		return m.YarnApplications
	}
	return nil
}

// GetDriverOutputResourceUri returns the driver stdout URI; safe to call on a nil receiver.
func (m *Job) GetDriverOutputResourceUri() string {
	if m != nil {
		return m.DriverOutputResourceUri
	}
	return ""
}

// GetDriverControlFilesUri returns the control-files URI; safe to call on a nil receiver.
func (m *Job) GetDriverControlFilesUri() string {
	if m != nil {
		return m.DriverControlFilesUri
	}
	return ""
}

// GetLabels returns the label map; safe to call on a nil receiver.
func (m *Job) GetLabels() map[string]string {
	if m != nil {
		return m.Labels
	}
	return nil
}

// GetScheduling returns the scheduling configuration; safe to call on a nil receiver.
func (m *Job) GetScheduling() *JobScheduling {
	if m != nil {
		return m.Scheduling
	}
	return nil
}

// GetJobUuid returns the server-assigned job UUID; safe to call on a nil receiver.
func (m *Job) GetJobUuid() string {
	if m != nil {
		return m.JobUuid
	}
	return ""
}
// XXX_OneofFuncs is for the internal use of the proto package.
// It returns the marshal/unmarshal/size helpers for the type_job oneof plus
// the list of wrapper types that may populate it.
func (*Job) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _Job_OneofMarshaler, _Job_OneofUnmarshaler, _Job_OneofSizer, []interface{}{
		(*Job_HadoopJob)(nil),
		(*Job_SparkJob)(nil),
		(*Job_PysparkJob)(nil),
		(*Job_HiveJob)(nil),
		(*Job_PigJob)(nil),
		(*Job_SparkSqlJob)(nil),
	}
}

// _Job_OneofMarshaler encodes whichever type_job wrapper is set.
// For each case it writes the field's wire tag (fieldNumber<<3 | WireBytes)
// followed by the length-delimited sub-message.
func _Job_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*Job)
	// type_job
	switch x := m.TypeJob.(type) {
	case *Job_HadoopJob:
		b.EncodeVarint(3<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HadoopJob); err != nil {
			return err
		}
	case *Job_SparkJob:
		b.EncodeVarint(4<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.SparkJob); err != nil {
			return err
		}
	case *Job_PysparkJob:
		b.EncodeVarint(5<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.PysparkJob); err != nil {
			return err
		}
	case *Job_HiveJob:
		b.EncodeVarint(6<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HiveJob); err != nil {
			return err
		}
	case *Job_PigJob:
		b.EncodeVarint(7<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.PigJob); err != nil {
			return err
		}
	case *Job_SparkSqlJob:
		b.EncodeVarint(12<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.SparkSqlJob); err != nil {
			return err
		}
	case nil:
		// No oneof field is set; nothing to encode.
	default:
		return fmt.Errorf("Job.TypeJob has unexpected type %T", x)
	}
	return nil
}

// _Job_OneofUnmarshaler decodes a type_job field identified by tag.
// It returns (true, err) when the tag belongs to this oneof, and
// (false, nil) when the caller should handle the field itself.
func _Job_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*Job)
	switch tag {
	case 3: // type_job.hadoop_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HadoopJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_HadoopJob{msg}
		return true, err
	case 4: // type_job.spark_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(SparkJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_SparkJob{msg}
		return true, err
	case 5: // type_job.pyspark_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(PySparkJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_PysparkJob{msg}
		return true, err
	case 6: // type_job.hive_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HiveJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_HiveJob{msg}
		return true, err
	case 7: // type_job.pig_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(PigJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_PigJob{msg}
		return true, err
	case 12: // type_job.spark_sql_job
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(SparkSqlJob)
		err := b.DecodeMessage(msg)
		m.TypeJob = &Job_SparkSqlJob{msg}
		return true, err
	default:
		return false, nil
	}
}

// _Job_OneofSizer returns the encoded size of whichever type_job wrapper is
// set: one byte of tag/wire, the varint length prefix, then the sub-message.
func _Job_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*Job)
	// type_job
	switch x := m.TypeJob.(type) {
	case *Job_HadoopJob:
		s := proto.Size(x.HadoopJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_SparkJob:
		s := proto.Size(x.SparkJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_PysparkJob:
		s := proto.Size(x.PysparkJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_HiveJob:
		s := proto.Size(x.HiveJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_PigJob:
		s := proto.Size(x.PigJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case *Job_SparkSqlJob:
		s := proto.Size(x.SparkSqlJob)
		n += 1 // tag and wire
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
		// No oneof field set; contributes nothing to the size.
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}
// Job scheduling options.
type JobScheduling struct {
	// Optional. Maximum number of times per hour a driver may be restarted as
	// a result of driver terminating with non-zero code before job is
	// reported failed.
	//
	// A job may be reported as thrashing if driver exits with non-zero code
	// 4 times within 10 minute window.
	//
	// Maximum value is 10.
	MaxFailuresPerHour int32 `protobuf:"varint,1,opt,name=max_failures_per_hour,json=maxFailuresPerHour,proto3" json:"max_failures_per_hour,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *JobScheduling) Reset() { *m = JobScheduling{} }

// String returns the compact text representation of the message.
func (m *JobScheduling) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks JobScheduling as a protobuf message.
func (*JobScheduling) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*JobScheduling) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{13}
}

// XXX_Unmarshal decodes b into m.
func (m *JobScheduling) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_JobScheduling.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *JobScheduling) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_JobScheduling.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *JobScheduling) XXX_Merge(src proto.Message) {
	xxx_messageInfo_JobScheduling.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *JobScheduling) XXX_Size() int {
	return xxx_messageInfo_JobScheduling.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *JobScheduling) XXX_DiscardUnknown() {
	xxx_messageInfo_JobScheduling.DiscardUnknown(m)
}

// xxx_messageInfo_JobScheduling caches reflection info for JobScheduling.
var xxx_messageInfo_JobScheduling proto.InternalMessageInfo

// GetMaxFailuresPerHour returns the restart limit; safe to call on a nil receiver.
func (m *JobScheduling) GetMaxFailuresPerHour() int32 {
	if m != nil {
		return m.MaxFailuresPerHour
	}
	return 0
}
// A request to submit a job.
type SubmitJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job resource.
	Job *Job `protobuf:"bytes,2,opt,name=job,proto3" json:"job,omitempty"`
	// Optional. A unique id used to identify the request. If the server
	// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest]
	// requests with the same id, then the second request will be ignored and the
	// first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
	// is returned.
	//
	// It is recommended to always set this value to a
	// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
	//
	// The id must contain only letters (a-z, A-Z), numbers (0-9),
	// underscores (_), and hyphens (-). The maximum length is 40 characters.
	RequestId string `protobuf:"bytes,4,opt,name=request_id,json=requestId,proto3" json:"request_id,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *SubmitJobRequest) Reset() { *m = SubmitJobRequest{} }

// String returns the compact text representation of the message.
func (m *SubmitJobRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks SubmitJobRequest as a protobuf message.
func (*SubmitJobRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*SubmitJobRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{14}
}

// XXX_Unmarshal decodes b into m.
func (m *SubmitJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SubmitJobRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *SubmitJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SubmitJobRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *SubmitJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SubmitJobRequest.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *SubmitJobRequest) XXX_Size() int {
	return xxx_messageInfo_SubmitJobRequest.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *SubmitJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SubmitJobRequest.DiscardUnknown(m)
}

// xxx_messageInfo_SubmitJobRequest caches reflection info for SubmitJobRequest.
var xxx_messageInfo_SubmitJobRequest proto.InternalMessageInfo

// GetProjectId returns the project ID; safe to call on a nil receiver.
func (m *SubmitJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the region; safe to call on a nil receiver.
func (m *SubmitJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetJob returns the job resource; safe to call on a nil receiver.
func (m *SubmitJobRequest) GetJob() *Job {
	if m != nil {
		return m.Job
	}
	return nil
}

// GetRequestId returns the idempotency request ID; safe to call on a nil receiver.
func (m *SubmitJobRequest) GetRequestId() string {
	if m != nil {
		return m.RequestId
	}
	return ""
}
// A request to get the resource representation for a job in a project.
type GetJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *GetJobRequest) Reset() { *m = GetJobRequest{} }

// String returns the compact text representation of the message.
func (m *GetJobRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks GetJobRequest as a protobuf message.
func (*GetJobRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*GetJobRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{15}
}

// XXX_Unmarshal decodes b into m.
func (m *GetJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetJobRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *GetJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetJobRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *GetJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetJobRequest.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *GetJobRequest) XXX_Size() int {
	return xxx_messageInfo_GetJobRequest.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *GetJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetJobRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetJobRequest caches reflection info for GetJobRequest.
var xxx_messageInfo_GetJobRequest proto.InternalMessageInfo

// GetProjectId returns the project ID; safe to call on a nil receiver.
func (m *GetJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the region; safe to call on a nil receiver.
func (m *GetJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetJobId returns the job ID; safe to call on a nil receiver.
func (m *GetJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}
// A request to list jobs in a project.
type ListJobsRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,6,opt,name=region,proto3" json:"region,omitempty"`
	// Optional. The number of results to return in each response.
	PageSize int32 `protobuf:"varint,2,opt,name=page_size,json=pageSize,proto3" json:"page_size,omitempty"`
	// Optional. The page token, returned by a previous call, to request the
	// next page of results.
	PageToken string `protobuf:"bytes,3,opt,name=page_token,json=pageToken,proto3" json:"page_token,omitempty"`
	// Optional. If set, the returned jobs list includes only jobs that were
	// submitted to the named cluster.
	ClusterName string `protobuf:"bytes,4,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
	// Optional. Specifies enumerated categories of jobs to list.
	// (default = match ALL jobs).
	//
	// If `filter` is provided, `jobStateMatcher` will be ignored.
	JobStateMatcher ListJobsRequest_JobStateMatcher `protobuf:"varint,5,opt,name=job_state_matcher,json=jobStateMatcher,proto3,enum=google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher" json:"job_state_matcher,omitempty"`
	// Optional. A filter constraining the jobs to list. Filters are
	// case-sensitive and have the following syntax:
	//
	// [field = value] AND [field [= value]] ...
	//
	// where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label
	// key. **value** can be `*` to match all values.
	// `status.state` can be either `ACTIVE` or `NON_ACTIVE`.
	// Only the logical `AND` operator is supported; space-separated items are
	// treated as having an implicit `AND` operator.
	//
	// Example filter:
	//
	// status.state = ACTIVE AND labels.env = staging AND labels.starred = *
	Filter string `protobuf:"bytes,7,opt,name=filter,proto3" json:"filter,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *ListJobsRequest) Reset() { *m = ListJobsRequest{} }

// String returns the compact text representation of the message.
func (m *ListJobsRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks ListJobsRequest as a protobuf message.
func (*ListJobsRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*ListJobsRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{16}
}

// XXX_Unmarshal decodes b into m.
func (m *ListJobsRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListJobsRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *ListJobsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListJobsRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *ListJobsRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListJobsRequest.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *ListJobsRequest) XXX_Size() int {
	return xxx_messageInfo_ListJobsRequest.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *ListJobsRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListJobsRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListJobsRequest caches reflection info for ListJobsRequest.
var xxx_messageInfo_ListJobsRequest proto.InternalMessageInfo

// GetProjectId returns the project ID; safe to call on a nil receiver.
func (m *ListJobsRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the region; safe to call on a nil receiver.
func (m *ListJobsRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetPageSize returns the page size; safe to call on a nil receiver.
func (m *ListJobsRequest) GetPageSize() int32 {
	if m != nil {
		return m.PageSize
	}
	return 0
}

// GetPageToken returns the pagination token; safe to call on a nil receiver.
func (m *ListJobsRequest) GetPageToken() string {
	if m != nil {
		return m.PageToken
	}
	return ""
}

// GetClusterName returns the cluster-name filter; safe to call on a nil receiver.
func (m *ListJobsRequest) GetClusterName() string {
	if m != nil {
		return m.ClusterName
	}
	return ""
}

// GetJobStateMatcher returns the state matcher, or ALL for a nil receiver.
func (m *ListJobsRequest) GetJobStateMatcher() ListJobsRequest_JobStateMatcher {
	if m != nil {
		return m.JobStateMatcher
	}
	return ListJobsRequest_ALL
}

// GetFilter returns the filter expression; safe to call on a nil receiver.
func (m *ListJobsRequest) GetFilter() string {
	if m != nil {
		return m.Filter
	}
	return ""
}
// A request to update a job.
type UpdateJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,2,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,3,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// Required. The changes to the job.
	Job *Job `protobuf:"bytes,4,opt,name=job,proto3" json:"job,omitempty"`
	// Required. Specifies the path, relative to <code>Job</code>, of
	// the field to update. For example, to update the labels of a Job the
	// <code>update_mask</code> parameter would be specified as
	// <code>labels</code>, and the `PATCH` request body would specify the new
	// value. <strong>Note:</strong> Currently, <code>labels</code> is the only
	// field that can be updated.
	UpdateMask *field_mask.FieldMask `protobuf:"bytes,5,opt,name=update_mask,json=updateMask,proto3" json:"update_mask,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *UpdateJobRequest) Reset() { *m = UpdateJobRequest{} }

// String returns the compact text representation of the message.
func (m *UpdateJobRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks UpdateJobRequest as a protobuf message.
func (*UpdateJobRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*UpdateJobRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{17}
}

// XXX_Unmarshal decodes b into m.
func (m *UpdateJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_UpdateJobRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *UpdateJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_UpdateJobRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *UpdateJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_UpdateJobRequest.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *UpdateJobRequest) XXX_Size() int {
	return xxx_messageInfo_UpdateJobRequest.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *UpdateJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_UpdateJobRequest.DiscardUnknown(m)
}

// xxx_messageInfo_UpdateJobRequest caches reflection info for UpdateJobRequest.
var xxx_messageInfo_UpdateJobRequest proto.InternalMessageInfo

// GetProjectId returns the project ID; safe to call on a nil receiver.
func (m *UpdateJobRequest) GetProjectId() string {
	if m != nil {
		return m.ProjectId
	}
	return ""
}

// GetRegion returns the region; safe to call on a nil receiver.
func (m *UpdateJobRequest) GetRegion() string {
	if m != nil {
		return m.Region
	}
	return ""
}

// GetJobId returns the job ID; safe to call on a nil receiver.
func (m *UpdateJobRequest) GetJobId() string {
	if m != nil {
		return m.JobId
	}
	return ""
}

// GetJob returns the job changes; safe to call on a nil receiver.
func (m *UpdateJobRequest) GetJob() *Job {
	if m != nil {
		return m.Job
	}
	return nil
}

// GetUpdateMask returns the field mask; safe to call on a nil receiver.
func (m *UpdateJobRequest) GetUpdateMask() *field_mask.FieldMask {
	if m != nil {
		return m.UpdateMask
	}
	return nil
}
// A list of jobs in a project.
type ListJobsResponse struct {
	// Output only. Jobs list.
	Jobs []*Job `protobuf:"bytes,1,rep,name=jobs,proto3" json:"jobs,omitempty"`
	// Optional. This token is included in the response if there are more results
	// to fetch. To fetch additional results, provide this value as the
	// `page_token` in a subsequent <code>ListJobsRequest</code>.
	NextPageToken string `protobuf:"bytes,2,opt,name=next_page_token,json=nextPageToken,proto3" json:"next_page_token,omitempty"`
	// Internal proto-runtime bookkeeping fields.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *ListJobsResponse) Reset() { *m = ListJobsResponse{} }

// String returns the compact text representation of the message.
func (m *ListJobsResponse) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks ListJobsResponse as a protobuf message.
func (*ListJobsResponse) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's index path.
func (*ListJobsResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{18}
}

// XXX_Unmarshal decodes b into m.
func (m *ListJobsResponse) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListJobsResponse.Unmarshal(m, b)
}

// XXX_Marshal appends the wire encoding of m to b.
func (m *ListJobsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListJobsResponse.Marshal(b, m, deterministic)
}

// XXX_Merge merges src into dst.
func (dst *ListJobsResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListJobsResponse.Merge(dst, src)
}

// XXX_Size returns the wire-encoded size of the message in bytes.
func (m *ListJobsResponse) XXX_Size() int {
	return xxx_messageInfo_ListJobsResponse.Size(m)
}

// XXX_DiscardUnknown drops fields not recognized during decoding.
func (m *ListJobsResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_ListJobsResponse.DiscardUnknown(m)
}

// xxx_messageInfo_ListJobsResponse caches reflection info for ListJobsResponse.
var xxx_messageInfo_ListJobsResponse proto.InternalMessageInfo

// GetJobs returns the jobs list; safe to call on a nil receiver.
func (m *ListJobsResponse) GetJobs() []*Job {
	if m != nil {
		return m.Jobs
	}
	return nil
}

// GetNextPageToken returns the pagination token; safe to call on a nil receiver.
func (m *ListJobsResponse) GetNextPageToken() string {
	if m != nil {
		return m.NextPageToken
	}
	return ""
}
// A request to cancel a job.
type CancelJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime;
	// do not access directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
// Reset restores the message to its zero value.
func (m *CancelJobRequest) Reset() { *m = CancelJobRequest{} }

// String returns the compact proto text representation of the message.
func (m *CancelJobRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks CancelJobRequest as a protobuf message.
func (*CancelJobRequest) ProtoMessage() {}

// Descriptor returns the gzipped FileDescriptorProto bytes for jobs.proto
// and the index path of this message within it.
func (*CancelJobRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{19}
}

// XXX_Unmarshal is an internal hook used by the proto runtime.
func (m *CancelJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_CancelJobRequest.Unmarshal(m, b)
}

// XXX_Marshal is an internal hook used by the proto runtime.
func (m *CancelJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_CancelJobRequest.Marshal(b, m, deterministic)
}

// XXX_Merge is an internal hook used by the proto runtime.
func (dst *CancelJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_CancelJobRequest.Merge(dst, src)
}

// XXX_Size is an internal hook used by the proto runtime.
func (m *CancelJobRequest) XXX_Size() int {
	return xxx_messageInfo_CancelJobRequest.Size(m)
}

// XXX_DiscardUnknown is an internal hook used by the proto runtime.
func (m *CancelJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_CancelJobRequest.DiscardUnknown(m)
}

// xxx_messageInfo_CancelJobRequest caches reflection info for the type.
var xxx_messageInfo_CancelJobRequest proto.InternalMessageInfo
  2340. func (m *CancelJobRequest) GetProjectId() string {
  2341. if m != nil {
  2342. return m.ProjectId
  2343. }
  2344. return ""
  2345. }
  2346. func (m *CancelJobRequest) GetRegion() string {
  2347. if m != nil {
  2348. return m.Region
  2349. }
  2350. return ""
  2351. }
  2352. func (m *CancelJobRequest) GetJobId() string {
  2353. if m != nil {
  2354. return m.JobId
  2355. }
  2356. return ""
  2357. }
// A request to delete a job.
type DeleteJobRequest struct {
	// Required. The ID of the Google Cloud Platform project that the job
	// belongs to.
	ProjectId string `protobuf:"bytes,1,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
	// Required. The Cloud Dataproc region in which to handle the request.
	Region string `protobuf:"bytes,3,opt,name=region,proto3" json:"region,omitempty"`
	// Required. The job ID.
	JobId string `protobuf:"bytes,2,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"`
	// XXX_* fields are internal bookkeeping used by the proto runtime;
	// do not access directly.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
// Reset restores the message to its zero value.
func (m *DeleteJobRequest) Reset() { *m = DeleteJobRequest{} }

// String returns the compact proto text representation of the message.
func (m *DeleteJobRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks DeleteJobRequest as a protobuf message.
func (*DeleteJobRequest) ProtoMessage() {}

// Descriptor returns the gzipped FileDescriptorProto bytes for jobs.proto
// and the index path of this message within it.
func (*DeleteJobRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_jobs_695072de36c8e540, []int{20}
}

// XXX_Unmarshal is an internal hook used by the proto runtime.
func (m *DeleteJobRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_DeleteJobRequest.Unmarshal(m, b)
}

// XXX_Marshal is an internal hook used by the proto runtime.
func (m *DeleteJobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_DeleteJobRequest.Marshal(b, m, deterministic)
}

// XXX_Merge is an internal hook used by the proto runtime.
func (dst *DeleteJobRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_DeleteJobRequest.Merge(dst, src)
}

// XXX_Size is an internal hook used by the proto runtime.
func (m *DeleteJobRequest) XXX_Size() int {
	return xxx_messageInfo_DeleteJobRequest.Size(m)
}

// XXX_DiscardUnknown is an internal hook used by the proto runtime.
func (m *DeleteJobRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_DeleteJobRequest.DiscardUnknown(m)
}

// xxx_messageInfo_DeleteJobRequest caches reflection info for the type.
var xxx_messageInfo_DeleteJobRequest proto.InternalMessageInfo
  2393. func (m *DeleteJobRequest) GetProjectId() string {
  2394. if m != nil {
  2395. return m.ProjectId
  2396. }
  2397. return ""
  2398. }
  2399. func (m *DeleteJobRequest) GetRegion() string {
  2400. if m != nil {
  2401. return m.Region
  2402. }
  2403. return ""
  2404. }
  2405. func (m *DeleteJobRequest) GetJobId() string {
  2406. if m != nil {
  2407. return m.JobId
  2408. }
  2409. return ""
  2410. }
// init registers every message type, map type, and enum generated from
// jobs.proto with the proto runtime under its fully-qualified name, so the
// types can be resolved by name (e.g. for Any unpacking and text/JSON
// marshaling). The registration strings must match the .proto definitions
// exactly.
func init() {
	proto.RegisterType((*LoggingConfig)(nil), "google.cloud.dataproc.v1.LoggingConfig")
	proto.RegisterMapType((map[string]LoggingConfig_Level)(nil), "google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry")
	proto.RegisterType((*HadoopJob)(nil), "google.cloud.dataproc.v1.HadoopJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HadoopJob.PropertiesEntry")
	proto.RegisterType((*SparkJob)(nil), "google.cloud.dataproc.v1.SparkJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkJob.PropertiesEntry")
	proto.RegisterType((*PySparkJob)(nil), "google.cloud.dataproc.v1.PySparkJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PySparkJob.PropertiesEntry")
	proto.RegisterType((*QueryList)(nil), "google.cloud.dataproc.v1.QueryList")
	proto.RegisterType((*HiveJob)(nil), "google.cloud.dataproc.v1.HiveJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry")
	proto.RegisterType((*SparkSqlJob)(nil), "google.cloud.dataproc.v1.SparkSqlJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry")
	proto.RegisterType((*PigJob)(nil), "google.cloud.dataproc.v1.PigJob")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.PropertiesEntry")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry")
	proto.RegisterType((*JobPlacement)(nil), "google.cloud.dataproc.v1.JobPlacement")
	proto.RegisterType((*JobStatus)(nil), "google.cloud.dataproc.v1.JobStatus")
	proto.RegisterType((*JobReference)(nil), "google.cloud.dataproc.v1.JobReference")
	proto.RegisterType((*YarnApplication)(nil), "google.cloud.dataproc.v1.YarnApplication")
	proto.RegisterType((*Job)(nil), "google.cloud.dataproc.v1.Job")
	proto.RegisterMapType((map[string]string)(nil), "google.cloud.dataproc.v1.Job.LabelsEntry")
	proto.RegisterType((*JobScheduling)(nil), "google.cloud.dataproc.v1.JobScheduling")
	proto.RegisterType((*SubmitJobRequest)(nil), "google.cloud.dataproc.v1.SubmitJobRequest")
	proto.RegisterType((*GetJobRequest)(nil), "google.cloud.dataproc.v1.GetJobRequest")
	proto.RegisterType((*ListJobsRequest)(nil), "google.cloud.dataproc.v1.ListJobsRequest")
	proto.RegisterType((*UpdateJobRequest)(nil), "google.cloud.dataproc.v1.UpdateJobRequest")
	proto.RegisterType((*ListJobsResponse)(nil), "google.cloud.dataproc.v1.ListJobsResponse")
	proto.RegisterType((*CancelJobRequest)(nil), "google.cloud.dataproc.v1.CancelJobRequest")
	proto.RegisterType((*DeleteJobRequest)(nil), "google.cloud.dataproc.v1.DeleteJobRequest")
	proto.RegisterEnum("google.cloud.dataproc.v1.LoggingConfig_Level", LoggingConfig_Level_name, LoggingConfig_Level_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_State", JobStatus_State_name, JobStatus_State_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.JobStatus_Substate", JobStatus_Substate_name, JobStatus_Substate_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.YarnApplication_State", YarnApplication_State_name, YarnApplication_State_value)
	proto.RegisterEnum("google.cloud.dataproc.v1.ListJobsRequest_JobStateMatcher", ListJobsRequest_JobStateMatcher_name, ListJobsRequest_JobStateMatcher_value)
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// It fails to build if the grpc package's support version differs.
const _ = grpc.SupportPackageIsVersion4
// JobControllerClient is the client API for JobController service.
//
// Each method issues a single unary RPC; errors returned by the server are
// surfaced as gRPC status errors.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type JobControllerClient interface {
	// Submits a job to a cluster.
	SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error)
	// Updates a job in a project.
	UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
	// or
	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error)
}
  2478. type jobControllerClient struct {
  2479. cc *grpc.ClientConn
  2480. }
  2481. func NewJobControllerClient(cc *grpc.ClientConn) JobControllerClient {
  2482. return &jobControllerClient{cc}
  2483. }
  2484. func (c *jobControllerClient) SubmitJob(ctx context.Context, in *SubmitJobRequest, opts ...grpc.CallOption) (*Job, error) {
  2485. out := new(Job)
  2486. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/SubmitJob", in, out, opts...)
  2487. if err != nil {
  2488. return nil, err
  2489. }
  2490. return out, nil
  2491. }
  2492. func (c *jobControllerClient) GetJob(ctx context.Context, in *GetJobRequest, opts ...grpc.CallOption) (*Job, error) {
  2493. out := new(Job)
  2494. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/GetJob", in, out, opts...)
  2495. if err != nil {
  2496. return nil, err
  2497. }
  2498. return out, nil
  2499. }
  2500. func (c *jobControllerClient) ListJobs(ctx context.Context, in *ListJobsRequest, opts ...grpc.CallOption) (*ListJobsResponse, error) {
  2501. out := new(ListJobsResponse)
  2502. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/ListJobs", in, out, opts...)
  2503. if err != nil {
  2504. return nil, err
  2505. }
  2506. return out, nil
  2507. }
  2508. func (c *jobControllerClient) UpdateJob(ctx context.Context, in *UpdateJobRequest, opts ...grpc.CallOption) (*Job, error) {
  2509. out := new(Job)
  2510. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/UpdateJob", in, out, opts...)
  2511. if err != nil {
  2512. return nil, err
  2513. }
  2514. return out, nil
  2515. }
  2516. func (c *jobControllerClient) CancelJob(ctx context.Context, in *CancelJobRequest, opts ...grpc.CallOption) (*Job, error) {
  2517. out := new(Job)
  2518. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/CancelJob", in, out, opts...)
  2519. if err != nil {
  2520. return nil, err
  2521. }
  2522. return out, nil
  2523. }
  2524. func (c *jobControllerClient) DeleteJob(ctx context.Context, in *DeleteJobRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
  2525. out := new(empty.Empty)
  2526. err := c.cc.Invoke(ctx, "/google.cloud.dataproc.v1.JobController/DeleteJob", in, out, opts...)
  2527. if err != nil {
  2528. return nil, err
  2529. }
  2530. return out, nil
  2531. }
// JobControllerServer is the server API for JobController service.
//
// Implementations are registered with a *grpc.Server via
// RegisterJobControllerServer; each method handles one unary RPC.
type JobControllerServer interface {
	// Submits a job to a cluster.
	SubmitJob(context.Context, *SubmitJobRequest) (*Job, error)
	// Gets the resource representation for a job in a project.
	GetJob(context.Context, *GetJobRequest) (*Job, error)
	// Lists regions/{region}/jobs in a project.
	ListJobs(context.Context, *ListJobsRequest) (*ListJobsResponse, error)
	// Updates a job in a project.
	UpdateJob(context.Context, *UpdateJobRequest) (*Job, error)
	// Starts a job cancellation request. To access the job resource
	// after cancellation, call
	// [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
	// or
	// [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
	CancelJob(context.Context, *CancelJobRequest) (*Job, error)
	// Deletes the job from the project. If the job is active, the delete fails,
	// and the response returns `FAILED_PRECONDITION`.
	DeleteJob(context.Context, *DeleteJobRequest) (*empty.Empty, error)
}
// RegisterJobControllerServer registers the JobController service
// implementation srv with the gRPC server s.
func RegisterJobControllerServer(s *grpc.Server, srv JobControllerServer) {
	s.RegisterService(&_JobController_serviceDesc, srv)
}
  2555. func _JobController_SubmitJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2556. in := new(SubmitJobRequest)
  2557. if err := dec(in); err != nil {
  2558. return nil, err
  2559. }
  2560. if interceptor == nil {
  2561. return srv.(JobControllerServer).SubmitJob(ctx, in)
  2562. }
  2563. info := &grpc.UnaryServerInfo{
  2564. Server: srv,
  2565. FullMethod: "/google.cloud.dataproc.v1.JobController/SubmitJob",
  2566. }
  2567. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2568. return srv.(JobControllerServer).SubmitJob(ctx, req.(*SubmitJobRequest))
  2569. }
  2570. return interceptor(ctx, in, info, handler)
  2571. }
  2572. func _JobController_GetJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2573. in := new(GetJobRequest)
  2574. if err := dec(in); err != nil {
  2575. return nil, err
  2576. }
  2577. if interceptor == nil {
  2578. return srv.(JobControllerServer).GetJob(ctx, in)
  2579. }
  2580. info := &grpc.UnaryServerInfo{
  2581. Server: srv,
  2582. FullMethod: "/google.cloud.dataproc.v1.JobController/GetJob",
  2583. }
  2584. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2585. return srv.(JobControllerServer).GetJob(ctx, req.(*GetJobRequest))
  2586. }
  2587. return interceptor(ctx, in, info, handler)
  2588. }
  2589. func _JobController_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2590. in := new(ListJobsRequest)
  2591. if err := dec(in); err != nil {
  2592. return nil, err
  2593. }
  2594. if interceptor == nil {
  2595. return srv.(JobControllerServer).ListJobs(ctx, in)
  2596. }
  2597. info := &grpc.UnaryServerInfo{
  2598. Server: srv,
  2599. FullMethod: "/google.cloud.dataproc.v1.JobController/ListJobs",
  2600. }
  2601. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2602. return srv.(JobControllerServer).ListJobs(ctx, req.(*ListJobsRequest))
  2603. }
  2604. return interceptor(ctx, in, info, handler)
  2605. }
  2606. func _JobController_UpdateJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2607. in := new(UpdateJobRequest)
  2608. if err := dec(in); err != nil {
  2609. return nil, err
  2610. }
  2611. if interceptor == nil {
  2612. return srv.(JobControllerServer).UpdateJob(ctx, in)
  2613. }
  2614. info := &grpc.UnaryServerInfo{
  2615. Server: srv,
  2616. FullMethod: "/google.cloud.dataproc.v1.JobController/UpdateJob",
  2617. }
  2618. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2619. return srv.(JobControllerServer).UpdateJob(ctx, req.(*UpdateJobRequest))
  2620. }
  2621. return interceptor(ctx, in, info, handler)
  2622. }
  2623. func _JobController_CancelJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2624. in := new(CancelJobRequest)
  2625. if err := dec(in); err != nil {
  2626. return nil, err
  2627. }
  2628. if interceptor == nil {
  2629. return srv.(JobControllerServer).CancelJob(ctx, in)
  2630. }
  2631. info := &grpc.UnaryServerInfo{
  2632. Server: srv,
  2633. FullMethod: "/google.cloud.dataproc.v1.JobController/CancelJob",
  2634. }
  2635. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2636. return srv.(JobControllerServer).CancelJob(ctx, req.(*CancelJobRequest))
  2637. }
  2638. return interceptor(ctx, in, info, handler)
  2639. }
  2640. func _JobController_DeleteJob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
  2641. in := new(DeleteJobRequest)
  2642. if err := dec(in); err != nil {
  2643. return nil, err
  2644. }
  2645. if interceptor == nil {
  2646. return srv.(JobControllerServer).DeleteJob(ctx, in)
  2647. }
  2648. info := &grpc.UnaryServerInfo{
  2649. Server: srv,
  2650. FullMethod: "/google.cloud.dataproc.v1.JobController/DeleteJob",
  2651. }
  2652. handler := func(ctx context.Context, req interface{}) (interface{}, error) {
  2653. return srv.(JobControllerServer).DeleteJob(ctx, req.(*DeleteJobRequest))
  2654. }
  2655. return interceptor(ctx, in, info, handler)
  2656. }
// _JobController_serviceDesc describes the JobController service to the grpc
// runtime: its fully-qualified name, the server interface type, and the
// mapping from unary method names to the handler adapters defined above.
// The service defines no streaming methods.
var _JobController_serviceDesc = grpc.ServiceDesc{
	ServiceName: "google.cloud.dataproc.v1.JobController",
	HandlerType: (*JobControllerServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "SubmitJob",
			Handler:    _JobController_SubmitJob_Handler,
		},
		{
			MethodName: "GetJob",
			Handler:    _JobController_GetJob_Handler,
		},
		{
			MethodName: "ListJobs",
			Handler:    _JobController_ListJobs_Handler,
		},
		{
			MethodName: "UpdateJob",
			Handler:    _JobController_UpdateJob_Handler,
		},
		{
			MethodName: "CancelJob",
			Handler:    _JobController_CancelJob_Handler,
		},
		{
			MethodName: "DeleteJob",
			Handler:    _JobController_DeleteJob_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "google/cloud/dataproc/v1/jobs.proto",
}
// init registers the raw (gzipped) file descriptor for jobs.proto with the
// proto runtime, keyed by the proto file path.
func init() {
	proto.RegisterFile("google/cloud/dataproc/v1/jobs.proto", fileDescriptor_jobs_695072de36c8e540)
}
  2692. var fileDescriptor_jobs_695072de36c8e540 = []byte{
  2693. // 2320 bytes of a gzipped FileDescriptorProto
  2694. 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x59, 0xcf, 0x73, 0x23, 0x47,
  2695. 0xf5, 0xb7, 0x7e, 0x6b, 0x9e, 0x6c, 0x79, 0xdc, 0xd9, 0xcd, 0x57, 0x5f, 0x25, 0xa9, 0x38, 0xb3,
  2696. 0x64, 0xf1, 0x2e, 0x20, 0x61, 0x05, 0x36, 0x1b, 0x1b, 0xd8, 0xc8, 0xd2, 0x78, 0x25, 0x47, 0x2b,
  2697. 0x6b, 0x47, 0xd2, 0x6e, 0x41, 0x15, 0x35, 0x3b, 0x92, 0xda, 0xf2, 0xd8, 0xa3, 0x99, 0xf1, 0xf4,
  2698. 0x8c, 0x6b, 0x95, 0xad, 0xbd, 0x70, 0xe1, 0x48, 0x01, 0xa7, 0x50, 0xc5, 0x85, 0x1b, 0x7f, 0x00,
  2699. 0x5c, 0x28, 0x8a, 0x0b, 0x67, 0x2e, 0x5c, 0xa9, 0x9c, 0x38, 0x72, 0xe2, 0x2f, 0xa0, 0xba, 0x7b,
  2700. 0x46, 0x96, 0x64, 0xeb, 0x87, 0x77, 0x21, 0x95, 0xe4, 0xe4, 0x9e, 0x7e, 0x3f, 0xfa, 0x75, 0x7f,
  2701. 0x3e, 0xfd, 0xde, 0x6b, 0x19, 0x6e, 0xf5, 0x2d, 0xab, 0x6f, 0xe0, 0x7c, 0xd7, 0xb0, 0xbc, 0x5e,
  2702. 0xbe, 0xa7, 0xb9, 0x9a, 0xed, 0x58, 0xdd, 0xfc, 0xf9, 0x76, 0xfe, 0xc4, 0xea, 0x90, 0x9c, 0xed,
  2703. 0x58, 0xae, 0x85, 0x32, 0x5c, 0x29, 0xc7, 0x94, 0x72, 0x81, 0x52, 0xee, 0x7c, 0x3b, 0xfb, 0xb6,
  2704. 0x6f, 0xae, 0xd9, 0x7a, 0x5e, 0x33, 0x4d, 0xcb, 0xd5, 0x5c, 0xdd, 0x32, 0x7d, 0xbb, 0xec, 0x5b,
  2705. 0xbe, 0x94, 0x7d, 0x75, 0xbc, 0xa3, 0x3c, 0x1e, 0xd8, 0xee, 0xd0, 0x17, 0x6e, 0x4e, 0x0b, 0x8f,
  2706. 0x74, 0x6c, 0xf4, 0xd4, 0x81, 0x46, 0x4e, 0x7d, 0x8d, 0x77, 0xa7, 0x35, 0x5c, 0x7d, 0x80, 0x89,
  2707. 0xab, 0x0d, 0x6c, 0xae, 0x20, 0x7d, 0x1e, 0x86, 0xb5, 0x9a, 0xd5, 0xef, 0xeb, 0x66, 0xbf, 0x64,
  2708. 0x99, 0x47, 0x7a, 0x1f, 0x1d, 0xc3, 0x46, 0xcf, 0xd1, 0xcf, 0xb1, 0xa3, 0x1a, 0x56, 0x5f, 0x35,
  2709. 0xf0, 0x39, 0x36, 0x48, 0x26, 0xbc, 0x19, 0xd9, 0x4a, 0x15, 0x7e, 0x90, 0x9b, 0xb5, 0x8b, 0xdc,
  2710. 0x84, 0x8f, 0x5c, 0x99, 0x39, 0xa8, 0x59, 0xfd, 0x1a, 0x33, 0x97, 0x4d, 0xd7, 0x19, 0x2a, 0xeb,
  2711. 0xbd, 0xc9, 0xd9, 0xec, 0x19, 0xdc, 0xb8, 0x4a, 0x11, 0x89, 0x10, 0x39, 0xc5, 0xc3, 0x4c, 0x68,
  2712. 0x33, 0xb4, 0x25, 0x28, 0x74, 0x88, 0x4a, 0x10, 0x3b, 0xd7, 0x0c, 0x0f, 0x67, 0xc2, 0x9b, 0xa1,
  2713. 0xad, 0x74, 0xe1, 0x3b, 0xcb, 0xc6, 0xc1, 0xbc, 0x2a, 0xdc, 0x76, 0x27, 0x7c, 0x3f, 0x24, 0xd9,
  2714. 0x10, 0x63, 0x73, 0xe8, 0x26, 0x6c, 0xd4, 0xe4, 0x27, 0x72, 0x4d, 0x6d, 0xd7, 0x9b, 0x0d, 0xb9,
  2715. 0x54, 0xdd, 0xaf, 0xca, 0x65, 0x71, 0x05, 0x25, 0x20, 0x52, 0xac, 0xd5, 0xc4, 0x10, 0x12, 0x20,
  2716. 0xd6, 0x52, 0x8a, 0x25, 0x59, 0x0c, 0xd3, 0x61, 0x59, 0xde, 0x6b, 0x3f, 0x14, 0x23, 0x28, 0x09,
  2717. 0xd1, 0x6a, 0x7d, 0xff, 0x50, 0x8c, 0xd2, 0xd1, 0xd3, 0xa2, 0x52, 0x17, 0x63, 0x54, 0x2c, 0x2b,
  2718. 0xca, 0xa1, 0x22, 0xc6, 0xe9, 0x70, 0xbf, 0xd8, 0x2a, 0xd6, 0xc4, 0x04, 0x75, 0x74, 0xb8, 0xbf,
  2719. 0x2f, 0x26, 0xa5, 0xbf, 0x44, 0x40, 0xa8, 0x68, 0x3d, 0xcb, 0xb2, 0x0f, 0xac, 0x0e, 0xfa, 0x16,
  2720. 0x6c, 0x0c, 0x34, 0xdd, 0x54, 0x4f, 0x34, 0x47, 0x3d, 0xd2, 0x0d, 0xac, 0x7a, 0x8e, 0xce, 0x37,
  2721. 0x5a, 0x59, 0x51, 0xd2, 0x54, 0x74, 0xa0, 0x39, 0xfb, 0xba, 0x81, 0xdb, 0x8e, 0x8e, 0xde, 0x05,
  2722. 0x60, 0xca, 0x5d, 0x43, 0x23, 0x84, 0x6d, 0x9d, 0x6a, 0x09, 0x74, 0xae, 0x44, 0xa7, 0x10, 0x82,
  2723. 0xa8, 0xe6, 0xf4, 0x49, 0x26, 0xb2, 0x19, 0xd9, 0x12, 0x14, 0x36, 0x46, 0x12, 0xac, 0x8d, 0x3b,
  2724. 0x27, 0x99, 0x28, 0x13, 0xa6, 0x4e, 0x46, 0x7e, 0x09, 0x7a, 0x0b, 0x84, 0x0b, 0x79, 0x8c, 0xc9,
  2725. 0x93, 0x47, 0x81, 0xf0, 0x3d, 0x58, 0xd5, 0x9c, 0xee, 0xb1, 0x7e, 0xee, 0xcb, 0xe3, 0xdc, 0xde,
  2726. 0x9f, 0x63, 0x2a, 0x4d, 0x00, 0xdb, 0xb1, 0x6c, 0xec, 0xb8, 0x3a, 0x26, 0x99, 0x04, 0xe3, 0xc6,
  2727. 0x07, 0xb3, 0x31, 0x19, 0x6d, 0x3f, 0xd7, 0x18, 0x59, 0x71, 0x4a, 0x8c, 0xb9, 0x41, 0x75, 0x48,
  2728. 0x1b, 0x1c, 0x3c, 0xb5, 0xcb, 0xd0, 0xcb, 0x24, 0x37, 0x43, 0x5b, 0xa9, 0xc2, 0x37, 0x97, 0x04,
  2729. 0x5b, 0x59, 0x33, 0xc6, 0x3f, 0xb3, 0x3f, 0x84, 0xf5, 0xa9, 0xe5, 0xae, 0x20, 0xd6, 0x8d, 0x71,
  2730. 0x62, 0x09, 0x63, 0x4c, 0xd9, 0x4b, 0x42, 0x9c, 0xf3, 0x55, 0xfa, 0x73, 0x04, 0x92, 0x4d, 0x5b,
  2731. 0x73, 0x4e, 0xbf, 0x3e, 0x00, 0x2a, 0x57, 0x00, 0x58, 0x98, 0x7d, 0xce, 0xc1, 0xee, 0xbf, 0x9a,
  2732. 0xf8, 0xfd, 0x35, 0x02, 0xd0, 0x18, 0x8e, 0x10, 0xcc, 0xc3, 0x0d, 0x06, 0x8a, 0x3d, 0x74, 0x8f,
  2733. 0x2d, 0x73, 0x0a, 0x44, 0x85, 0xa1, 0xdb, 0x60, 0xa2, 0x00, 0xc5, 0x00, 0xa4, 0xf0, 0x18, 0x48,
  2734. 0x5b, 0x20, 0x4e, 0xd9, 0x07, 0x20, 0xa6, 0xed, 0x71, 0xe3, 0x2f, 0x06, 0xce, 0xd6, 0x15, 0x70,
  2735. 0x7e, 0x6f, 0xf6, 0xb1, 0x5f, 0x1c, 0xc6, 0x57, 0x08, 0x50, 0xe9, 0x7d, 0x10, 0x1e, 0x7b, 0xd8,
  2736. 0x19, 0xd6, 0x74, 0xe2, 0xa2, 0x0c, 0x24, 0xce, 0x3c, 0xec, 0xd0, 0xed, 0x86, 0xd8, 0x79, 0x04,
  2737. 0x9f, 0xd2, 0x2f, 0xa2, 0x90, 0xa8, 0xe8, 0xe7, 0x98, 0x42, 0x7d, 0x1b, 0xd2, 0x74, 0x7a, 0x78,
  2738. 0xf9, 0xa6, 0xae, 0xb2, 0xf9, 0x00, 0xe1, 0x32, 0x00, 0xd7, 0x33, 0x74, 0xe2, 0xb2, 0x95, 0x53,
  2739. 0x85, 0x5b, 0xb3, 0x77, 0x39, 0x0a, 0x83, 0x5e, 0xe6, 0xb3, 0x51, 0x4c, 0x39, 0x78, 0xa3, 0x6b,
  2740. 0x99, 0xae, 0x6e, 0x7a, 0x58, 0xa5, 0xc4, 0xd0, 0x74, 0xc3, 0x73, 0x70, 0x26, 0xb2, 0x19, 0xda,
  2741. 0x4a, 0x2a, 0x1b, 0x81, 0xe8, 0xd0, 0xdc, 0xe7, 0x02, 0xa4, 0x81, 0x48, 0xba, 0x8e, 0x6e, 0xbb,
  2742. 0xea, 0xb9, 0xe6, 0xe8, 0x5a, 0xc7, 0xc0, 0x9c, 0x1c, 0xa9, 0xc2, 0xbd, 0x39, 0xb9, 0x94, 0x6f,
  2743. 0x2d, 0xd7, 0x64, 0x96, 0x4f, 0x02, 0x43, 0xbf, 0xc2, 0x92, 0xc9, 0x59, 0xf4, 0x78, 0x82, 0x18,
  2744. 0x31, 0xe6, 0x7c, 0x7b, 0xb1, 0xf3, 0x79, 0xac, 0xb8, 0xc4, 0xe7, 0xf8, 0x25, 0x3e, 0x67, 0xf7,
  2745. 0xe0, 0xc6, 0x55, 0xf1, 0x5d, 0x07, 0xee, 0xd7, 0xbd, 0xfe, 0xc2, 0x88, 0x20, 0xd2, 0x9f, 0xa2,
  2746. 0x90, 0x62, 0x84, 0x6f, 0x9e, 0x19, 0x5f, 0x3c, 0x2b, 0xf0, 0x15, 0x28, 0x47, 0x18, 0x10, 0x3b,
  2747. 0x0b, 0x12, 0x2e, 0x0f, 0x77, 0x49, 0xa4, 0xdb, 0x13, 0x48, 0x73, 0x1a, 0x7d, 0x7f, 0xb9, 0x05,
  2748. 0xae, 0x85, 0xf6, 0xfd, 0xcb, 0xd9, 0xeb, 0x72, 0x9e, 0x88, 0xbf, 0x56, 0x9e, 0xf8, 0x72, 0xb1,
  2749. 0xe7, 0x1f, 0x51, 0x88, 0x37, 0xf4, 0xfe, 0x97, 0x3f, 0x9d, 0x3c, 0x9b, 0x99, 0x4e, 0xe6, 0xf0,
  2750. 0x80, 0xef, 0x6c, 0x49, 0x8e, 0x35, 0xae, 0xc8, 0x26, 0xdf, 0x5d, 0xe8, 0xfb, 0x35, 0x93, 0xc9,
  2751. 0x15, 0xf4, 0x4a, 0x7c, 0x8d, 0xe8, 0xd5, 0x82, 0xd5, 0x03, 0xab, 0xd3, 0x30, 0xb4, 0x2e, 0x1e,
  2752. 0x60, 0xd3, 0xa5, 0xd5, 0xbe, 0x6b, 0x78, 0xc4, 0xc5, 0x8e, 0x6a, 0x6a, 0x03, 0xec, 0xfb, 0x4b,
  2753. 0xf9, 0x73, 0x75, 0x6d, 0x80, 0xc7, 0x55, 0x3c, 0x4f, 0xef, 0xf9, 0xee, 0x03, 0x95, 0xb6, 0xa7,
  2754. 0xf7, 0xa4, 0x7f, 0x45, 0x40, 0x38, 0xb0, 0x3a, 0x4d, 0x57, 0x73, 0x3d, 0x82, 0x1e, 0x40, 0x8c,
  2755. 0xb8, 0x9a, 0xcb, 0x9d, 0xa5, 0x0b, 0x77, 0x66, 0x1f, 0xdc, 0xc8, 0x26, 0x47, 0xff, 0x60, 0x85,
  2756. 0xdb, 0xd1, 0x6a, 0xdb, 0xc3, 0xae, 0xa6, 0x1b, 0x7e, 0x13, 0xab, 0x04, 0x9f, 0xa8, 0x0c, 0x22,
  2757. 0x53, 0x51, 0x89, 0xab, 0x39, 0xae, 0x4a, 0x5f, 0x97, 0xfe, 0xed, 0xcf, 0x06, 0xab, 0x04, 0x4f,
  2758. 0xcf, 0x5c, 0x2b, 0x78, 0x7a, 0x2a, 0x69, 0x66, 0xd3, 0xa4, 0x26, 0x74, 0x12, 0x55, 0x20, 0x49,
  2759. 0xbc, 0x0e, 0x8f, 0x31, 0xc1, 0x62, 0xfc, 0xf6, 0x52, 0x31, 0xfa, 0x36, 0xca, 0xc8, 0x5a, 0xfa,
  2760. 0x7d, 0x08, 0x62, 0x2c, 0x74, 0xfa, 0xc0, 0x6b, 0xb6, 0x8a, 0x2d, 0x79, 0xea, 0x81, 0x97, 0x82,
  2761. 0x44, 0x43, 0xae, 0x97, 0xab, 0xf5, 0x87, 0x62, 0x08, 0xa5, 0x01, 0x9a, 0x72, 0xab, 0xdd, 0x50,
  2762. 0xcb, 0x87, 0x75, 0x59, 0x4c, 0x52, 0xa1, 0xd2, 0xae, 0xd7, 0xa9, 0x30, 0x8c, 0x10, 0xa4, 0x4b,
  2763. 0xc5, 0x7a, 0x49, 0xae, 0xa9, 0x81, 0x41, 0x64, 0x6c, 0xae, 0xd9, 0x2a, 0x2a, 0x2d, 0xb9, 0x2c,
  2764. 0x26, 0xd0, 0x1a, 0x08, 0x7c, 0xae, 0x26, 0x97, 0xf9, 0xc3, 0x90, 0x79, 0x9b, 0x78, 0x18, 0xbe,
  2765. 0x01, 0xeb, 0xc5, 0x56, 0x4b, 0x7e, 0xd4, 0x68, 0xa9, 0xfb, 0xc5, 0x6a, 0xad, 0xad, 0xc8, 0xa2,
  2766. 0x20, 0x55, 0x20, 0x19, 0xec, 0x00, 0xad, 0x43, 0x6a, 0x32, 0xce, 0x35, 0x10, 0x9a, 0xed, 0xbd,
  2767. 0x47, 0xd5, 0x16, 0x5d, 0x24, 0x84, 0x00, 0xe2, 0x8f, 0xdb, 0x72, 0x5b, 0x2e, 0x8b, 0x61, 0x24,
  2768. 0xc2, 0x6a, 0xb3, 0x55, 0xac, 0xc9, 0x34, 0x86, 0x56, 0xbb, 0x29, 0x46, 0xa4, 0x32, 0x23, 0x91,
  2769. 0x82, 0x8f, 0xb0, 0x83, 0xcd, 0x2e, 0x46, 0xef, 0xb0, 0x8b, 0x7a, 0x82, 0xbb, 0xae, 0xaa, 0xf7,
  2770. 0x7c, 0x0a, 0x09, 0xfe, 0x4c, 0xb5, 0x87, 0x6e, 0x42, 0xfc, 0xc4, 0xea, 0xa8, 0x23, 0xea, 0xc4,
  2771. 0x4e, 0xac, 0x4e, 0xb5, 0x27, 0xfd, 0x21, 0x0c, 0xeb, 0x3f, 0xd6, 0x1c, 0xb3, 0x68, 0xdb, 0x86,
  2772. 0xde, 0x65, 0xbf, 0x42, 0xd0, 0xde, 0x77, 0x8c, 0x86, 0x6c, 0x8c, 0xe4, 0x80, 0x4e, 0xfc, 0x31,
  2773. 0x9e, 0x9f, 0x0d, 0xd5, 0x94, 0xb7, 0x49, 0x52, 0x65, 0x21, 0x69, 0x3b, 0x56, 0xdf, 0xc1, 0x84,
  2774. 0xb0, 0xa4, 0x16, 0x56, 0x46, 0xdf, 0x94, 0xe2, 0xae, 0xa3, 0x75, 0x4f, 0xe9, 0xa5, 0xf7, 0x1c,
  2775. 0x23, 0x13, 0xe5, 0x14, 0x0f, 0xe6, 0xda, 0x8e, 0x21, 0xfd, 0x7c, 0x11, 0xd2, 0x09, 0x88, 0xd4,
  2776. 0xe5, 0xa7, 0x1c, 0xe5, 0xba, 0xfc, 0x54, 0x6d, 0x16, 0x9f, 0x70, 0x60, 0x27, 0x8e, 0x36, 0x82,
  2777. 0x56, 0x21, 0x59, 0x2c, 0x95, 0xe4, 0x46, 0x8b, 0xc1, 0x37, 0x46, 0x81, 0x18, 0x15, 0xed, 0x57,
  2778. 0xeb, 0xd5, 0x66, 0x45, 0x2e, 0x8b, 0x71, 0x8a, 0x01, 0x05, 0x8f, 0x81, 0x0e, 0x10, 0xff, 0xa4,
  2779. 0xca, 0x10, 0x4f, 0x4a, 0xff, 0x4e, 0x42, 0x84, 0x96, 0x87, 0x32, 0x08, 0x4e, 0x00, 0x01, 0x3b,
  2780. 0xb0, 0x54, 0xe1, 0xf6, 0x5c, 0x1a, 0x8f, 0x00, 0x53, 0x2e, 0x0c, 0xa9, 0x17, 0x3b, 0xc8, 0x06,
  2781. 0x7e, 0xed, 0x98, 0xef, 0x65, 0x94, 0x3b, 0x94, 0x0b, 0x43, 0x5a, 0x82, 0x8e, 0xd9, 0xab, 0x5b,
  2782. 0x3d, 0xb1, 0x3a, 0xec, 0x78, 0xe7, 0x96, 0xa0, 0xd1, 0x0b, 0x9d, 0x96, 0xa0, 0xe3, 0xd1, 0xaf,
  2783. 0x15, 0x45, 0x10, 0x08, 0x6d, 0x14, 0x98, 0x93, 0x28, 0x73, 0x22, 0x2d, 0x7e, 0x25, 0x56, 0x56,
  2784. 0x94, 0x24, 0x09, 0x5e, 0x5b, 0x0f, 0x21, 0x65, 0x0f, 0x2f, 0x9c, 0xc4, 0x98, 0x93, 0x6f, 0x2c,
  2785. 0xf3, 0x36, 0xa9, 0xac, 0x28, 0xe0, 0x9b, 0x52, 0x47, 0x3f, 0x82, 0x24, 0x7b, 0x03, 0x51, 0x2f,
  2786. 0x3c, 0xc3, 0xbc, 0xb7, 0xb0, 0x91, 0xad, 0xac, 0x28, 0x89, 0x63, 0xff, 0x2d, 0xb0, 0x0b, 0x09,
  2787. 0x5b, 0xef, 0x33, 0x73, 0x5e, 0x3f, 0x36, 0x17, 0x55, 0xae, 0xca, 0x8a, 0x12, 0xb7, 0x79, 0xe5,
  2788. 0xff, 0x04, 0xd6, 0xf8, 0x1e, 0xc8, 0x99, 0xc1, 0x5c, 0xac, 0x32, 0x17, 0xef, 0x2f, 0xd5, 0x60,
  2789. 0x55, 0x56, 0x94, 0x14, 0x19, 0xeb, 0x3f, 0x77, 0x21, 0x4e, 0x58, 0x02, 0xf3, 0xdf, 0x53, 0xb7,
  2790. 0x96, 0xc8, 0x75, 0x8a, 0x6f, 0x82, 0x0e, 0x20, 0xcd, 0x47, 0xea, 0xb1, 0x4e, 0x5c, 0xcb, 0x19,
  2791. 0x66, 0xd6, 0x58, 0x1d, 0x5e, 0xca, 0xc9, 0x1a, 0x37, 0xad, 0x70, 0x4b, 0xf4, 0x04, 0x36, 0x86,
  2792. 0x9a, 0x63, 0xaa, 0xda, 0xc5, 0x15, 0x25, 0x19, 0x81, 0xb9, 0xbb, 0xb3, 0xf4, 0xa5, 0x56, 0xc4,
  2793. 0xe1, 0xe4, 0x04, 0x41, 0xbb, 0x90, 0xf5, 0x7f, 0x41, 0xb4, 0x3c, 0xd7, 0xf6, 0x5c, 0xd5, 0xc1,
  2794. 0xc4, 0xf2, 0x9c, 0x2e, 0xef, 0x99, 0x36, 0xd8, 0x5d, 0xfe, 0x3f, 0xae, 0x71, 0xc8, 0x14, 0x14,
  2795. 0x5f, 0x4e, 0x9b, 0xa7, 0x0f, 0x21, 0xe3, 0x1b, 0xd3, 0x16, 0xc7, 0xb1, 0x0c, 0xd6, 0x1d, 0x10,
  2796. 0x66, 0xba, 0xce, 0x4c, 0x6f, 0x72, 0x79, 0x89, 0x8b, 0x69, 0x9f, 0x40, 0xa8, 0x61, 0x11, 0xe2,
  2797. 0x86, 0xd6, 0xc1, 0x06, 0xc9, 0xa0, 0x45, 0x5b, 0xa0, 0x6d, 0x49, 0x8d, 0xe9, 0xf2, 0x96, 0xc4,
  2798. 0x37, 0x44, 0x0f, 0x01, 0x48, 0xf7, 0x18, 0xf7, 0x3c, 0x43, 0x37, 0xfb, 0x99, 0x1b, 0x8b, 0xda,
  2799. 0x0c, 0x7a, 0xb0, 0x23, 0x75, 0x65, 0xcc, 0x14, 0xfd, 0x3f, 0x24, 0x69, 0x86, 0x65, 0xe5, 0xf9,
  2800. 0x4d, 0x5e, 0x31, 0x4f, 0xac, 0x0e, 0x2d, 0xcd, 0xd9, 0x8f, 0x20, 0x35, 0xb6, 0xf4, 0xb5, 0xda,
  2801. 0x06, 0x80, 0xa4, 0x3b, 0xb4, 0xd9, 0x15, 0x90, 0xf6, 0x60, 0x6d, 0x62, 0x79, 0xb4, 0x0d, 0x37,
  2802. 0x07, 0xda, 0xf3, 0xa0, 0x4d, 0x24, 0xaa, 0x8d, 0x1d, 0xf5, 0xd8, 0xf2, 0x1c, 0xe6, 0x3a, 0xa6,
  2803. 0xa0, 0x81, 0xf6, 0xdc, 0xef, 0x14, 0x49, 0x03, 0x3b, 0x15, 0xcb, 0x73, 0xa4, 0xcf, 0x42, 0x20,
  2804. 0x36, 0xbd, 0xce, 0x40, 0x77, 0x59, 0x32, 0x3a, 0xf3, 0x30, 0x71, 0x17, 0xd5, 0x8e, 0x37, 0x21,
  2805. 0xee, 0xe0, 0xbe, 0x6e, 0x99, 0x2c, 0xa9, 0x08, 0x8a, 0xff, 0x85, 0xf2, 0x10, 0xa1, 0xf7, 0x82,
  2806. 0x27, 0xac, 0x77, 0xe6, 0xa7, 0x3d, 0xaa, 0x49, 0xd7, 0x71, 0xf8, 0x92, 0x74, 0x1d, 0x9e, 0xe0,
  2807. 0x05, 0x7f, 0xa6, 0xda, 0x93, 0x7e, 0x0a, 0x6b, 0x0f, 0xf1, 0x7f, 0x21, 0xae, 0x19, 0xb5, 0xee,
  2808. 0xf3, 0x30, 0xac, 0xd3, 0x2e, 0xfb, 0xc0, 0xea, 0x90, 0x6b, 0xaf, 0x10, 0x9f, 0x58, 0xe1, 0x2d,
  2809. 0x10, 0x6c, 0xad, 0x8f, 0x55, 0xa2, 0x7f, 0xca, 0x31, 0x8b, 0x29, 0x49, 0x3a, 0xd1, 0xd4, 0x3f,
  2810. 0xe5, 0x95, 0x98, 0x0a, 0x5d, 0xeb, 0x14, 0x07, 0xa1, 0x31, 0xf5, 0x16, 0x9d, 0xb8, 0xd4, 0xed,
  2811. 0x45, 0x2f, 0x77, 0x7b, 0x18, 0x36, 0xe8, 0x06, 0x78, 0x97, 0x35, 0xd0, 0xdc, 0xee, 0x31, 0x76,
  2812. 0x58, 0x1a, 0x4d, 0x17, 0x3e, 0x9a, 0xd3, 0x01, 0x4f, 0xee, 0x2d, 0xc8, 0x01, 0xf8, 0x11, 0x77,
  2813. 0xa0, 0xac, 0x9f, 0x4c, 0x4e, 0xd0, 0xdd, 0x1d, 0xe9, 0x86, 0x8b, 0x1d, 0x96, 0x1d, 0x05, 0xc5,
  2814. 0xff, 0x92, 0xee, 0xc1, 0xfa, 0x94, 0x6d, 0xf0, 0x1b, 0xf9, 0x0a, 0x2d, 0x82, 0xc5, 0x52, 0xab,
  2815. 0xfa, 0x44, 0xf6, 0x8b, 0xec, 0x61, 0x5d, 0xf5, 0xbf, 0xc3, 0xd2, 0xdf, 0x42, 0x20, 0xb6, 0xed,
  2816. 0x9e, 0xe6, 0xe2, 0x57, 0xc1, 0x30, 0x3c, 0x03, 0xc3, 0xc8, 0x18, 0x86, 0x01, 0xe5, 0xa2, 0x4b,
  2817. 0x53, 0x6e, 0x17, 0x52, 0x1e, 0x0b, 0x89, 0xfd, 0x87, 0xc4, 0xaf, 0x45, 0x97, 0xfb, 0xd4, 0x7d,
  2818. 0x1d, 0x1b, 0xbd, 0x47, 0x1a, 0x39, 0x55, 0x80, 0xab, 0xd3, 0xb1, 0x34, 0x00, 0xf1, 0xe2, 0x50,
  2819. 0x89, 0x6d, 0x99, 0x04, 0xa3, 0x6d, 0x88, 0x9e, 0x58, 0x1d, 0xfe, 0x13, 0xd4, 0xc2, 0x10, 0x98,
  2820. 0x2a, 0xba, 0x0d, 0xeb, 0x26, 0x7e, 0xee, 0xaa, 0x63, 0xac, 0xe0, 0x9b, 0x5d, 0xa3, 0xd3, 0x8d,
  2821. 0x80, 0x19, 0xd2, 0x33, 0x10, 0x4b, 0x9a, 0xd9, 0xc5, 0xc6, 0xff, 0xec, 0x0a, 0x3c, 0x03, 0xb1,
  2822. 0x8c, 0x0d, 0xfc, 0x6a, 0x00, 0x2d, 0xb3, 0x42, 0xe1, 0x8f, 0x09, 0x96, 0xa4, 0xfc, 0x44, 0x6d,
  2823. 0x60, 0x07, 0x7d, 0x16, 0x02, 0x61, 0x94, 0x71, 0xd0, 0xdd, 0x39, 0xe5, 0x73, 0x2a, 0x2d, 0x65,
  2824. 0xe7, 0x1f, 0xae, 0x54, 0xfc, 0xd9, 0xdf, 0xff, 0xf9, 0xeb, 0xf0, 0xae, 0x74, 0x2f, 0x7f, 0xbe,
  2825. 0x9d, 0xf7, 0x03, 0x26, 0xf9, 0x17, 0x17, 0x9b, 0x79, 0x99, 0xe7, 0xb1, 0x92, 0xfc, 0x0b, 0x3e,
  2826. 0x78, 0xc9, 0xfe, 0x3b, 0xb7, 0x43, 0xd8, 0x42, 0x3b, 0xa1, 0xbb, 0xe8, 0x57, 0x21, 0x88, 0xf3,
  2827. 0x94, 0x83, 0xe6, 0xe4, 0xfc, 0x89, 0xa4, 0xb4, 0x28, 0xaa, 0x8f, 0x59, 0x54, 0x3b, 0xe8, 0xfe,
  2828. 0x35, 0xa3, 0xca, 0xbf, 0xe0, 0xc7, 0xf9, 0x12, 0xfd, 0x26, 0x04, 0xc9, 0x80, 0x76, 0xe8, 0xce,
  2829. 0xd2, 0xf7, 0x3d, 0x7b, 0x77, 0x19, 0x55, 0xce, 0x62, 0xe9, 0x43, 0x16, 0xe5, 0x36, 0xca, 0x5f,
  2830. 0x33, 0x4a, 0xf4, 0xdb, 0x10, 0x08, 0xa3, 0x3b, 0x3e, 0x0f, 0xcd, 0xe9, 0x44, 0xb0, 0xe8, 0xdc,
  2831. 0x64, 0x16, 0xd1, 0x83, 0xc2, 0x2b, 0x9f, 0xdb, 0x0e, 0xbb, 0xef, 0xbf, 0x0b, 0x81, 0x30, 0xba,
  2832. 0x44, 0xf3, 0xe2, 0x9b, 0xbe, 0x69, 0x8b, 0xe2, 0x3b, 0x60, 0xf1, 0x95, 0xa5, 0x07, 0xaf, 0x1c,
  2833. 0x5f, 0x97, 0xad, 0x48, 0x69, 0xf7, 0xcb, 0x10, 0x08, 0xa3, 0x7b, 0x38, 0x2f, 0xc8, 0xe9, 0xcb,
  2834. 0x9a, 0x7d, 0xf3, 0x52, 0xe6, 0x92, 0x07, 0xb6, 0x3b, 0x0c, 0x58, 0x77, 0xf7, 0x95, 0x4f, 0x6f,
  2835. 0x6f, 0x00, 0x6f, 0x77, 0xad, 0xc1, 0xcc, 0x50, 0xf6, 0x04, 0xca, 0x9f, 0x06, 0x5d, 0xb5, 0x11,
  2836. 0xfa, 0xc9, 0xc7, 0xbe, 0x5a, 0xdf, 0x32, 0x34, 0xb3, 0x9f, 0xb3, 0x9c, 0x7e, 0xbe, 0x8f, 0x4d,
  2837. 0x16, 0x53, 0x9e, 0x8b, 0x34, 0x5b, 0x27, 0x97, 0xff, 0x3b, 0xbe, 0x1b, 0x8c, 0x3b, 0x71, 0xa6,
  2838. 0xfc, 0xc1, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa1, 0x58, 0x75, 0xc0, 0x49, 0x1f, 0x00, 0x00,
  2839. }