# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/dataproc_v1beta2/proto/jobs.proto
import sys
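# On Python 3, _b re-encodes the descriptor source string below to latin-1
# bytes so the serialized file descriptor round-trips byte-for-byte; on
# Python 2 the literal is already bytes, so _b is a no-op.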
_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/cloud/dataproc_v1beta2/proto/jobs.proto",
package="google.cloud.dataproc.v1beta2",
syntax="proto3",
serialized_options=_b(
"\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"
),
serialized_pb=_b(
'\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xda\x02\n\nPySparkJob\x12\x1c\n\x14main_python_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\tQueryList\x12\x0f\n\x07queries\x18\x01 \x03(\t"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 '
'\x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa2\x02\n\tSparkRJob\x12\x17\n\x0fmain_r_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x0cJobPlacement\x12\x14\n\x0c\x63luster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"2\n\x0cJobReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x96\x02\n\x0fYarnApplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.State\x12\x10\n\x08progress\x18\x03 \x01(\x02\x12\x14\n\x0ctracking_url\x18\x04 \x01(\t"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb3\x08\n\x03Job\x12>\n\treference\x18\x01 '
'\x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12>\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacement\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"{\n\x10SubmitJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12/\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x12\n\nrequest_id\x18\x04 \x01(\t"C\n\rGetJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x95\x02\n\x0fListJobsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xa8\x01\n\x10UpdateJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12/\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12/\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x10\x43\x61ncelJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"F\n\x10\x44\x65leteJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 '
'\x01(\t2\x8b\x08\n\rJobController\x12\xa8\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"F\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\x12\xa1\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"E\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\x12\xa9\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xb1\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"O\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\x12\x9b\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"E\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}Bw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3'
),
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,
google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,
google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,
],
)
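# A minimal usage sketch (not part of the protoc output): the concrete
# message classes (HadoopJob, Job, ...) are synthesized from the descriptors
# below, later in this module. Assuming the module imports as `jobs_pb2`, a
# job payload round-trips like so (the gs:// URIs are hypothetical):
#
#   from google.cloud.dataproc_v1beta2.proto import jobs_pb2
#
#   job = jobs_pb2.HadoopJob(
#       main_jar_file_uri="gs://my-bucket/wordcount.jar",
#       args=["gs://my-bucket/input/", "gs://my-bucket/output/"],
#   )
#   payload = job.SerializeToString()            # wire-format bytes
#   restored = jobs_pb2.HadoopJob.FromString(payload)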
_LOGGINGCONFIG_LEVEL = _descriptor.EnumDescriptor(
name="Level",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig.Level",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="LEVEL_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="ALL", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="TRACE", index=2, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="DEBUG", index=3, number=3, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="INFO", index=4, number=4, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="WARN", index=5, number=5, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ERROR", index=6, number=6, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="FATAL", index=7, number=7, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="OFF", index=8, number=8, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=427,
serialized_end=539,
)
_sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL)
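# Sketch: driver_log_levels is a map<string, Level>, so a log level is set
# per logger-name key using the enum numbers registered above (the logger
# name is hypothetical):
#
#   config = jobs_pb2.LoggingConfig()
#   config.driver_log_levels["org.apache.hadoop"] = jobs_pb2.LoggingConfig.DEBUG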
_JOBSTATUS_STATE = _descriptor.EnumDescriptor(
name="State",
full_name="google.cloud.dataproc.v1beta2.JobStatus.State",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="STATE_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="PENDING", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SETUP_DONE", index=2, number=8, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="RUNNING", index=3, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="CANCEL_PENDING", index=4, number=3, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="CANCEL_STARTED", index=5, number=7, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="CANCELLED", index=6, number=4, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="DONE", index=7, number=5, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ERROR", index=8, number=6, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ATTEMPT_FAILURE",
index=9,
number=9,
serialized_options=None,
type=None,
),
],
containing_type=None,
serialized_options=None,
serialized_start=3618,
serialized_end=3787,
)
_sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE)
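# Sketch: a fetched Job reports one of the State numbers above (assuming a
# `job` message instance obtained from the service):
#
#   if job.status.state == jobs_pb2.JobStatus.DONE:
#       ...  # job completed
#   elif job.status.state == jobs_pb2.JobStatus.ERROR:
#       ...  # inspect job.status.details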
_JOBSTATUS_SUBSTATE = _descriptor.EnumDescriptor(
name="Substate",
full_name="google.cloud.dataproc.v1beta2.JobStatus.Substate",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SUBMITTED", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="QUEUED", index=2, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="STALE_STATUS", index=3, number=3, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=3789,
serialized_end=3861,
)
_sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE)
_YARNAPPLICATION_STATE = _descriptor.EnumDescriptor(
name="State",
full_name="google.cloud.dataproc.v1beta2.YarnApplication.State",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="STATE_UNSPECIFIED",
index=0,
number=0,
serialized_options=None,
type=None,
),
_descriptor.EnumValueDescriptor(
name="NEW", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="NEW_SAVING", index=2, number=2, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="SUBMITTED", index=3, number=3, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ACCEPTED", index=4, number=4, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="RUNNING", index=5, number=5, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="FINISHED", index=6, number=6, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="FAILED", index=7, number=7, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="KILLED", index=8, number=8, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=4059,
serialized_end=4194,
)
_sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE)
_LISTJOBSREQUEST_JOBSTATEMATCHER = _descriptor.EnumDescriptor(
name="JobStateMatcher",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher",
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name="ALL", index=0, number=0, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="ACTIVE", index=1, number=1, serialized_options=None, type=None
),
_descriptor.EnumValueDescriptor(
name="NON_ACTIVE", index=2, number=2, serialized_options=None, type=None
),
],
containing_type=None,
serialized_options=None,
serialized_start=5740,
serialized_end=5794,
)
_sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER)
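# Sketch: JobStateMatcher filters ListJobs server-side (the project and
# region values are hypothetical):
#
#   request = jobs_pb2.ListJobsRequest(
#       project_id="my-project",
#       region="us-central1",
#       job_state_matcher=jobs_pb2.ListJobsRequest.ACTIVE,
#   )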
_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY = _descriptor.Descriptor(
name="DriverLogLevelsEntry",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry.value",
index=1,
number=2,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=319,
serialized_end=425,
)
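# DriverLogLevelsEntry is the synthetic nested message protoc emits for the
# map<string, Level> field; serialized_options=_b("8\001") is the encoded
# map_entry=true option marking it as a map entry rather than a
# user-declared type. Byte-identical map entries below (e.g. every
# string->string PropertiesEntry) report the same
# serialized_start/serialized_end, which appears to be protoc resolving a
# nested type's range to the first matching byte span in serialized_pb.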
_LOGGINGCONFIG = _descriptor.Descriptor(
name="LoggingConfig",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="driver_log_levels",
full_name="google.cloud.dataproc.v1beta2.LoggingConfig.driver_log_levels",
index=0,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY],
enum_types=[_LOGGINGCONFIG_LEVEL],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=208,
serialized_end=539,
)
_HADOOPJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_HADOOPJOB = _descriptor.Descriptor(
name="HadoopJob",
full_name="google.cloud.dataproc.v1beta2.HadoopJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="main_jar_file_uri",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.main_jar_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="main_class",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.main_class",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="args",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.args",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.jar_file_uris",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="file_uris",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.file_uris",
index=4,
number=5,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="archive_uris",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.archive_uris",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.properties",
index=6,
number=7,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.logging_config",
index=7,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_HADOOPJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="driver",
full_name="google.cloud.dataproc.v1beta2.HadoopJob.driver",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=542,
serialized_end=891,
)
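# The `driver` oneof above makes main_jar_file_uri and main_class mutually
# exclusive: assigning one clears the other. Sketch (hypothetical class
# name):
#
#   job = jobs_pb2.HadoopJob(main_class="org.example.WordCount")
#   job.WhichOneof("driver")  # -> "main_class"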
_SPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_SPARKJOB = _descriptor.Descriptor(
name="SparkJob",
full_name="google.cloud.dataproc.v1beta2.SparkJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="main_jar_file_uri",
full_name="google.cloud.dataproc.v1beta2.SparkJob.main_jar_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="main_class",
full_name="google.cloud.dataproc.v1beta2.SparkJob.main_class",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="args",
full_name="google.cloud.dataproc.v1beta2.SparkJob.args",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.SparkJob.jar_file_uris",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="file_uris",
full_name="google.cloud.dataproc.v1beta2.SparkJob.file_uris",
index=4,
number=5,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="archive_uris",
full_name="google.cloud.dataproc.v1beta2.SparkJob.archive_uris",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.SparkJob.properties",
index=6,
number=7,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.SparkJob.logging_config",
index=7,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_SPARKJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="driver",
full_name="google.cloud.dataproc.v1beta2.SparkJob.driver",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=894,
serialized_end=1241,
)
_PYSPARKJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_PYSPARKJOB = _descriptor.Descriptor(
name="PySparkJob",
full_name="google.cloud.dataproc.v1beta2.PySparkJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="main_python_file_uri",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.main_python_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="args",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.args",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="python_file_uris",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.python_file_uris",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.jar_file_uris",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="file_uris",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.file_uris",
index=4,
number=5,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="archive_uris",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.archive_uris",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.properties",
index=6,
number=7,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.PySparkJob.logging_config",
index=7,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_PYSPARKJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1244,
serialized_end=1590,
)
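# Sketch (hypothetical URIs): a PySparkJob names one entry-point script and,
# optionally, extra Python files distributed alongside it:
#
#   job = jobs_pb2.PySparkJob(
#       main_python_file_uri="gs://my-bucket/main.py",
#       python_file_uris=["gs://my-bucket/helpers.py"],
#   )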
_QUERYLIST = _descriptor.Descriptor(
name="QueryList",
full_name="google.cloud.dataproc.v1beta2.QueryList",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="queries",
full_name="google.cloud.dataproc.v1beta2.QueryList.queries",
index=0,
number=1,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1592,
serialized_end=1620,
)
_HIVEJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor(
name="ScriptVariablesEntry",
full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1939,
serialized_end=1993,
)
_HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_HIVEJOB = _descriptor.Descriptor(
name="HiveJob",
full_name="google.cloud.dataproc.v1beta2.HiveJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="query_file_uri",
full_name="google.cloud.dataproc.v1beta2.HiveJob.query_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="query_list",
full_name="google.cloud.dataproc.v1beta2.HiveJob.query_list",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="continue_on_failure",
full_name="google.cloud.dataproc.v1beta2.HiveJob.continue_on_failure",
index=2,
number=3,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="script_variables",
full_name="google.cloud.dataproc.v1beta2.HiveJob.script_variables",
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.HiveJob.properties",
index=4,
number=5,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.HiveJob.jar_file_uris",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="queries",
full_name="google.cloud.dataproc.v1beta2.HiveJob.queries",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=1623,
serialized_end=2055,
)
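# The `queries` oneof mirrors the `driver` oneof on HadoopJob/SparkJob: a
# HiveJob carries either a query_file_uri or an inline QueryList, never
# both. Sketch:
#
#   job = jobs_pb2.HiveJob(
#       query_list=jobs_pb2.QueryList(queries=["SHOW TABLES;"]),
#   )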
_SPARKSQLJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor(
name="ScriptVariablesEntry",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1939,
serialized_end=1993,
)
_SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_SPARKSQLJOB = _descriptor.Descriptor(
name="SparkSqlJob",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="query_file_uri",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.query_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="query_list",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.query_list",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="script_variables",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.script_variables",
index=2,
number=3,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.properties",
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.jar_file_uris",
index=4,
number=56,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.logging_config",
index=5,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="queries",
full_name="google.cloud.dataproc.v1beta2.SparkSqlJob.queries",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=2058,
serialized_end=2543,
)
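# Note: SparkSqlJob.jar_file_uris is field number 56 (encoded as \x18\x38 in
# the serialized descriptor above), unlike the other job types, which use
# field number 6; this matches the .proto definition.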
_PIGJOB_SCRIPTVARIABLESENTRY = _descriptor.Descriptor(
name="ScriptVariablesEntry",
full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=1939,
serialized_end=1993,
)
_PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_PIGJOB = _descriptor.Descriptor(
name="PigJob",
full_name="google.cloud.dataproc.v1beta2.PigJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="query_file_uri",
full_name="google.cloud.dataproc.v1beta2.PigJob.query_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="query_list",
full_name="google.cloud.dataproc.v1beta2.PigJob.query_list",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="continue_on_failure",
full_name="google.cloud.dataproc.v1beta2.PigJob.continue_on_failure",
index=2,
number=3,
type=8,
cpp_type=7,
label=1,
has_default_value=False,
default_value=False,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="script_variables",
full_name="google.cloud.dataproc.v1beta2.PigJob.script_variables",
index=3,
number=4,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.PigJob.properties",
index=4,
number=5,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="jar_file_uris",
full_name="google.cloud.dataproc.v1beta2.PigJob.jar_file_uris",
index=5,
number=6,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.PigJob.logging_config",
index=6,
number=7,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="queries",
full_name="google.cloud.dataproc.v1beta2.PigJob.queries",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=2546,
serialized_end=3045,
)
_SPARKRJOB_PROPERTIESENTRY = _descriptor.Descriptor(
name="PropertiesEntry",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=832,
serialized_end=881,
)
_SPARKRJOB = _descriptor.Descriptor(
name="SparkRJob",
full_name="google.cloud.dataproc.v1beta2.SparkRJob",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="main_r_file_uri",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.main_r_file_uri",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="args",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.args",
index=1,
number=2,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="file_uris",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.file_uris",
index=2,
number=3,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="archive_uris",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.archive_uris",
index=3,
number=4,
type=9,
cpp_type=9,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="properties",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.properties",
index=4,
number=5,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="logging_config",
full_name="google.cloud.dataproc.v1beta2.SparkRJob.logging_config",
index=5,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_SPARKRJOB_PROPERTIESENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3048,
serialized_end=3338,
)
_JOBPLACEMENT = _descriptor.Descriptor(
name="JobPlacement",
full_name="google.cloud.dataproc.v1beta2.JobPlacement",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="cluster_name",
full_name="google.cloud.dataproc.v1beta2.JobPlacement.cluster_name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="cluster_uuid",
full_name="google.cloud.dataproc.v1beta2.JobPlacement.cluster_uuid",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3340,
serialized_end=3398,
)
_JOBSTATUS = _descriptor.Descriptor(
name="JobStatus",
full_name="google.cloud.dataproc.v1beta2.JobStatus",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="state",
full_name="google.cloud.dataproc.v1beta2.JobStatus.state",
index=0,
number=1,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="details",
full_name="google.cloud.dataproc.v1beta2.JobStatus.details",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="state_start_time",
full_name="google.cloud.dataproc.v1beta2.JobStatus.state_start_time",
index=2,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="substate",
full_name="google.cloud.dataproc.v1beta2.JobStatus.substate",
index=3,
number=7,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3401,
serialized_end=3861,
)
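# Sketch: state_start_time is a google.protobuf.Timestamp, whose well-known
# type exposes a ToDatetime() helper (assuming a populated `status`
# message):
#
#   if status.HasField("state_start_time"):
#       started_at = status.state_start_time.ToDatetime()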
_JOBREFERENCE = _descriptor.Descriptor(
name="JobReference",
full_name="google.cloud.dataproc.v1beta2.JobReference",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.JobReference.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_id",
full_name="google.cloud.dataproc.v1beta2.JobReference.job_id",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3863,
serialized_end=3913,
)
_YARNAPPLICATION = _descriptor.Descriptor(
name="YarnApplication",
full_name="google.cloud.dataproc.v1beta2.YarnApplication",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.cloud.dataproc.v1beta2.YarnApplication.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="state",
full_name="google.cloud.dataproc.v1beta2.YarnApplication.state",
index=1,
number=2,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="progress",
full_name="google.cloud.dataproc.v1beta2.YarnApplication.progress",
index=2,
number=3,
type=2,
cpp_type=6,
label=1,
has_default_value=False,
default_value=float(0),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="tracking_url",
full_name="google.cloud.dataproc.v1beta2.YarnApplication.tracking_url",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_YARNAPPLICATION_STATE],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=3916,
serialized_end=4194,
)
_JOB_LABELSENTRY = _descriptor.Descriptor(
name="LabelsEntry",
full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="value",
full_name="google.cloud.dataproc.v1beta2.Job.LabelsEntry.value",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=_b("8\001"),
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5215,
serialized_end=5260,
)
_JOB = _descriptor.Descriptor(
name="Job",
full_name="google.cloud.dataproc.v1beta2.Job",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="reference",
full_name="google.cloud.dataproc.v1beta2.Job.reference",
index=0,
number=1,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="placement",
full_name="google.cloud.dataproc.v1beta2.Job.placement",
index=1,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="hadoop_job",
full_name="google.cloud.dataproc.v1beta2.Job.hadoop_job",
index=2,
number=3,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="spark_job",
full_name="google.cloud.dataproc.v1beta2.Job.spark_job",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="pyspark_job",
full_name="google.cloud.dataproc.v1beta2.Job.pyspark_job",
index=4,
number=5,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="hive_job",
full_name="google.cloud.dataproc.v1beta2.Job.hive_job",
index=5,
number=6,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="pig_job",
full_name="google.cloud.dataproc.v1beta2.Job.pig_job",
index=6,
number=7,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="spark_r_job",
full_name="google.cloud.dataproc.v1beta2.Job.spark_r_job",
index=7,
number=21,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="spark_sql_job",
full_name="google.cloud.dataproc.v1beta2.Job.spark_sql_job",
index=8,
number=12,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="status",
full_name="google.cloud.dataproc.v1beta2.Job.status",
index=9,
number=8,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="status_history",
full_name="google.cloud.dataproc.v1beta2.Job.status_history",
index=10,
number=13,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="yarn_applications",
full_name="google.cloud.dataproc.v1beta2.Job.yarn_applications",
index=11,
number=9,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="submitted_by",
full_name="google.cloud.dataproc.v1beta2.Job.submitted_by",
index=12,
number=10,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="driver_output_resource_uri",
full_name="google.cloud.dataproc.v1beta2.Job.driver_output_resource_uri",
index=13,
number=17,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="driver_control_files_uri",
full_name="google.cloud.dataproc.v1beta2.Job.driver_control_files_uri",
index=14,
number=15,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="labels",
full_name="google.cloud.dataproc.v1beta2.Job.labels",
index=15,
number=18,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="scheduling",
full_name="google.cloud.dataproc.v1beta2.Job.scheduling",
index=16,
number=20,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_uuid",
full_name="google.cloud.dataproc.v1beta2.Job.job_uuid",
index=17,
number=22,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[_JOB_LABELSENTRY],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name="type_job",
full_name="google.cloud.dataproc.v1beta2.Job.type_job",
index=0,
containing_type=None,
fields=[],
)
],
serialized_start=4197,
serialized_end=5272,
)
_JOBSCHEDULING = _descriptor.Descriptor(
name="JobScheduling",
full_name="google.cloud.dataproc.v1beta2.JobScheduling",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="max_failures_per_hour",
full_name="google.cloud.dataproc.v1beta2.JobScheduling.max_failures_per_hour",
index=0,
number=1,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
)
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5274,
serialized_end=5320,
)
_SUBMITJOBREQUEST = _descriptor.Descriptor(
name="SubmitJobRequest",
full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.region",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job",
full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.job",
index=2,
number=2,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="request_id",
full_name="google.cloud.dataproc.v1beta2.SubmitJobRequest.request_id",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5322,
serialized_end=5445,
)
_GETJOBREQUEST = _descriptor.Descriptor(
name="GetJobRequest",
full_name="google.cloud.dataproc.v1beta2.GetJobRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.GetJobRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.GetJobRequest.region",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_id",
full_name="google.cloud.dataproc.v1beta2.GetJobRequest.job_id",
index=2,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5447,
serialized_end=5514,
)
_LISTJOBSREQUEST = _descriptor.Descriptor(
name="ListJobsRequest",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.region",
index=1,
number=6,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_size",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.page_size",
index=2,
number=2,
type=5,
cpp_type=1,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="page_token",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.page_token",
index=3,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="cluster_name",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.cluster_name",
index=4,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_state_matcher",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.job_state_matcher",
index=5,
number=5,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="filter",
full_name="google.cloud.dataproc.v1beta2.ListJobsRequest.filter",
index=6,
number=7,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5517,
serialized_end=5794,
)
_UPDATEJOBREQUEST = _descriptor.Descriptor(
name="UpdateJobRequest",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.region",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_id",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.job_id",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.job",
index=3,
number=4,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="update_mask",
full_name="google.cloud.dataproc.v1beta2.UpdateJobRequest.update_mask",
index=4,
number=5,
type=11,
cpp_type=10,
label=1,
has_default_value=False,
default_value=None,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5797,
serialized_end=5965,
)
_LISTJOBSRESPONSE = _descriptor.Descriptor(
name="ListJobsResponse",
full_name="google.cloud.dataproc.v1beta2.ListJobsResponse",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="jobs",
full_name="google.cloud.dataproc.v1beta2.ListJobsResponse.jobs",
index=0,
number=1,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="next_page_token",
full_name="google.cloud.dataproc.v1beta2.ListJobsResponse.next_page_token",
index=1,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=5967,
serialized_end=6060,
)
_CANCELJOBREQUEST = _descriptor.Descriptor(
name="CancelJobRequest",
full_name="google.cloud.dataproc.v1beta2.CancelJobRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.region",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_id",
full_name="google.cloud.dataproc.v1beta2.CancelJobRequest.job_id",
index=2,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=6062,
serialized_end=6132,
)
_DELETEJOBREQUEST = _descriptor.Descriptor(
name="DeleteJobRequest",
full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest",
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name="project_id",
full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.project_id",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="region",
full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.region",
index=1,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
_descriptor.FieldDescriptor(
name="job_id",
full_name="google.cloud.dataproc.v1beta2.DeleteJobRequest.job_id",
index=2,
number=2,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=_b("").decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=6134,
serialized_end=6204,
)
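# Post-construction wiring (editor's note, not emitted by protoc): the code
# below resolves message/enum cross-references and attaches each oneof
# member to its containing oneof, now that all Descriptor objects exist.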
_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[
"value"
].enum_type = _LOGGINGCONFIG_LEVEL
_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.containing_type = _LOGGINGCONFIG
_LOGGINGCONFIG.fields_by_name[
"driver_log_levels"
].message_type = _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY
_LOGGINGCONFIG_LEVEL.containing_type = _LOGGINGCONFIG
_HADOOPJOB_PROPERTIESENTRY.containing_type = _HADOOPJOB
_HADOOPJOB.fields_by_name["properties"].message_type = _HADOOPJOB_PROPERTIESENTRY
_HADOOPJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_HADOOPJOB.oneofs_by_name["driver"].fields.append(
_HADOOPJOB.fields_by_name["main_jar_file_uri"]
)
_HADOOPJOB.fields_by_name[
"main_jar_file_uri"
].containing_oneof = _HADOOPJOB.oneofs_by_name["driver"]
_HADOOPJOB.oneofs_by_name["driver"].fields.append(
_HADOOPJOB.fields_by_name["main_class"]
)
_HADOOPJOB.fields_by_name["main_class"].containing_oneof = _HADOOPJOB.oneofs_by_name[
"driver"
]
_SPARKJOB_PROPERTIESENTRY.containing_type = _SPARKJOB
_SPARKJOB.fields_by_name["properties"].message_type = _SPARKJOB_PROPERTIESENTRY
_SPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_SPARKJOB.oneofs_by_name["driver"].fields.append(
_SPARKJOB.fields_by_name["main_jar_file_uri"]
)
_SPARKJOB.fields_by_name[
"main_jar_file_uri"
].containing_oneof = _SPARKJOB.oneofs_by_name["driver"]
_SPARKJOB.oneofs_by_name["driver"].fields.append(_SPARKJOB.fields_by_name["main_class"])
_SPARKJOB.fields_by_name["main_class"].containing_oneof = _SPARKJOB.oneofs_by_name[
"driver"
]
_PYSPARKJOB_PROPERTIESENTRY.containing_type = _PYSPARKJOB
_PYSPARKJOB.fields_by_name["properties"].message_type = _PYSPARKJOB_PROPERTIESENTRY
_PYSPARKJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_HIVEJOB_SCRIPTVARIABLESENTRY.containing_type = _HIVEJOB
_HIVEJOB_PROPERTIESENTRY.containing_type = _HIVEJOB
_HIVEJOB.fields_by_name["query_list"].message_type = _QUERYLIST
_HIVEJOB.fields_by_name["script_variables"].message_type = _HIVEJOB_SCRIPTVARIABLESENTRY
_HIVEJOB.fields_by_name["properties"].message_type = _HIVEJOB_PROPERTIESENTRY
_HIVEJOB.oneofs_by_name["queries"].fields.append(
_HIVEJOB.fields_by_name["query_file_uri"]
)
_HIVEJOB.fields_by_name["query_file_uri"].containing_oneof = _HIVEJOB.oneofs_by_name[
"queries"
]
_HIVEJOB.oneofs_by_name["queries"].fields.append(_HIVEJOB.fields_by_name["query_list"])
_HIVEJOB.fields_by_name["query_list"].containing_oneof = _HIVEJOB.oneofs_by_name[
"queries"
]
_SPARKSQLJOB_SCRIPTVARIABLESENTRY.containing_type = _SPARKSQLJOB
_SPARKSQLJOB_PROPERTIESENTRY.containing_type = _SPARKSQLJOB
_SPARKSQLJOB.fields_by_name["query_list"].message_type = _QUERYLIST
_SPARKSQLJOB.fields_by_name[
"script_variables"
].message_type = _SPARKSQLJOB_SCRIPTVARIABLESENTRY
_SPARKSQLJOB.fields_by_name["properties"].message_type = _SPARKSQLJOB_PROPERTIESENTRY
_SPARKSQLJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_SPARKSQLJOB.oneofs_by_name["queries"].fields.append(
_SPARKSQLJOB.fields_by_name["query_file_uri"]
)
_SPARKSQLJOB.fields_by_name[
"query_file_uri"
].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"]
_SPARKSQLJOB.oneofs_by_name["queries"].fields.append(
_SPARKSQLJOB.fields_by_name["query_list"]
)
_SPARKSQLJOB.fields_by_name[
"query_list"
].containing_oneof = _SPARKSQLJOB.oneofs_by_name["queries"]
_PIGJOB_SCRIPTVARIABLESENTRY.containing_type = _PIGJOB
_PIGJOB_PROPERTIESENTRY.containing_type = _PIGJOB
_PIGJOB.fields_by_name["query_list"].message_type = _QUERYLIST
_PIGJOB.fields_by_name["script_variables"].message_type = _PIGJOB_SCRIPTVARIABLESENTRY
_PIGJOB.fields_by_name["properties"].message_type = _PIGJOB_PROPERTIESENTRY
_PIGJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_PIGJOB.oneofs_by_name["queries"].fields.append(
_PIGJOB.fields_by_name["query_file_uri"]
)
_PIGJOB.fields_by_name["query_file_uri"].containing_oneof = _PIGJOB.oneofs_by_name[
"queries"
]
_PIGJOB.oneofs_by_name["queries"].fields.append(_PIGJOB.fields_by_name["query_list"])
_PIGJOB.fields_by_name["query_list"].containing_oneof = _PIGJOB.oneofs_by_name[
"queries"
]
_SPARKRJOB_PROPERTIESENTRY.containing_type = _SPARKRJOB
_SPARKRJOB.fields_by_name["properties"].message_type = _SPARKRJOB_PROPERTIESENTRY
_SPARKRJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG
_JOBSTATUS.fields_by_name["state"].enum_type = _JOBSTATUS_STATE
_JOBSTATUS.fields_by_name[
"state_start_time"
].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_JOBSTATUS.fields_by_name["substate"].enum_type = _JOBSTATUS_SUBSTATE
_JOBSTATUS_STATE.containing_type = _JOBSTATUS
_JOBSTATUS_SUBSTATE.containing_type = _JOBSTATUS
_YARNAPPLICATION.fields_by_name["state"].enum_type = _YARNAPPLICATION_STATE
_YARNAPPLICATION_STATE.containing_type = _YARNAPPLICATION
_JOB_LABELSENTRY.containing_type = _JOB
_JOB.fields_by_name["reference"].message_type = _JOBREFERENCE
_JOB.fields_by_name["placement"].message_type = _JOBPLACEMENT
_JOB.fields_by_name["hadoop_job"].message_type = _HADOOPJOB
_JOB.fields_by_name["spark_job"].message_type = _SPARKJOB
_JOB.fields_by_name["pyspark_job"].message_type = _PYSPARKJOB
_JOB.fields_by_name["hive_job"].message_type = _HIVEJOB
_JOB.fields_by_name["pig_job"].message_type = _PIGJOB
_JOB.fields_by_name["spark_r_job"].message_type = _SPARKRJOB
_JOB.fields_by_name["spark_sql_job"].message_type = _SPARKSQLJOB
_JOB.fields_by_name["status"].message_type = _JOBSTATUS
_JOB.fields_by_name["status_history"].message_type = _JOBSTATUS
_JOB.fields_by_name["yarn_applications"].message_type = _YARNAPPLICATION
_JOB.fields_by_name["labels"].message_type = _JOB_LABELSENTRY
_JOB.fields_by_name["scheduling"].message_type = _JOBSCHEDULING
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hadoop_job"])
_JOB.fields_by_name["hadoop_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_job"])
_JOB.fields_by_name["spark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pyspark_job"])
_JOB.fields_by_name["pyspark_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["hive_job"])
_JOB.fields_by_name["hive_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pig_job"])
_JOB.fields_by_name["pig_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_r_job"])
_JOB.fields_by_name["spark_r_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_sql_job"])
_JOB.fields_by_name["spark_sql_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
_SUBMITJOBREQUEST.fields_by_name["job"].message_type = _JOB
_LISTJOBSREQUEST.fields_by_name[
"job_state_matcher"
].enum_type = _LISTJOBSREQUEST_JOBSTATEMATCHER
_LISTJOBSREQUEST_JOBSTATEMATCHER.containing_type = _LISTJOBSREQUEST
_UPDATEJOBREQUEST.fields_by_name["job"].message_type = _JOB
_UPDATEJOBREQUEST.fields_by_name[
"update_mask"
].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_LISTJOBSRESPONSE.fields_by_name["jobs"].message_type = _JOB
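# Editor's sanity-check sketch (not emitted by protoc): after the wiring
# above, oneof membership is introspectable through the descriptor objects.
assert [f.name for f in _JOB.oneofs_by_name["type_job"].fields] == [
    "hadoop_job",
    "spark_job",
    "pyspark_job",
    "hive_job",
    "pig_job",
    "spark_r_job",
    "spark_sql_job",
]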
DESCRIPTOR.message_types_by_name["LoggingConfig"] = _LOGGINGCONFIG
DESCRIPTOR.message_types_by_name["HadoopJob"] = _HADOOPJOB
DESCRIPTOR.message_types_by_name["SparkJob"] = _SPARKJOB
DESCRIPTOR.message_types_by_name["PySparkJob"] = _PYSPARKJOB
DESCRIPTOR.message_types_by_name["QueryList"] = _QUERYLIST
DESCRIPTOR.message_types_by_name["HiveJob"] = _HIVEJOB
DESCRIPTOR.message_types_by_name["SparkSqlJob"] = _SPARKSQLJOB
DESCRIPTOR.message_types_by_name["PigJob"] = _PIGJOB
DESCRIPTOR.message_types_by_name["SparkRJob"] = _SPARKRJOB
DESCRIPTOR.message_types_by_name["JobPlacement"] = _JOBPLACEMENT
DESCRIPTOR.message_types_by_name["JobStatus"] = _JOBSTATUS
DESCRIPTOR.message_types_by_name["JobReference"] = _JOBREFERENCE
DESCRIPTOR.message_types_by_name["YarnApplication"] = _YARNAPPLICATION
DESCRIPTOR.message_types_by_name["Job"] = _JOB
DESCRIPTOR.message_types_by_name["JobScheduling"] = _JOBSCHEDULING
DESCRIPTOR.message_types_by_name["SubmitJobRequest"] = _SUBMITJOBREQUEST
DESCRIPTOR.message_types_by_name["GetJobRequest"] = _GETJOBREQUEST
DESCRIPTOR.message_types_by_name["ListJobsRequest"] = _LISTJOBSREQUEST
DESCRIPTOR.message_types_by_name["UpdateJobRequest"] = _UPDATEJOBREQUEST
DESCRIPTOR.message_types_by_name["ListJobsResponse"] = _LISTJOBSRESPONSE
DESCRIPTOR.message_types_by_name["CancelJobRequest"] = _CANCELJOBREQUEST
DESCRIPTOR.message_types_by_name["DeleteJobRequest"] = _DELETEJOBREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LoggingConfig = _reflection.GeneratedProtocolMessageType(
"LoggingConfig",
(_message.Message,),
dict(
DriverLogLevelsEntry=_reflection.GeneratedProtocolMessageType(
"DriverLogLevelsEntry",
(_message.Message,),
dict(
DESCRIPTOR=_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry)
),
),
DESCRIPTOR=_LOGGINGCONFIG,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""The runtime logging config of the job.
Attributes:
driver_log_levels:
The per-package log levels for the driver. This may include
the "root" package name to configure the root logger. Examples:
'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LoggingConfig)
),
)
_sym_db.RegisterMessage(LoggingConfig)
_sym_db.RegisterMessage(LoggingConfig.DriverLogLevelsEntry)
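# Illustrative usage sketch (editor's addition, not emitted by protoc):
# ``driver_log_levels`` is a protobuf map field, and the nested ``Level``
# enum values are exposed as class attributes. Package names below are
# hypothetical.
_example_logging_config = LoggingConfig()
_example_logging_config.driver_log_levels["root"] = LoggingConfig.INFO
_example_logging_config.driver_log_levels["org.apache"] = LoggingConfig.DEBUG
del _example_logging_config  # keep the module namespace clean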
HadoopJob = _reflection.GeneratedProtocolMessageType(
"HadoopJob",
(_message.Message,),
dict(
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_HADOOPJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry)
),
),
DESCRIPTOR=_HADOOPJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache Hadoop
MapReduce <https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html>`__
jobs on `Apache Hadoop
YARN <https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html>`__.
Attributes:
driver:
Required. Indicates the location of the driver's main class.
Specify either the jar file that contains the main class or
the main class name. To specify both, add the jar file to
``jar_file_uris``, and then specify the main class name in
this property.
main_jar_file_uri:
The HCFS URI of the jar file containing the main class.
Examples:
'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar'
'hdfs:/tmp/test-samples/custom-wordcount.jar'
'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'
main_class:
The name of the driver's main class. The jar file containing
the class must be in the default CLASSPATH or specified in
``jar_file_uris``.
args:
Optional. The arguments to pass to the driver. Do not include
arguments, such as ``-libjars`` or ``-Dfoo=bar``, that can be
set as job properties, since a collision may occur that causes
an incorrect job submission.
jar_file_uris:
Optional. Jar file URIs to add to the CLASSPATHs of the Hadoop
driver and tasks.
file_uris:
Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to
be copied to the working directory of Hadoop drivers and
distributed tasks. Useful for naively parallel tasks.
archive_uris:
Optional. HCFS URIs of archives to be extracted in the working
directory of Hadoop drivers and tasks. Supported file types:
.jar, .tar, .tar.gz, .tgz, or .zip.
properties:
Optional. A mapping of property names to values, used to
configure Hadoop. Properties that conflict with values set by
the Cloud Dataproc API may be overwritten. Can include
properties set in /etc/hadoop/conf/\*-site and classes in user
code.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HadoopJob)
),
)
_sym_db.RegisterMessage(HadoopJob)
_sym_db.RegisterMessage(HadoopJob.PropertiesEntry)
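# Illustrative usage sketch (editor's addition, not emitted by protoc):
# ``main_jar_file_uri`` and ``main_class`` are members of the ``driver``
# oneof, so setting one clears the other. The URI and class name below are
# hypothetical.
_example_hadoop_job = HadoopJob(main_class="org.example.WordCount")
_example_hadoop_job.main_jar_file_uri = "gs://my-bucket/wordcount.jar"
assert _example_hadoop_job.WhichOneof("driver") == "main_jar_file_uri"
assert _example_hadoop_job.main_class == ""  # cleared by the oneof
del _example_hadoop_job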
SparkJob = _reflection.GeneratedProtocolMessageType(
"SparkJob",
(_message.Message,),
dict(
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_SPARKJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry)
),
),
DESCRIPTOR=_SPARKJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache
Spark <http://spark.apache.org/>`__ applications on YARN.
Attributes:
driver:
Required. The specification of the main method to call to
drive the job. Specify either the jar file that contains the
main class or the main class name. To pass both a main jar and
a main class in that jar, add the jar to
``CommonJob.jar_file_uris``, and then specify the main class
name in ``main_class``.
main_jar_file_uri:
The HCFS URI of the jar file that contains the main class.
main_class:
The name of the driver's main class. The jar file that
contains the class must be in the default CLASSPATH or
specified in ``jar_file_uris``.
args:
Optional. The arguments to pass to the driver. Do not include
arguments, such as ``--conf``, that can be set as job
properties, since a collision may occur that causes an
incorrect job submission.
jar_file_uris:
Optional. HCFS URIs of jar files to add to the CLASSPATHs of
the Spark driver and tasks.
file_uris:
Optional. HCFS URIs of files to be copied to the working
directory of Spark drivers and distributed tasks. Useful for
naively parallel tasks.
archive_uris:
Optional. HCFS URIs of archives to be extracted in the working
directory of Spark drivers and tasks. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
properties:
Optional. A mapping of property names to values, used to
configure Spark. Properties that conflict with values set by
the Cloud Dataproc API may be overwritten. Can include
properties set in /etc/spark/conf/spark-defaults.conf and
classes in user code.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkJob)
),
)
_sym_db.RegisterMessage(SparkJob)
_sym_db.RegisterMessage(SparkJob.PropertiesEntry)
PySparkJob = _reflection.GeneratedProtocolMessageType(
"PySparkJob",
(_message.Message,),
dict(
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_PYSPARKJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry)
),
),
DESCRIPTOR=_PYSPARKJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache
PySpark <https://spark.apache.org/docs/0.9.0/python-programming-guide.html>`__
applications on YARN.
Attributes:
main_python_file_uri:
Required. The HCFS URI of the main Python file to use as the
driver. Must be a .py file.
args:
Optional. The arguments to pass to the driver. Do not include
arguments, such as ``--conf``, that can be set as job
properties, since a collision may occur that causes an
incorrect job submission.
python_file_uris:
Optional. HCFS file URIs of Python files to pass to the
PySpark framework. Supported file types: .py, .egg, and .zip.
jar_file_uris:
Optional. HCFS URIs of jar files to add to the CLASSPATHs of
the Python driver and tasks.
file_uris:
Optional. HCFS URIs of files to be copied to the working
directory of Python drivers and distributed tasks. Useful for
naively parallel tasks.
archive_uris:
Optional. HCFS URIs of archives to be extracted into the
working directory of Python drivers and distributed tasks.
Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
properties:
Optional. A mapping of property names to values, used to
configure PySpark. Properties that conflict with values set by
the Cloud Dataproc API may be overwritten. Can include
properties set in /etc/spark/conf/spark-defaults.conf and
classes in user code.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PySparkJob)
),
)
_sym_db.RegisterMessage(PySparkJob)
_sym_db.RegisterMessage(PySparkJob.PropertiesEntry)
QueryList = _reflection.GeneratedProtocolMessageType(
"QueryList",
(_message.Message,),
dict(
DESCRIPTOR=_QUERYLIST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A list of queries to run on a cluster.
Attributes:
queries:
Required. The queries to execute. You do not need to terminate
a query with a semicolon. Multiple queries can be specified in
one string by separating each with a semicolon. Here is an
example of a Cloud Dataproc API snippet that uses a QueryList
to specify a HiveJob:
::
    "hiveJob": {
      "queryList": {
        "queries": [
          "query1",
          "query2",
          "query3;query4",
        ]
      }
    }
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.QueryList)
),
)
_sym_db.RegisterMessage(QueryList)
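# Illustrative usage sketch (editor's addition, not emitted by protoc): the
# Python equivalent of the JSON snippet in the docstring above.
_example_query_list = QueryList(queries=["query1", "query2", "query3;query4"])
assert len(_example_query_list.queries) == 3
del _example_query_list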
HiveJob = _reflection.GeneratedProtocolMessageType(
"HiveJob",
(_message.Message,),
dict(
ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType(
"ScriptVariablesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_HIVEJOB_SCRIPTVARIABLESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry)
),
),
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_HIVEJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry)
),
),
DESCRIPTOR=_HIVEJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache
Hive <https://hive.apache.org/>`__ queries on YARN.
Attributes:
queries:
Required. The sequence of Hive queries to execute, specified
as either an HCFS file URI or a list of queries.
query_file_uri:
The HCFS URI of the script that contains Hive queries.
query_list:
A list of queries.
continue_on_failure:
Optional. Whether to continue executing queries if a query
fails. The default value is ``false``. Setting to ``true`` can
be useful when executing independent parallel queries.
script_variables:
Optional. Mapping of query variable names to values
(equivalent to the Hive command: ``SET name="value";``).
properties:
Optional. A mapping of property names and values, used to
configure Hive. Properties that conflict with values set by
the Cloud Dataproc API may be overwritten. Can include
properties set in /etc/hadoop/conf/\*-site.xml,
/etc/hive/conf/hive-site.xml, and classes in user code.
jar_file_uris:
Optional. HCFS URIs of jar files to add to the CLASSPATH of
the Hive server and Hadoop MapReduce (MR) tasks. Can contain
Hive SerDes and UDFs.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.HiveJob)
),
)
_sym_db.RegisterMessage(HiveJob)
_sym_db.RegisterMessage(HiveJob.ScriptVariablesEntry)
_sym_db.RegisterMessage(HiveJob.PropertiesEntry)
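# Illustrative usage sketch (editor's addition, not emitted by protoc):
# ``query_file_uri`` and ``query_list`` form the ``queries`` oneof, and
# ``script_variables`` is a plain string map. Values below are hypothetical.
_example_hive_job = HiveJob(
    query_list=QueryList(queries=["SHOW DATABASES"]),
    script_variables={"env": "staging"},
    continue_on_failure=True,
)
assert _example_hive_job.WhichOneof("queries") == "query_list"
del _example_hive_job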
SparkSqlJob = _reflection.GeneratedProtocolMessageType(
"SparkSqlJob",
(_message.Message,),
dict(
ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType(
"ScriptVariablesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_SPARKSQLJOB_SCRIPTVARIABLESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry)
),
),
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_SPARKSQLJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry)
),
),
DESCRIPTOR=_SPARKSQLJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache Spark
SQL <http://spark.apache.org/sql/>`__ queries.
Attributes:
queries:
Required. The sequence of Spark SQL queries to execute,
specified as either an HCFS file URI or as a list of queries.
query_file_uri:
The HCFS URI of the script that contains SQL queries.
query_list:
A list of queries.
script_variables:
Optional. Mapping of query variable names to values
(equivalent to the Spark SQL command: ``SET name="value";``).
properties:
Optional. A mapping of property names to values, used to
configure Spark SQL's SparkConf. Properties that conflict with
values set by the Cloud Dataproc API may be overwritten.
jar_file_uris:
Optional. HCFS URIs of jar files to be added to the Spark
CLASSPATH.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkSqlJob)
),
)
_sym_db.RegisterMessage(SparkSqlJob)
_sym_db.RegisterMessage(SparkSqlJob.ScriptVariablesEntry)
_sym_db.RegisterMessage(SparkSqlJob.PropertiesEntry)
PigJob = _reflection.GeneratedProtocolMessageType(
"PigJob",
(_message.Message,),
dict(
ScriptVariablesEntry=_reflection.GeneratedProtocolMessageType(
"ScriptVariablesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_PIGJOB_SCRIPTVARIABLESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry)
),
),
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_PIGJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry)
),
),
DESCRIPTOR=_PIGJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache
Pig <https://pig.apache.org/>`__ queries on YARN.
Attributes:
queries:
Required. The sequence of Pig queries to execute, specified as
an HCFS file URI or a list of queries.
query_file_uri:
The HCFS URI of the script that contains the Pig queries.
query_list:
A list of queries.
continue_on_failure:
Optional. Whether to continue executing queries if a query
fails. The default value is ``false``. Setting to ``true`` can
be useful when executing independent parallel queries.
script_variables:
Optional. Mapping of query variable names to values
(equivalent to the Pig command: ``name=[value]``).
properties:
Optional. A mapping of property names to values, used to
configure Pig. Properties that conflict with values set by the
Cloud Dataproc API may be overwritten. Can include properties
set in /etc/hadoop/conf/\*-site.xml,
/etc/pig/conf/pig.properties, and classes in user code.
jar_file_uris:
Optional. HCFS URIs of jar files to add to the CLASSPATH of
the Pig Client and Hadoop MapReduce (MR) tasks. Can contain
Pig UDFs.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.PigJob)
),
)
_sym_db.RegisterMessage(PigJob)
_sym_db.RegisterMessage(PigJob.ScriptVariablesEntry)
_sym_db.RegisterMessage(PigJob.PropertiesEntry)
SparkRJob = _reflection.GeneratedProtocolMessageType(
"SparkRJob",
(_message.Message,),
dict(
PropertiesEntry=_reflection.GeneratedProtocolMessageType(
"PropertiesEntry",
(_message.Message,),
dict(
DESCRIPTOR=_SPARKRJOB_PROPERTIESENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry)
),
),
DESCRIPTOR=_SPARKRJOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job for running `Apache
SparkR <https://spark.apache.org/docs/latest/sparkr.html>`__
applications on YARN.
Attributes:
main_r_file_uri:
Required. The HCFS URI of the main R file to use as the
driver. Must be a .R file.
args:
Optional. The arguments to pass to the driver. Do not include
arguments, such as ``--conf``, that can be set as job
properties, since a collision may occur that causes an
incorrect job submission.
file_uris:
Optional. HCFS URIs of files to be copied to the working
directory of R drivers and distributed tasks. Useful for
naively parallel tasks.
archive_uris:
Optional. HCFS URIs of archives to be extracted in the working
directory of Spark drivers and tasks. Supported file types:
.jar, .tar, .tar.gz, .tgz, and .zip.
properties:
Optional. A mapping of property names to values, used to
configure SparkR. Properties that conflict with values set by
the Cloud Dataproc API may be overwritten. Can include
properties set in /etc/spark/conf/spark-defaults.conf and
classes in user code.
logging_config:
Optional. The runtime log config for job execution.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SparkRJob)
),
)
_sym_db.RegisterMessage(SparkRJob)
_sym_db.RegisterMessage(SparkRJob.PropertiesEntry)
JobPlacement = _reflection.GeneratedProtocolMessageType(
"JobPlacement",
(_message.Message,),
dict(
DESCRIPTOR=_JOBPLACEMENT,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""Cloud Dataproc job config.
Attributes:
cluster_name:
Required. The name of the cluster where the job will be
submitted.
cluster_uuid:
Output only. A cluster UUID generated by the Cloud Dataproc
service when the job is submitted.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobPlacement)
),
)
_sym_db.RegisterMessage(JobPlacement)
JobStatus = _reflection.GeneratedProtocolMessageType(
"JobStatus",
(_message.Message,),
dict(
DESCRIPTOR=_JOBSTATUS,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""Cloud Dataproc job status.
Attributes:
state:
Output only. A state message specifying the overall job state.
details:
Output only. Optional job state details, such as an error
description if the state is ERROR.
state_start_time:
Output only. The time when this state was entered.
substate:
Output only. Additional state information, which includes
status reported by the agent.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobStatus)
),
)
_sym_db.RegisterMessage(JobStatus)
JobReference = _reflection.GeneratedProtocolMessageType(
"JobReference",
(_message.Message,),
dict(
DESCRIPTOR=_JOBREFERENCE,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""Encapsulates the full scoping used to reference a job.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
job_id:
Optional. The job ID, which must be unique within the project.
The ID must contain only letters (a-z, A-Z), numbers (0-9),
underscores (\_), or hyphens (-). The maximum length is 100
characters. If not specified by the caller, the job ID will
be provided by the server.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobReference)
),
)
_sym_db.RegisterMessage(JobReference)
YarnApplication = _reflection.GeneratedProtocolMessageType(
"YarnApplication",
(_message.Message,),
dict(
DESCRIPTOR=_YARNAPPLICATION,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A YARN application created by a job. Application information is a subset
of org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.
**Beta Feature**: This report is available for testing purposes only. It
may be changed before final release.
Attributes:
name:
Required. The application name.
state:
Required. The application state.
progress:
Required. The numerical progress of the application, from 1 to
100.
tracking_url:
Optional. The HTTP URL of the ApplicationMaster,
HistoryServer, or TimelineServer that provides application-
specific information. The URL uses the internal hostname, and
requires a proxy server for resolution and, possibly, access.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.YarnApplication)
),
)
_sym_db.RegisterMessage(YarnApplication)
Job = _reflection.GeneratedProtocolMessageType(
"Job",
(_message.Message,),
dict(
LabelsEntry=_reflection.GeneratedProtocolMessageType(
"LabelsEntry",
(_message.Message,),
dict(
DESCRIPTOR=_JOB_LABELSENTRY,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2"
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Job.LabelsEntry)
),
),
DESCRIPTOR=_JOB,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A Cloud Dataproc job resource.
Attributes:
reference:
Optional. The fully qualified reference to the job, which can
be used to obtain the equivalent REST path of the job
resource. If this property is not specified when a job is
created, the server generates a job\_id.
placement:
Required. Job information, including how, when, and where to
run the job.
type_job:
Required. The application/framework-specific portion of the
job.
hadoop_job:
Job is a Hadoop job.
spark_job:
Job is a Spark job.
pyspark_job:
Job is a PySpark job.
hive_job:
Job is a Hive job.
pig_job:
Job is a Pig job.
spark_r_job:
Job is a SparkR job.
spark_sql_job:
Job is a Spark SQL job.
status:
Output only. The job status. Additional application-specific
status information may be contained in the type\_job and
yarn\_applications fields.
status_history:
Output only. The previous job status.
yarn_applications:
Output only. The collection of YARN applications spun up by
this job. **Beta Feature**: This report is available for
testing purposes only. It may be changed before final release.
submitted_by:
Output only. The email address of the user submitting the job.
For jobs submitted on the cluster, the address is
username@hostname.
driver_output_resource_uri:
Output only. A URI pointing to the location of the stdout of
the job's driver program.
driver_control_files_uri:
Output only. If present, the location of miscellaneous control
files which may be used as part of job setup and handling. If
not present, control files may be placed in the same location
as ``driver_output_resource_uri``.
labels:
Optional. The labels to associate with this job. Label
**keys** must contain 1 to 63 characters, and must conform to
`RFC 1035 <https://www.ietf.org/rfc/rfc1035.txt>`__. Label
**values** may be empty, but, if present, must contain 1 to 63
characters, and must conform to `RFC 1035
<https://www.ietf.org/rfc/rfc1035.txt>`__. No more than 32
labels can be associated with a job.
scheduling:
Optional. Job scheduling configuration.
job_uuid:
Output only. A UUID that uniquely identifies a job within the
project over time. This is in contrast to a user-settable
reference.job\_id that may be reused over time.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.Job)
),
)
_sym_db.RegisterMessage(Job)
_sym_db.RegisterMessage(Job.LabelsEntry)
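# Illustrative usage sketch (editor's addition, not emitted by protoc):
# exactly one ``type_job`` member may be set, and ``labels`` is a string
# map. Cluster and class names below are hypothetical.
_example_job = Job(
    placement=JobPlacement(cluster_name="my-cluster"),
    hadoop_job=HadoopJob(main_class="org.example.WordCount"),
    labels={"env": "staging"},
)
assert _example_job.WhichOneof("type_job") == "hadoop_job"
del _example_job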
JobScheduling = _reflection.GeneratedProtocolMessageType(
"JobScheduling",
(_message.Message,),
dict(
DESCRIPTOR=_JOBSCHEDULING,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""Job scheduling options.
Attributes:
max_failures_per_hour:
Optional. Maximum number of times per hour a driver may be
restarted as a result of the driver terminating with a
non-zero code before the job is reported failed. A job may be
reported as thrashing if the driver exits with a non-zero code
4 times within a 10-minute window. Maximum value is 10.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobScheduling)
),
)
_sym_db.RegisterMessage(JobScheduling)
SubmitJobRequest = _reflection.GeneratedProtocolMessageType(
"SubmitJobRequest",
(_message.Message,),
dict(
DESCRIPTOR=_SUBMITJOBREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to submit a job.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
job:
Required. The job resource.
request_id:
Optional. A unique id used to identify the request. If the
server receives two
[SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest]
requests with the same id, then the second request will be
ignored and the first [Job][google.cloud.dataproc.v1beta2.Job]
created and stored in the backend is returned. It is
recommended to always set this value to a `UUID
<https://en.wikipedia.org/wiki/Universally_unique_identifier>`__.
The id must contain only letters (a-z, A-Z), numbers (0-9),
underscores (\_), and hyphens (-). The maximum length is 40
characters.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.SubmitJobRequest)
),
)
_sym_db.RegisterMessage(SubmitJobRequest)
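# Illustrative usage sketch (editor's addition, not emitted by protoc): per
# the docstring, a UUID is the recommended ``request_id`` for idempotent
# retries. Project, region, bucket, and cluster names are hypothetical.
import uuid

_example_submit_request = SubmitJobRequest(
    project_id="my-project",
    region="us-central1",
    job=Job(
        placement=JobPlacement(cluster_name="my-cluster"),
        spark_sql_job=SparkSqlJob(query_file_uri="gs://my-bucket/etl.sql"),
        scheduling=JobScheduling(max_failures_per_hour=5),
    ),
    request_id=str(uuid.uuid4()),
)
del _example_submit_request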
GetJobRequest = _reflection.GeneratedProtocolMessageType(
"GetJobRequest",
(_message.Message,),
dict(
DESCRIPTOR=_GETJOBREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to get the resource representation for a job in a project.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
job_id:
Required. The job ID.
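Example (an illustrative sketch; ``CancelJobRequest`` and
``DeleteJobRequest`` below take the same three fields)::

    request = GetJobRequest(
        project_id="my-project",
        region="us-central1",
        job_id="my-job-id",
    )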
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetJobRequest)
),
)
_sym_db.RegisterMessage(GetJobRequest)
ListJobsRequest = _reflection.GeneratedProtocolMessageType(
"ListJobsRequest",
(_message.Message,),
dict(
DESCRIPTOR=_LISTJOBSREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to list jobs in a project.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
page_size:
Optional. The number of results to return in each response.
page_token:
Optional. The page token, returned by a previous call, to
request the next page of results.
cluster_name:
Optional. If set, the returned jobs list includes only jobs
that were submitted to the named cluster.
job_state_matcher:
Optional. Specifies enumerated categories of jobs to list.
(default = match ALL jobs). If ``filter`` is provided,
``jobStateMatcher`` will be ignored.
filter:
Optional. A filter constraining the jobs to list. Filters are
case-sensitive and have the following syntax:
``[field = value] AND [field [= value]] ...`` where **field**
is ``status.state`` or ``labels.[KEY]``, and ``[KEY]`` is a
label key. **value** can be ``*`` to match all values.
``status.state`` can be either ``ACTIVE`` or ``NON_ACTIVE``.
Only the logical ``AND`` operator is supported; space-separated
items are treated as having an implicit ``AND`` operator.
Example filter:
``status.state = ACTIVE AND labels.env = staging AND labels.starred = *``
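Example (an illustrative sketch; the label key and value are
hypothetical)::

    request = ListJobsRequest(
        project_id="my-project",
        region="us-central1",
        page_size=50,
        filter="status.state = ACTIVE AND labels.env = staging",
    )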
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListJobsRequest)
),
)
_sym_db.RegisterMessage(ListJobsRequest)
UpdateJobRequest = _reflection.GeneratedProtocolMessageType(
"UpdateJobRequest",
(_message.Message,),
dict(
DESCRIPTOR=_UPDATEJOBREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to update a job.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
job_id:
Required. The job ID.
job:
Required. The changes to the job.
update_mask:
Required. Specifies the path, relative to ``Job``, of the field
to update. For example, to update the labels of a Job, the
update\_mask parameter would be specified as ``labels``, and
the ``PATCH`` request body would specify the new value. Note:
currently, ``labels`` is the only field that can be updated.
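Example (an illustrative sketch of the labels-only update
described above; ``FieldMask`` comes from the well-known
``google.protobuf.field_mask_pb2`` module already imported by
this file)::

    from google.protobuf import field_mask_pb2

    request = UpdateJobRequest(
        project_id="my-project",
        region="us-central1",
        job_id="my-job-id",
        job=Job(labels={"env": "production"}),
        update_mask=field_mask_pb2.FieldMask(paths=["labels"]),
    )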
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.UpdateJobRequest)
),
)
_sym_db.RegisterMessage(UpdateJobRequest)
ListJobsResponse = _reflection.GeneratedProtocolMessageType(
"ListJobsResponse",
(_message.Message,),
dict(
DESCRIPTOR=_LISTJOBSRESPONSE,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A list of jobs in a project.
Attributes:
jobs:
Output only. Jobs list.
next_page_token:
Optional. This token is included in the response if there are
more results to fetch. To fetch additional results, provide
this value as the ``page_token`` in a subsequent
ListJobsRequest.
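Example (an illustrative pagination sketch; ``list_jobs`` is a
hypothetical stand-in for whatever RPC method actually issues
the request)::

    request = ListJobsRequest(project_id="my-project", region="us-central1")
    while True:
        response = list_jobs(request)
        for job in response.jobs:
            print(job.reference.job_id)
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token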
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.ListJobsResponse)
),
)
_sym_db.RegisterMessage(ListJobsResponse)
CancelJobRequest = _reflection.GeneratedProtocolMessageType(
"CancelJobRequest",
(_message.Message,),
dict(
DESCRIPTOR=_CANCELJOBREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to cancel a job.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
job_id:
Required. The job ID.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.CancelJobRequest)
),
)
_sym_db.RegisterMessage(CancelJobRequest)
DeleteJobRequest = _reflection.GeneratedProtocolMessageType(
"DeleteJobRequest",
(_message.Message,),
dict(
DESCRIPTOR=_DELETEJOBREQUEST,
__module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2",
__doc__="""A request to delete a job.
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
job belongs to.
region:
Required. The Cloud Dataproc region in which to handle the
request.
job_id:
Required. The job ID.
""",
# @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DeleteJobRequest)
),
)
_sym_db.RegisterMessage(DeleteJobRequest)
DESCRIPTOR._options = None
_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None
_HADOOPJOB_PROPERTIESENTRY._options = None
_SPARKJOB_PROPERTIESENTRY._options = None
_PYSPARKJOB_PROPERTIESENTRY._options = None
_HIVEJOB_SCRIPTVARIABLESENTRY._options = None
_HIVEJOB_PROPERTIESENTRY._options = None
_SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None
_SPARKSQLJOB_PROPERTIESENTRY._options = None
_PIGJOB_SCRIPTVARIABLESENTRY._options = None
_PIGJOB_PROPERTIESENTRY._options = None
_SPARKRJOB_PROPERTIESENTRY._options = None
_JOB_LABELSENTRY._options = None
_JOBCONTROLLER = _descriptor.ServiceDescriptor(
name="JobController",
full_name="google.cloud.dataproc.v1beta2.JobController",
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=6207,
serialized_end=7242,
methods=[
_descriptor.MethodDescriptor(
name="SubmitJob",
full_name="google.cloud.dataproc.v1beta2.JobController.SubmitJob",
index=0,
containing_service=None,
input_type=_SUBMITJOBREQUEST,
output_type=_JOB,
serialized_options=_b(
'\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*'
),
),
_descriptor.MethodDescriptor(
name="GetJob",
full_name="google.cloud.dataproc.v1beta2.JobController.GetJob",
index=1,
containing_service=None,
input_type=_GETJOBREQUEST,
output_type=_JOB,
serialized_options=_b(
"\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}"
),
),
_descriptor.MethodDescriptor(
name="ListJobs",
full_name="google.cloud.dataproc.v1beta2.JobController.ListJobs",
index=2,
containing_service=None,
input_type=_LISTJOBSREQUEST,
output_type=_LISTJOBSRESPONSE,
serialized_options=_b(
"\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs"
),
),
_descriptor.MethodDescriptor(
name="UpdateJob",
full_name="google.cloud.dataproc.v1beta2.JobController.UpdateJob",
index=3,
containing_service=None,
input_type=_UPDATEJOBREQUEST,
output_type=_JOB,
serialized_options=_b(
"\202\323\344\223\002D2=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\003job"
),
),
_descriptor.MethodDescriptor(
name="CancelJob",
full_name="google.cloud.dataproc.v1beta2.JobController.CancelJob",
index=4,
containing_service=None,
input_type=_CANCELJOBREQUEST,
output_type=_JOB,
serialized_options=_b(
'\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*'
),
),
_descriptor.MethodDescriptor(
name="DeleteJob",
full_name="google.cloud.dataproc.v1beta2.JobController.DeleteJob",
index=5,
containing_service=None,
input_type=_DELETEJOBREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=_b(
"\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}"
),
),
],
)
_sym_db.RegisterServiceDescriptor(_JOBCONTROLLER)
DESCRIPTOR.services_by_name["JobController"] = _JOBCONTROLLER
# @@protoc_insertion_point(module_scope)
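# Usage sketch (kept in comments so this generated module's import-time
# behavior is unchanged). protoc normally emits a companion
# ``jobs_pb2_grpc`` module alongside this file whose ``JobControllerStub``
# wraps the six methods registered above; the channel target and field
# values below are hypothetical.
#
#     import grpc
#     from google.cloud.dataproc_v1beta2.proto import jobs_pb2_grpc
#
#     channel = grpc.insecure_channel("localhost:8080")
#     stub = jobs_pb2_grpc.JobControllerStub(channel)
#     job = stub.SubmitJob(
#         SubmitJobRequest(project_id="my-project", region="us-central1")
#     )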