From 4f128103699de579eef9c687673e229f2e3bbed6 Mon Sep 17 00:00:00 2001
From: Jia Fan
Date: Tue, 11 Jul 2023 14:17:52 +0800
Subject: [PATCH] [SPARK-44370][CONNECT] Migrate Buf remote generation alpha to remote plugins

---
 .../connect/common/src/main/buf.gen.yaml      |  10 +-
 python/pyspark/sql/connect/proto/base_pb2.py  | 748 +-----------------
 .../pyspark/sql/connect/proto/catalog_pb2.py  | 352 +--------
 .../pyspark/sql/connect/proto/commands_pb2.py | 432 +---------
 .../pyspark/sql/connect/proto/common_pb2.py   |  30 +-
 .../sql/connect/proto/example_plugins_pb2.py  |  42 +-
 .../sql/connect/proto/expressions_pb2.py      | 308 +-------
 .../sql/connect/proto/relations_pb2.py        | 744 +----------------
 python/pyspark/sql/connect/proto/types_pb2.py | 293 +------
 9 files changed, 29 insertions(+), 2930 deletions(-)

diff --git a/connector/connect/common/src/main/buf.gen.yaml b/connector/connect/common/src/main/buf.gen.yaml
index 6d0ff8fd4c826..07edaa567b11a 100644
--- a/connector/connect/common/src/main/buf.gen.yaml
+++ b/connector/connect/common/src/main/buf.gen.yaml
@@ -16,18 +16,18 @@
 #
 version: v1
 plugins:
-  - remote: buf.build/protocolbuffers/plugins/cpp:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/cpp:v21.7
     out: gen/proto/cpp
-  - remote: buf.build/protocolbuffers/plugins/csharp:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/csharp:v21.7
     out: gen/proto/csharp
-  - remote: buf.build/protocolbuffers/plugins/java:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/java:v21.7
     out: gen/proto/java
   - plugin: buf.build/grpc/ruby:v1.56.0
     out: gen/proto/ruby
-  - remote: buf.build/protocolbuffers/plugins/ruby:v21.2.0-1
+  - plugin: buf.build/protocolbuffers/ruby:v21.7
     out: gen/proto/ruby
 # Building the Python build and building the mypy interfaces.
-  - remote: buf.build/protocolbuffers/plugins/python:v3.19.3-1
+  - plugin: buf.build/protocolbuffers/python:v21.7
     out: gen/proto/python
   - plugin: buf.build/grpc/python:v1.56.0
     out: gen/proto/python
diff --git a/python/pyspark/sql/connect/proto/base_pb2.py b/python/pyspark/sql/connect/proto/base_pb2.py
index 160109b95699c..7bf93ed58fa86 100644
--- a/python/pyspark/sql/connect/proto/base_pb2.py
+++ b/python/pyspark/sql/connect/proto/base_pb2.py
@@ -18,10 +18,9 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/base.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -41,749 +40,8 @@ b'\n\x18spark/connect/base.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1cspark/connect/commands.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"t\n\x04Plan\x12-\n\x04root\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationH\x00R\x04root\x12\x32\n\x07\x63ommand\x18\x02 \x01(\x0b\x32\x16.spark.connect.CommandH\x00R\x07\x63ommandB\t\n\x07op_type"z\n\x0bUserContext\x12\x17\n\x07user_id\x18\x01 \x01(\tR\x06userId\x12\x1b\n\tuser_name\x18\x02 \x01(\tR\x08userName\x12\x35\n\nextensions\x18\xe7\x07 \x03(\x0b\x32\x14.google.protobuf.AnyR\nextensions"\xf5\x12\n\x12\x41nalyzePlanRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x03 \x01(\tH\x01R\nclientType\x88\x01\x01\x12\x42\n\x06schema\x18\x04 \x01(\x0b\x32(.spark.connect.AnalyzePlanRequest.SchemaH\x00R\x06schema\x12\x45\n\x07\x65xplain\x18\x05 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.ExplainH\x00R\x07\x65xplain\x12O\n\x0btree_string\x18\x06 \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.TreeStringH\x00R\ntreeString\x12\x46\n\x08is_local\x18\x07 \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.IsLocalH\x00R\x07isLocal\x12R\n\x0cis_streaming\x18\x08 \x01(\x0b\x32-.spark.connect.AnalyzePlanRequest.IsStreamingH\x00R\x0bisStreaming\x12O\n\x0binput_files\x18\t \x01(\x0b\x32,.spark.connect.AnalyzePlanRequest.InputFilesH\x00R\ninputFiles\x12U\n\rspark_version\x18\n \x01(\x0b\x32..spark.connect.AnalyzePlanRequest.SparkVersionH\x00R\x0csparkVersion\x12I\n\tddl_parse\x18\x0b \x01(\x0b\x32*.spark.connect.AnalyzePlanRequest.DDLParseH\x00R\x08\x64\x64lParse\x12X\n\x0esame_semantics\x18\x0c \x01(\x0b\x32/.spark.connect.AnalyzePlanRequest.SameSemanticsH\x00R\rsameSemantics\x12U\n\rsemantic_hash\x18\r \x01(\x0b\x32..spark.connect.AnalyzePlanRequest.SemanticHashH\x00R\x0csemanticHash\x12\x45\n\x07persist\x18\x0e \x01(\x0b\x32).spark.connect.AnalyzePlanRequest.PersistH\x00R\x07persist\x12K\n\tunpersist\x18\x0f \x01(\x0b\x32+.spark.connect.AnalyzePlanRequest.UnpersistH\x00R\tunpersist\x12_\n\x11get_storage_level\x18\x10 \x01(\x0b\x32\x31.spark.connect.AnalyzePlanRequest.GetStorageLevelH\x00R\x0fgetStorageLevel\x1a\x31\n\x06Schema\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\xbb\x02\n\x07\x45xplain\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12X\n\x0c\x65xplain_mode\x18\x02 \x01(\x0e\x32\x35.spark.connect.AnalyzePlanRequest.Explain.ExplainModeR\x0b\x65xplainMode"\xac\x01\n\x0b\x45xplainMode\x12\x1c\n\x18\x45XPLAIN_MODE_UNSPECIFIED\x10\x00\x12\x17\n\x13\x45XPLAIN_MODE_SIMPLE\x10\x01\x12\x19\n\x15\x45XPLAIN_MODE_EXTENDED\x10\x02\x12\x18\n\x14\x45XPLAIN_MODE_CODEGEN\x10\x03\x12\x15\n\x11\x45XPLAIN_MODE_COST\x10\x04\x12\x1a\n\x16\x45XPLAIN_MODE_FORMATTED\x10\x05\x1aZ\n\nTreeString\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12\x19\n\x05level\x18\x02 
\x01(\x05H\x00R\x05level\x88\x01\x01\x42\x08\n\x06_level\x1a\x32\n\x07IsLocal\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x36\n\x0bIsStreaming\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x35\n\nInputFiles\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x0e\n\x0cSparkVersion\x1a)\n\x08\x44\x44LParse\x12\x1d\n\nddl_string\x18\x01 \x01(\tR\tddlString\x1ay\n\rSameSemantics\x12\x34\n\x0btarget_plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\ntargetPlan\x12\x32\n\nother_plan\x18\x02 \x01(\x0b\x32\x13.spark.connect.PlanR\totherPlan\x1a\x37\n\x0cSemanticHash\x12\'\n\x04plan\x18\x01 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x1a\x97\x01\n\x07Persist\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relation\x12\x45\n\rstorage_level\x18\x02 \x01(\x0b\x32\x1b.spark.connect.StorageLevelH\x00R\x0cstorageLevel\x88\x01\x01\x42\x10\n\x0e_storage_level\x1an\n\tUnpersist\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relation\x12\x1f\n\x08\x62locking\x18\x02 \x01(\x08H\x00R\x08\x62locking\x88\x01\x01\x42\x0b\n\t_blocking\x1a\x46\n\x0fGetStorageLevel\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relationB\t\n\x07\x61nalyzeB\x0e\n\x0c_client_type"\x99\r\n\x13\x41nalyzePlanResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12\x43\n\x06schema\x18\x02 \x01(\x0b\x32).spark.connect.AnalyzePlanResponse.SchemaH\x00R\x06schema\x12\x46\n\x07\x65xplain\x18\x03 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.ExplainH\x00R\x07\x65xplain\x12P\n\x0btree_string\x18\x04 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.TreeStringH\x00R\ntreeString\x12G\n\x08is_local\x18\x05 \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.IsLocalH\x00R\x07isLocal\x12S\n\x0cis_streaming\x18\x06 \x01(\x0b\x32..spark.connect.AnalyzePlanResponse.IsStreamingH\x00R\x0bisStreaming\x12P\n\x0binput_files\x18\x07 \x01(\x0b\x32-.spark.connect.AnalyzePlanResponse.InputFilesH\x00R\ninputFiles\x12V\n\rspark_version\x18\x08 \x01(\x0b\x32/.spark.connect.AnalyzePlanResponse.SparkVersionH\x00R\x0csparkVersion\x12J\n\tddl_parse\x18\t \x01(\x0b\x32+.spark.connect.AnalyzePlanResponse.DDLParseH\x00R\x08\x64\x64lParse\x12Y\n\x0esame_semantics\x18\n \x01(\x0b\x32\x30.spark.connect.AnalyzePlanResponse.SameSemanticsH\x00R\rsameSemantics\x12V\n\rsemantic_hash\x18\x0b \x01(\x0b\x32/.spark.connect.AnalyzePlanResponse.SemanticHashH\x00R\x0csemanticHash\x12\x46\n\x07persist\x18\x0c \x01(\x0b\x32*.spark.connect.AnalyzePlanResponse.PersistH\x00R\x07persist\x12L\n\tunpersist\x18\r \x01(\x0b\x32,.spark.connect.AnalyzePlanResponse.UnpersistH\x00R\tunpersist\x12`\n\x11get_storage_level\x18\x0e \x01(\x0b\x32\x32.spark.connect.AnalyzePlanResponse.GetStorageLevelH\x00R\x0fgetStorageLevel\x1a\x39\n\x06Schema\x12/\n\x06schema\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06schema\x1a\x30\n\x07\x45xplain\x12%\n\x0e\x65xplain_string\x18\x01 \x01(\tR\rexplainString\x1a-\n\nTreeString\x12\x1f\n\x0btree_string\x18\x01 \x01(\tR\ntreeString\x1a$\n\x07IsLocal\x12\x19\n\x08is_local\x18\x01 \x01(\x08R\x07isLocal\x1a\x30\n\x0bIsStreaming\x12!\n\x0cis_streaming\x18\x01 \x01(\x08R\x0bisStreaming\x1a"\n\nInputFiles\x12\x14\n\x05\x66iles\x18\x01 \x03(\tR\x05\x66iles\x1a(\n\x0cSparkVersion\x12\x18\n\x07version\x18\x01 \x01(\tR\x07version\x1a;\n\x08\x44\x44LParse\x12/\n\x06parsed\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06parsed\x1a\'\n\rSameSemantics\x12\x16\n\x06result\x18\x01 
\x01(\x08R\x06result\x1a&\n\x0cSemanticHash\x12\x16\n\x06result\x18\x01 \x01(\x05R\x06result\x1a\t\n\x07Persist\x1a\x0b\n\tUnpersist\x1aS\n\x0fGetStorageLevel\x12@\n\rstorage_level\x18\x01 \x01(\x0b\x32\x1b.spark.connect.StorageLevelR\x0cstorageLevelB\x08\n\x06result"\x85\x03\n\x12\x45xecutePlanRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\'\n\x04plan\x18\x03 \x01(\x0b\x32\x13.spark.connect.PlanR\x04plan\x12$\n\x0b\x63lient_type\x18\x04 \x01(\tH\x00R\nclientType\x88\x01\x01\x12X\n\x0frequest_options\x18\x05 \x03(\x0b\x32/.spark.connect.ExecutePlanRequest.RequestOptionR\x0erequestOptions\x1aX\n\rRequestOption\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textensionB\x10\n\x0erequest_optionB\x0e\n\x0c_client_type"\xe5\r\n\x13\x45xecutePlanResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12P\n\x0b\x61rrow_batch\x18\x02 \x01(\x0b\x32-.spark.connect.ExecutePlanResponse.ArrowBatchH\x00R\narrowBatch\x12\x63\n\x12sql_command_result\x18\x05 \x01(\x0b\x32\x33.spark.connect.ExecutePlanResponse.SqlCommandResultH\x00R\x10sqlCommandResult\x12~\n#write_stream_operation_start_result\x18\x08 \x01(\x0b\x32..spark.connect.WriteStreamOperationStartResultH\x00R\x1fwriteStreamOperationStartResult\x12q\n\x1estreaming_query_command_result\x18\t \x01(\x0b\x32*.spark.connect.StreamingQueryCommandResultH\x00R\x1bstreamingQueryCommandResult\x12k\n\x1cget_resources_command_result\x18\n \x01(\x0b\x32(.spark.connect.GetResourcesCommandResultH\x00R\x19getResourcesCommandResult\x12\x87\x01\n&streaming_query_manager_command_result\x18\x0b \x01(\x0b\x32\x31.spark.connect.StreamingQueryManagerCommandResultH\x00R"streamingQueryManagerCommandResult\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textension\x12\x44\n\x07metrics\x18\x04 \x01(\x0b\x32*.spark.connect.ExecutePlanResponse.MetricsR\x07metrics\x12]\n\x10observed_metrics\x18\x06 \x03(\x0b\x32\x32.spark.connect.ExecutePlanResponse.ObservedMetricsR\x0fobservedMetrics\x12/\n\x06schema\x18\x07 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06schema\x1aG\n\x10SqlCommandResult\x12\x33\n\x08relation\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x08relation\x1a=\n\nArrowBatch\x12\x1b\n\trow_count\x18\x01 \x01(\x03R\x08rowCount\x12\x12\n\x04\x64\x61ta\x18\x02 \x01(\x0cR\x04\x64\x61ta\x1a\x85\x04\n\x07Metrics\x12Q\n\x07metrics\x18\x01 \x03(\x0b\x32\x37.spark.connect.ExecutePlanResponse.Metrics.MetricObjectR\x07metrics\x1a\xcc\x02\n\x0cMetricObject\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x17\n\x07plan_id\x18\x02 \x01(\x03R\x06planId\x12\x16\n\x06parent\x18\x03 \x01(\x03R\x06parent\x12z\n\x11\x65xecution_metrics\x18\x04 \x03(\x0b\x32M.spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntryR\x10\x65xecutionMetrics\x1a{\n\x15\x45xecutionMetricsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12L\n\x05value\x18\x02 \x01(\x0b\x32\x36.spark.connect.ExecutePlanResponse.Metrics.MetricValueR\x05value:\x02\x38\x01\x1aX\n\x0bMetricValue\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x14\n\x05value\x18\x02 \x01(\x03R\x05value\x12\x1f\n\x0bmetric_type\x18\x03 \x01(\tR\nmetricType\x1a`\n\x0fObservedMetrics\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x39\n\x06values\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06valuesB\x0f\n\rresponse_type"A\n\x08KeyValue\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x19\n\x05value\x18\x02 
\x01(\tH\x00R\x05value\x88\x01\x01\x42\x08\n\x06_value"\x84\x08\n\rConfigRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12\x44\n\toperation\x18\x03 \x01(\x0b\x32&.spark.connect.ConfigRequest.OperationR\toperation\x12$\n\x0b\x63lient_type\x18\x04 \x01(\tH\x00R\nclientType\x88\x01\x01\x1a\xf2\x03\n\tOperation\x12\x34\n\x03set\x18\x01 \x01(\x0b\x32 .spark.connect.ConfigRequest.SetH\x00R\x03set\x12\x34\n\x03get\x18\x02 \x01(\x0b\x32 .spark.connect.ConfigRequest.GetH\x00R\x03get\x12W\n\x10get_with_default\x18\x03 \x01(\x0b\x32+.spark.connect.ConfigRequest.GetWithDefaultH\x00R\x0egetWithDefault\x12G\n\nget_option\x18\x04 \x01(\x0b\x32&.spark.connect.ConfigRequest.GetOptionH\x00R\tgetOption\x12>\n\x07get_all\x18\x05 \x01(\x0b\x32#.spark.connect.ConfigRequest.GetAllH\x00R\x06getAll\x12:\n\x05unset\x18\x06 \x01(\x0b\x32".spark.connect.ConfigRequest.UnsetH\x00R\x05unset\x12P\n\ris_modifiable\x18\x07 \x01(\x0b\x32).spark.connect.ConfigRequest.IsModifiableH\x00R\x0cisModifiableB\t\n\x07op_type\x1a\x34\n\x03Set\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x19\n\x03Get\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a?\n\x0eGetWithDefault\x12-\n\x05pairs\x18\x01 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x1a\x1f\n\tGetOption\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a\x30\n\x06GetAll\x12\x1b\n\x06prefix\x18\x01 \x01(\tH\x00R\x06prefix\x88\x01\x01\x42\t\n\x07_prefix\x1a\x1b\n\x05Unset\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keys\x1a"\n\x0cIsModifiable\x12\x12\n\x04keys\x18\x01 \x03(\tR\x04keysB\x0e\n\x0c_client_type"z\n\x0e\x43onfigResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12-\n\x05pairs\x18\x02 \x03(\x0b\x32\x17.spark.connect.KeyValueR\x05pairs\x12\x1a\n\x08warnings\x18\x03 \x03(\tR\x08warnings"\xe7\x06\n\x13\x41\x64\x64\x41rtifactsRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x06 \x01(\tH\x01R\nclientType\x88\x01\x01\x12@\n\x05\x62\x61tch\x18\x03 \x01(\x0b\x32(.spark.connect.AddArtifactsRequest.BatchH\x00R\x05\x62\x61tch\x12Z\n\x0b\x62\x65gin_chunk\x18\x04 \x01(\x0b\x32\x37.spark.connect.AddArtifactsRequest.BeginChunkedArtifactH\x00R\nbeginChunk\x12H\n\x05\x63hunk\x18\x05 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkH\x00R\x05\x63hunk\x1a\x35\n\rArtifactChunk\x12\x12\n\x04\x64\x61ta\x18\x01 \x01(\x0cR\x04\x64\x61ta\x12\x10\n\x03\x63rc\x18\x02 \x01(\x03R\x03\x63rc\x1ao\n\x13SingleChunkArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x44\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x04\x64\x61ta\x1a]\n\x05\x42\x61tch\x12T\n\tartifacts\x18\x01 \x03(\x0b\x32\x36.spark.connect.AddArtifactsRequest.SingleChunkArtifactR\tartifacts\x1a\xc1\x01\n\x14\x42\x65ginChunkedArtifact\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1f\n\x0btotal_bytes\x18\x02 \x01(\x03R\ntotalBytes\x12\x1d\n\nnum_chunks\x18\x03 \x01(\x03R\tnumChunks\x12U\n\rinitial_chunk\x18\x04 \x01(\x0b\x32\x30.spark.connect.AddArtifactsRequest.ArtifactChunkR\x0cinitialChunkB\t\n\x07payloadB\x0e\n\x0c_client_type"\xbc\x01\n\x14\x41\x64\x64\x41rtifactsResponse\x12Q\n\tartifacts\x18\x01 \x03(\x0b\x32\x33.spark.connect.AddArtifactsResponse.ArtifactSummaryR\tartifacts\x1aQ\n\x0f\x41rtifactSummary\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12*\n\x11is_crc_successful\x18\x02 
\x01(\x08R\x0fisCrcSuccessful"\xc3\x01\n\x17\x41rtifactStatusesRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x03 \x01(\tH\x00R\nclientType\x88\x01\x01\x12\x14\n\x05names\x18\x04 \x03(\tR\x05namesB\x0e\n\x0c_client_type"\x8c\x02\n\x18\x41rtifactStatusesResponse\x12Q\n\x08statuses\x18\x01 \x03(\x0b\x32\x35.spark.connect.ArtifactStatusesResponse.StatusesEntryR\x08statuses\x1a(\n\x0e\x41rtifactStatus\x12\x16\n\x06\x65xists\x18\x01 \x01(\x08R\x06\x65xists\x1as\n\rStatusesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12L\n\x05value\x18\x02 \x01(\x0b\x32\x36.spark.connect.ArtifactStatusesResponse.ArtifactStatusR\x05value:\x02\x38\x01"\xc5\x02\n\x10InterruptRequest\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId\x12=\n\x0cuser_context\x18\x02 \x01(\x0b\x32\x1a.spark.connect.UserContextR\x0buserContext\x12$\n\x0b\x63lient_type\x18\x03 \x01(\tH\x00R\nclientType\x88\x01\x01\x12T\n\x0einterrupt_type\x18\x04 \x01(\x0e\x32-.spark.connect.InterruptRequest.InterruptTypeR\rinterruptType"G\n\rInterruptType\x12\x1e\n\x1aINTERRUPT_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12INTERRUPT_TYPE_ALL\x10\x01\x42\x0e\n\x0c_client_type"2\n\x11InterruptResponse\x12\x1d\n\nsession_id\x18\x01 \x01(\tR\tsessionId2\xa4\x04\n\x13SparkConnectService\x12X\n\x0b\x45xecutePlan\x12!.spark.connect.ExecutePlanRequest\x1a".spark.connect.ExecutePlanResponse"\x00\x30\x01\x12V\n\x0b\x41nalyzePlan\x12!.spark.connect.AnalyzePlanRequest\x1a".spark.connect.AnalyzePlanResponse"\x00\x12G\n\x06\x43onfig\x12\x1c.spark.connect.ConfigRequest\x1a\x1d.spark.connect.ConfigResponse"\x00\x12[\n\x0c\x41\x64\x64\x41rtifacts\x12".spark.connect.AddArtifactsRequest\x1a#.spark.connect.AddArtifactsResponse"\x00(\x01\x12\x63\n\x0e\x41rtifactStatus\x12&.spark.connect.ArtifactStatusesRequest\x1a\'.spark.connect.ArtifactStatusesResponse"\x00\x12P\n\tInterrupt\x12\x1f.spark.connect.InterruptRequest\x1a .spark.connect.InterruptResponse"\x00\x42\x36\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_PLAN = DESCRIPTOR.message_types_by_name["Plan"] -_USERCONTEXT = DESCRIPTOR.message_types_by_name["UserContext"] -_ANALYZEPLANREQUEST = DESCRIPTOR.message_types_by_name["AnalyzePlanRequest"] -_ANALYZEPLANREQUEST_SCHEMA = _ANALYZEPLANREQUEST.nested_types_by_name["Schema"] -_ANALYZEPLANREQUEST_EXPLAIN = _ANALYZEPLANREQUEST.nested_types_by_name["Explain"] -_ANALYZEPLANREQUEST_TREESTRING = _ANALYZEPLANREQUEST.nested_types_by_name["TreeString"] -_ANALYZEPLANREQUEST_ISLOCAL = _ANALYZEPLANREQUEST.nested_types_by_name["IsLocal"] -_ANALYZEPLANREQUEST_ISSTREAMING = _ANALYZEPLANREQUEST.nested_types_by_name["IsStreaming"] -_ANALYZEPLANREQUEST_INPUTFILES = _ANALYZEPLANREQUEST.nested_types_by_name["InputFiles"] -_ANALYZEPLANREQUEST_SPARKVERSION = _ANALYZEPLANREQUEST.nested_types_by_name["SparkVersion"] -_ANALYZEPLANREQUEST_DDLPARSE = _ANALYZEPLANREQUEST.nested_types_by_name["DDLParse"] -_ANALYZEPLANREQUEST_SAMESEMANTICS = _ANALYZEPLANREQUEST.nested_types_by_name["SameSemantics"] -_ANALYZEPLANREQUEST_SEMANTICHASH = _ANALYZEPLANREQUEST.nested_types_by_name["SemanticHash"] -_ANALYZEPLANREQUEST_PERSIST = _ANALYZEPLANREQUEST.nested_types_by_name["Persist"] -_ANALYZEPLANREQUEST_UNPERSIST = _ANALYZEPLANREQUEST.nested_types_by_name["Unpersist"] -_ANALYZEPLANREQUEST_GETSTORAGELEVEL = _ANALYZEPLANREQUEST.nested_types_by_name["GetStorageLevel"] -_ANALYZEPLANRESPONSE = DESCRIPTOR.message_types_by_name["AnalyzePlanResponse"] 
-_ANALYZEPLANRESPONSE_SCHEMA = _ANALYZEPLANRESPONSE.nested_types_by_name["Schema"] -_ANALYZEPLANRESPONSE_EXPLAIN = _ANALYZEPLANRESPONSE.nested_types_by_name["Explain"] -_ANALYZEPLANRESPONSE_TREESTRING = _ANALYZEPLANRESPONSE.nested_types_by_name["TreeString"] -_ANALYZEPLANRESPONSE_ISLOCAL = _ANALYZEPLANRESPONSE.nested_types_by_name["IsLocal"] -_ANALYZEPLANRESPONSE_ISSTREAMING = _ANALYZEPLANRESPONSE.nested_types_by_name["IsStreaming"] -_ANALYZEPLANRESPONSE_INPUTFILES = _ANALYZEPLANRESPONSE.nested_types_by_name["InputFiles"] -_ANALYZEPLANRESPONSE_SPARKVERSION = _ANALYZEPLANRESPONSE.nested_types_by_name["SparkVersion"] -_ANALYZEPLANRESPONSE_DDLPARSE = _ANALYZEPLANRESPONSE.nested_types_by_name["DDLParse"] -_ANALYZEPLANRESPONSE_SAMESEMANTICS = _ANALYZEPLANRESPONSE.nested_types_by_name["SameSemantics"] -_ANALYZEPLANRESPONSE_SEMANTICHASH = _ANALYZEPLANRESPONSE.nested_types_by_name["SemanticHash"] -_ANALYZEPLANRESPONSE_PERSIST = _ANALYZEPLANRESPONSE.nested_types_by_name["Persist"] -_ANALYZEPLANRESPONSE_UNPERSIST = _ANALYZEPLANRESPONSE.nested_types_by_name["Unpersist"] -_ANALYZEPLANRESPONSE_GETSTORAGELEVEL = _ANALYZEPLANRESPONSE.nested_types_by_name["GetStorageLevel"] -_EXECUTEPLANREQUEST = DESCRIPTOR.message_types_by_name["ExecutePlanRequest"] -_EXECUTEPLANREQUEST_REQUESTOPTION = _EXECUTEPLANREQUEST.nested_types_by_name["RequestOption"] -_EXECUTEPLANRESPONSE = DESCRIPTOR.message_types_by_name["ExecutePlanResponse"] -_EXECUTEPLANRESPONSE_SQLCOMMANDRESULT = _EXECUTEPLANRESPONSE.nested_types_by_name[ - "SqlCommandResult" -] -_EXECUTEPLANRESPONSE_ARROWBATCH = _EXECUTEPLANRESPONSE.nested_types_by_name["ArrowBatch"] -_EXECUTEPLANRESPONSE_METRICS = _EXECUTEPLANRESPONSE.nested_types_by_name["Metrics"] -_EXECUTEPLANRESPONSE_METRICS_METRICOBJECT = _EXECUTEPLANRESPONSE_METRICS.nested_types_by_name[ - "MetricObject" -] -_EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY = ( - _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT.nested_types_by_name["ExecutionMetricsEntry"] -) -_EXECUTEPLANRESPONSE_METRICS_METRICVALUE = _EXECUTEPLANRESPONSE_METRICS.nested_types_by_name[ - "MetricValue" -] -_EXECUTEPLANRESPONSE_OBSERVEDMETRICS = _EXECUTEPLANRESPONSE.nested_types_by_name["ObservedMetrics"] -_KEYVALUE = DESCRIPTOR.message_types_by_name["KeyValue"] -_CONFIGREQUEST = DESCRIPTOR.message_types_by_name["ConfigRequest"] -_CONFIGREQUEST_OPERATION = _CONFIGREQUEST.nested_types_by_name["Operation"] -_CONFIGREQUEST_SET = _CONFIGREQUEST.nested_types_by_name["Set"] -_CONFIGREQUEST_GET = _CONFIGREQUEST.nested_types_by_name["Get"] -_CONFIGREQUEST_GETWITHDEFAULT = _CONFIGREQUEST.nested_types_by_name["GetWithDefault"] -_CONFIGREQUEST_GETOPTION = _CONFIGREQUEST.nested_types_by_name["GetOption"] -_CONFIGREQUEST_GETALL = _CONFIGREQUEST.nested_types_by_name["GetAll"] -_CONFIGREQUEST_UNSET = _CONFIGREQUEST.nested_types_by_name["Unset"] -_CONFIGREQUEST_ISMODIFIABLE = _CONFIGREQUEST.nested_types_by_name["IsModifiable"] -_CONFIGRESPONSE = DESCRIPTOR.message_types_by_name["ConfigResponse"] -_ADDARTIFACTSREQUEST = DESCRIPTOR.message_types_by_name["AddArtifactsRequest"] -_ADDARTIFACTSREQUEST_ARTIFACTCHUNK = _ADDARTIFACTSREQUEST.nested_types_by_name["ArtifactChunk"] -_ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT = _ADDARTIFACTSREQUEST.nested_types_by_name[ - "SingleChunkArtifact" -] -_ADDARTIFACTSREQUEST_BATCH = _ADDARTIFACTSREQUEST.nested_types_by_name["Batch"] -_ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT = _ADDARTIFACTSREQUEST.nested_types_by_name[ - "BeginChunkedArtifact" -] -_ADDARTIFACTSRESPONSE = 
DESCRIPTOR.message_types_by_name["AddArtifactsResponse"] -_ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY = _ADDARTIFACTSRESPONSE.nested_types_by_name[ - "ArtifactSummary" -] -_ARTIFACTSTATUSESREQUEST = DESCRIPTOR.message_types_by_name["ArtifactStatusesRequest"] -_ARTIFACTSTATUSESRESPONSE = DESCRIPTOR.message_types_by_name["ArtifactStatusesResponse"] -_ARTIFACTSTATUSESRESPONSE_ARTIFACTSTATUS = _ARTIFACTSTATUSESRESPONSE.nested_types_by_name[ - "ArtifactStatus" -] -_ARTIFACTSTATUSESRESPONSE_STATUSESENTRY = _ARTIFACTSTATUSESRESPONSE.nested_types_by_name[ - "StatusesEntry" -] -_INTERRUPTREQUEST = DESCRIPTOR.message_types_by_name["InterruptRequest"] -_INTERRUPTRESPONSE = DESCRIPTOR.message_types_by_name["InterruptResponse"] -_ANALYZEPLANREQUEST_EXPLAIN_EXPLAINMODE = _ANALYZEPLANREQUEST_EXPLAIN.enum_types_by_name[ - "ExplainMode" -] -_INTERRUPTREQUEST_INTERRUPTTYPE = _INTERRUPTREQUEST.enum_types_by_name["InterruptType"] -Plan = _reflection.GeneratedProtocolMessageType( - "Plan", - (_message.Message,), - { - "DESCRIPTOR": _PLAN, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Plan) - }, -) -_sym_db.RegisterMessage(Plan) - -UserContext = _reflection.GeneratedProtocolMessageType( - "UserContext", - (_message.Message,), - { - "DESCRIPTOR": _USERCONTEXT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.UserContext) - }, -) -_sym_db.RegisterMessage(UserContext) - -AnalyzePlanRequest = _reflection.GeneratedProtocolMessageType( - "AnalyzePlanRequest", - (_message.Message,), - { - "Schema": _reflection.GeneratedProtocolMessageType( - "Schema", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_SCHEMA, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.Schema) - }, - ), - "Explain": _reflection.GeneratedProtocolMessageType( - "Explain", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_EXPLAIN, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.Explain) - }, - ), - "TreeString": _reflection.GeneratedProtocolMessageType( - "TreeString", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_TREESTRING, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.TreeString) - }, - ), - "IsLocal": _reflection.GeneratedProtocolMessageType( - "IsLocal", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_ISLOCAL, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.IsLocal) - }, - ), - "IsStreaming": _reflection.GeneratedProtocolMessageType( - "IsStreaming", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_ISSTREAMING, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.IsStreaming) - }, - ), - "InputFiles": _reflection.GeneratedProtocolMessageType( - "InputFiles", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_INPUTFILES, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.InputFiles) - }, - ), - "SparkVersion": _reflection.GeneratedProtocolMessageType( - "SparkVersion", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_SPARKVERSION, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.SparkVersion) - }, - ), - 
"DDLParse": _reflection.GeneratedProtocolMessageType( - "DDLParse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_DDLPARSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.DDLParse) - }, - ), - "SameSemantics": _reflection.GeneratedProtocolMessageType( - "SameSemantics", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_SAMESEMANTICS, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.SameSemantics) - }, - ), - "SemanticHash": _reflection.GeneratedProtocolMessageType( - "SemanticHash", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_SEMANTICHASH, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.SemanticHash) - }, - ), - "Persist": _reflection.GeneratedProtocolMessageType( - "Persist", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_PERSIST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.Persist) - }, - ), - "Unpersist": _reflection.GeneratedProtocolMessageType( - "Unpersist", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_UNPERSIST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.Unpersist) - }, - ), - "GetStorageLevel": _reflection.GeneratedProtocolMessageType( - "GetStorageLevel", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANREQUEST_GETSTORAGELEVEL, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest.GetStorageLevel) - }, - ), - "DESCRIPTOR": _ANALYZEPLANREQUEST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanRequest) - }, -) -_sym_db.RegisterMessage(AnalyzePlanRequest) -_sym_db.RegisterMessage(AnalyzePlanRequest.Schema) -_sym_db.RegisterMessage(AnalyzePlanRequest.Explain) -_sym_db.RegisterMessage(AnalyzePlanRequest.TreeString) -_sym_db.RegisterMessage(AnalyzePlanRequest.IsLocal) -_sym_db.RegisterMessage(AnalyzePlanRequest.IsStreaming) -_sym_db.RegisterMessage(AnalyzePlanRequest.InputFiles) -_sym_db.RegisterMessage(AnalyzePlanRequest.SparkVersion) -_sym_db.RegisterMessage(AnalyzePlanRequest.DDLParse) -_sym_db.RegisterMessage(AnalyzePlanRequest.SameSemantics) -_sym_db.RegisterMessage(AnalyzePlanRequest.SemanticHash) -_sym_db.RegisterMessage(AnalyzePlanRequest.Persist) -_sym_db.RegisterMessage(AnalyzePlanRequest.Unpersist) -_sym_db.RegisterMessage(AnalyzePlanRequest.GetStorageLevel) - -AnalyzePlanResponse = _reflection.GeneratedProtocolMessageType( - "AnalyzePlanResponse", - (_message.Message,), - { - "Schema": _reflection.GeneratedProtocolMessageType( - "Schema", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_SCHEMA, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.Schema) - }, - ), - "Explain": _reflection.GeneratedProtocolMessageType( - "Explain", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_EXPLAIN, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.Explain) - }, - ), - "TreeString": _reflection.GeneratedProtocolMessageType( - "TreeString", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_TREESTRING, - "__module__": "spark.connect.base_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.TreeString) - }, - ), - "IsLocal": _reflection.GeneratedProtocolMessageType( - "IsLocal", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_ISLOCAL, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.IsLocal) - }, - ), - "IsStreaming": _reflection.GeneratedProtocolMessageType( - "IsStreaming", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_ISSTREAMING, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.IsStreaming) - }, - ), - "InputFiles": _reflection.GeneratedProtocolMessageType( - "InputFiles", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_INPUTFILES, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.InputFiles) - }, - ), - "SparkVersion": _reflection.GeneratedProtocolMessageType( - "SparkVersion", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_SPARKVERSION, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.SparkVersion) - }, - ), - "DDLParse": _reflection.GeneratedProtocolMessageType( - "DDLParse", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_DDLPARSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.DDLParse) - }, - ), - "SameSemantics": _reflection.GeneratedProtocolMessageType( - "SameSemantics", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_SAMESEMANTICS, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.SameSemantics) - }, - ), - "SemanticHash": _reflection.GeneratedProtocolMessageType( - "SemanticHash", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_SEMANTICHASH, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.SemanticHash) - }, - ), - "Persist": _reflection.GeneratedProtocolMessageType( - "Persist", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_PERSIST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.Persist) - }, - ), - "Unpersist": _reflection.GeneratedProtocolMessageType( - "Unpersist", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_UNPERSIST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.Unpersist) - }, - ), - "GetStorageLevel": _reflection.GeneratedProtocolMessageType( - "GetStorageLevel", - (_message.Message,), - { - "DESCRIPTOR": _ANALYZEPLANRESPONSE_GETSTORAGELEVEL, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse.GetStorageLevel) - }, - ), - "DESCRIPTOR": _ANALYZEPLANRESPONSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AnalyzePlanResponse) - }, -) -_sym_db.RegisterMessage(AnalyzePlanResponse) -_sym_db.RegisterMessage(AnalyzePlanResponse.Schema) -_sym_db.RegisterMessage(AnalyzePlanResponse.Explain) -_sym_db.RegisterMessage(AnalyzePlanResponse.TreeString) -_sym_db.RegisterMessage(AnalyzePlanResponse.IsLocal) -_sym_db.RegisterMessage(AnalyzePlanResponse.IsStreaming) -_sym_db.RegisterMessage(AnalyzePlanResponse.InputFiles) 
-_sym_db.RegisterMessage(AnalyzePlanResponse.SparkVersion) -_sym_db.RegisterMessage(AnalyzePlanResponse.DDLParse) -_sym_db.RegisterMessage(AnalyzePlanResponse.SameSemantics) -_sym_db.RegisterMessage(AnalyzePlanResponse.SemanticHash) -_sym_db.RegisterMessage(AnalyzePlanResponse.Persist) -_sym_db.RegisterMessage(AnalyzePlanResponse.Unpersist) -_sym_db.RegisterMessage(AnalyzePlanResponse.GetStorageLevel) - -ExecutePlanRequest = _reflection.GeneratedProtocolMessageType( - "ExecutePlanRequest", - (_message.Message,), - { - "RequestOption": _reflection.GeneratedProtocolMessageType( - "RequestOption", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANREQUEST_REQUESTOPTION, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanRequest.RequestOption) - }, - ), - "DESCRIPTOR": _EXECUTEPLANREQUEST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanRequest) - }, -) -_sym_db.RegisterMessage(ExecutePlanRequest) -_sym_db.RegisterMessage(ExecutePlanRequest.RequestOption) - -ExecutePlanResponse = _reflection.GeneratedProtocolMessageType( - "ExecutePlanResponse", - (_message.Message,), - { - "SqlCommandResult": _reflection.GeneratedProtocolMessageType( - "SqlCommandResult", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANRESPONSE_SQLCOMMANDRESULT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.SqlCommandResult) - }, - ), - "ArrowBatch": _reflection.GeneratedProtocolMessageType( - "ArrowBatch", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANRESPONSE_ARROWBATCH, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.ArrowBatch) - }, - ), - "Metrics": _reflection.GeneratedProtocolMessageType( - "Metrics", - (_message.Message,), - { - "MetricObject": _reflection.GeneratedProtocolMessageType( - "MetricObject", - (_message.Message,), - { - "ExecutionMetricsEntry": _reflection.GeneratedProtocolMessageType( - "ExecutionMetricsEntry", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT_EXECUTIONMETRICSENTRY, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntry) - }, - ), - "DESCRIPTOR": _EXECUTEPLANRESPONSE_METRICS_METRICOBJECT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.Metrics.MetricObject) - }, - ), - "MetricValue": _reflection.GeneratedProtocolMessageType( - "MetricValue", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANRESPONSE_METRICS_METRICVALUE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.Metrics.MetricValue) - }, - ), - "DESCRIPTOR": _EXECUTEPLANRESPONSE_METRICS, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.Metrics) - }, - ), - "ObservedMetrics": _reflection.GeneratedProtocolMessageType( - "ObservedMetrics", - (_message.Message,), - { - "DESCRIPTOR": _EXECUTEPLANRESPONSE_OBSERVEDMETRICS, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse.ObservedMetrics) - }, - ), - "DESCRIPTOR": _EXECUTEPLANRESPONSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExecutePlanResponse) - }, -) 
-_sym_db.RegisterMessage(ExecutePlanResponse) -_sym_db.RegisterMessage(ExecutePlanResponse.SqlCommandResult) -_sym_db.RegisterMessage(ExecutePlanResponse.ArrowBatch) -_sym_db.RegisterMessage(ExecutePlanResponse.Metrics) -_sym_db.RegisterMessage(ExecutePlanResponse.Metrics.MetricObject) -_sym_db.RegisterMessage(ExecutePlanResponse.Metrics.MetricObject.ExecutionMetricsEntry) -_sym_db.RegisterMessage(ExecutePlanResponse.Metrics.MetricValue) -_sym_db.RegisterMessage(ExecutePlanResponse.ObservedMetrics) - -KeyValue = _reflection.GeneratedProtocolMessageType( - "KeyValue", - (_message.Message,), - { - "DESCRIPTOR": _KEYVALUE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.KeyValue) - }, -) -_sym_db.RegisterMessage(KeyValue) - -ConfigRequest = _reflection.GeneratedProtocolMessageType( - "ConfigRequest", - (_message.Message,), - { - "Operation": _reflection.GeneratedProtocolMessageType( - "Operation", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_OPERATION, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.Operation) - }, - ), - "Set": _reflection.GeneratedProtocolMessageType( - "Set", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_SET, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.Set) - }, - ), - "Get": _reflection.GeneratedProtocolMessageType( - "Get", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_GET, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.Get) - }, - ), - "GetWithDefault": _reflection.GeneratedProtocolMessageType( - "GetWithDefault", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_GETWITHDEFAULT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.GetWithDefault) - }, - ), - "GetOption": _reflection.GeneratedProtocolMessageType( - "GetOption", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_GETOPTION, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.GetOption) - }, - ), - "GetAll": _reflection.GeneratedProtocolMessageType( - "GetAll", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_GETALL, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.GetAll) - }, - ), - "Unset": _reflection.GeneratedProtocolMessageType( - "Unset", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_UNSET, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.Unset) - }, - ), - "IsModifiable": _reflection.GeneratedProtocolMessageType( - "IsModifiable", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGREQUEST_ISMODIFIABLE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest.IsModifiable) - }, - ), - "DESCRIPTOR": _CONFIGREQUEST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigRequest) - }, -) -_sym_db.RegisterMessage(ConfigRequest) -_sym_db.RegisterMessage(ConfigRequest.Operation) -_sym_db.RegisterMessage(ConfigRequest.Set) -_sym_db.RegisterMessage(ConfigRequest.Get) -_sym_db.RegisterMessage(ConfigRequest.GetWithDefault) -_sym_db.RegisterMessage(ConfigRequest.GetOption) -_sym_db.RegisterMessage(ConfigRequest.GetAll) -_sym_db.RegisterMessage(ConfigRequest.Unset) 
-_sym_db.RegisterMessage(ConfigRequest.IsModifiable) - -ConfigResponse = _reflection.GeneratedProtocolMessageType( - "ConfigResponse", - (_message.Message,), - { - "DESCRIPTOR": _CONFIGRESPONSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ConfigResponse) - }, -) -_sym_db.RegisterMessage(ConfigResponse) - -AddArtifactsRequest = _reflection.GeneratedProtocolMessageType( - "AddArtifactsRequest", - (_message.Message,), - { - "ArtifactChunk": _reflection.GeneratedProtocolMessageType( - "ArtifactChunk", - (_message.Message,), - { - "DESCRIPTOR": _ADDARTIFACTSREQUEST_ARTIFACTCHUNK, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsRequest.ArtifactChunk) - }, - ), - "SingleChunkArtifact": _reflection.GeneratedProtocolMessageType( - "SingleChunkArtifact", - (_message.Message,), - { - "DESCRIPTOR": _ADDARTIFACTSREQUEST_SINGLECHUNKARTIFACT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsRequest.SingleChunkArtifact) - }, - ), - "Batch": _reflection.GeneratedProtocolMessageType( - "Batch", - (_message.Message,), - { - "DESCRIPTOR": _ADDARTIFACTSREQUEST_BATCH, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsRequest.Batch) - }, - ), - "BeginChunkedArtifact": _reflection.GeneratedProtocolMessageType( - "BeginChunkedArtifact", - (_message.Message,), - { - "DESCRIPTOR": _ADDARTIFACTSREQUEST_BEGINCHUNKEDARTIFACT, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsRequest.BeginChunkedArtifact) - }, - ), - "DESCRIPTOR": _ADDARTIFACTSREQUEST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsRequest) - }, -) -_sym_db.RegisterMessage(AddArtifactsRequest) -_sym_db.RegisterMessage(AddArtifactsRequest.ArtifactChunk) -_sym_db.RegisterMessage(AddArtifactsRequest.SingleChunkArtifact) -_sym_db.RegisterMessage(AddArtifactsRequest.Batch) -_sym_db.RegisterMessage(AddArtifactsRequest.BeginChunkedArtifact) - -AddArtifactsResponse = _reflection.GeneratedProtocolMessageType( - "AddArtifactsResponse", - (_message.Message,), - { - "ArtifactSummary": _reflection.GeneratedProtocolMessageType( - "ArtifactSummary", - (_message.Message,), - { - "DESCRIPTOR": _ADDARTIFACTSRESPONSE_ARTIFACTSUMMARY, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsResponse.ArtifactSummary) - }, - ), - "DESCRIPTOR": _ADDARTIFACTSRESPONSE, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.AddArtifactsResponse) - }, -) -_sym_db.RegisterMessage(AddArtifactsResponse) -_sym_db.RegisterMessage(AddArtifactsResponse.ArtifactSummary) - -ArtifactStatusesRequest = _reflection.GeneratedProtocolMessageType( - "ArtifactStatusesRequest", - (_message.Message,), - { - "DESCRIPTOR": _ARTIFACTSTATUSESREQUEST, - "__module__": "spark.connect.base_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ArtifactStatusesRequest) - }, -) -_sym_db.RegisterMessage(ArtifactStatusesRequest) - -ArtifactStatusesResponse = _reflection.GeneratedProtocolMessageType( - "ArtifactStatusesResponse", - (_message.Message,), - { - "ArtifactStatus": _reflection.GeneratedProtocolMessageType( - "ArtifactStatus", - (_message.Message,), - { - "DESCRIPTOR": _ARTIFACTSTATUSESRESPONSE_ARTIFACTSTATUS, - "__module__": "spark.connect.base_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.ArtifactStatusesResponse.ArtifactStatus)
-            },
-        ),
-        "StatusesEntry": _reflection.GeneratedProtocolMessageType(
-            "StatusesEntry",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _ARTIFACTSTATUSESRESPONSE_STATUSESENTRY,
-                "__module__": "spark.connect.base_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.ArtifactStatusesResponse.StatusesEntry)
-            },
-        ),
-        "DESCRIPTOR": _ARTIFACTSTATUSESRESPONSE,
-        "__module__": "spark.connect.base_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.ArtifactStatusesResponse)
-    },
-)
-_sym_db.RegisterMessage(ArtifactStatusesResponse)
-_sym_db.RegisterMessage(ArtifactStatusesResponse.ArtifactStatus)
-_sym_db.RegisterMessage(ArtifactStatusesResponse.StatusesEntry)
-
-InterruptRequest = _reflection.GeneratedProtocolMessageType(
-    "InterruptRequest",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _INTERRUPTREQUEST,
-        "__module__": "spark.connect.base_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.InterruptRequest)
-    },
-)
-_sym_db.RegisterMessage(InterruptRequest)
-
-InterruptResponse = _reflection.GeneratedProtocolMessageType(
-    "InterruptResponse",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _INTERRUPTRESPONSE,
-        "__module__": "spark.connect.base_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.InterruptResponse)
-    },
-)
-_sym_db.RegisterMessage(InterruptResponse)
-
-_SPARKCONNECTSERVICE = DESCRIPTOR.services_by_name["SparkConnectService"]
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.base_pb2", globals())
 if _descriptor._USE_C_DESCRIPTORS == False:
     DESCRIPTOR._options = None
diff --git a/python/pyspark/sql/connect/proto/catalog_pb2.py b/python/pyspark/sql/connect/proto/catalog_pb2.py
index 1680eca73146b..709f0f005c7d3 100644
--- a/python/pyspark/sql/connect/proto/catalog_pb2.py
+++ b/python/pyspark/sql/connect/proto/catalog_pb2.py
@@ -18,10 +18,9 @@
 # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/catalog.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -37,353 +36,8 @@ b'\n\x1bspark/connect/catalog.proto\x12\rspark.connect\x1a\x1aspark/connect/common.proto\x1a\x19spark/connect/types.proto"\xc6\x0e\n\x07\x43\x61talog\x12K\n\x10\x63urrent_database\x18\x01 \x01(\x0b\x32\x1e.spark.connect.CurrentDatabaseH\x00R\x0f\x63urrentDatabase\x12U\n\x14set_current_database\x18\x02 \x01(\x0b\x32!.spark.connect.SetCurrentDatabaseH\x00R\x12setCurrentDatabase\x12\x45\n\x0elist_databases\x18\x03 \x01(\x0b\x32\x1c.spark.connect.ListDatabasesH\x00R\rlistDatabases\x12<\n\x0blist_tables\x18\x04 \x01(\x0b\x32\x19.spark.connect.ListTablesH\x00R\nlistTables\x12\x45\n\x0elist_functions\x18\x05 \x01(\x0b\x32\x1c.spark.connect.ListFunctionsH\x00R\rlistFunctions\x12?\n\x0clist_columns\x18\x06 \x01(\x0b\x32\x1a.spark.connect.ListColumnsH\x00R\x0blistColumns\x12?\n\x0cget_database\x18\x07 \x01(\x0b\x32\x1a.spark.connect.GetDatabaseH\x00R\x0bgetDatabase\x12\x36\n\tget_table\x18\x08 \x01(\x0b\x32\x17.spark.connect.GetTableH\x00R\x08getTable\x12?\n\x0cget_function\x18\t \x01(\x0b\x32\x1a.spark.connect.GetFunctionH\x00R\x0bgetFunction\x12H\n\x0f\x64\x61tabase_exists\x18\n \x01(\x0b\x32\x1d.spark.connect.DatabaseExistsH\x00R\x0e\x64\x61tabaseExists\x12?\n\x0ctable_exists\x18\x0b \x01(\x0b\x32\x1a.spark.connect.TableExistsH\x00R\x0btableExists\x12H\n\x0f\x66unction_exists\x18\x0c \x01(\x0b\x32\x1d.spark.connect.FunctionExistsH\x00R\x0e\x66unctionExists\x12X\n\x15\x63reate_external_table\x18\r \x01(\x0b\x32".spark.connect.CreateExternalTableH\x00R\x13\x63reateExternalTable\x12?\n\x0c\x63reate_table\x18\x0e \x01(\x0b\x32\x1a.spark.connect.CreateTableH\x00R\x0b\x63reateTable\x12\x43\n\x0e\x64rop_temp_view\x18\x0f \x01(\x0b\x32\x1b.spark.connect.DropTempViewH\x00R\x0c\x64ropTempView\x12V\n\x15\x64rop_global_temp_view\x18\x10 \x01(\x0b\x32!.spark.connect.DropGlobalTempViewH\x00R\x12\x64ropGlobalTempView\x12Q\n\x12recover_partitions\x18\x11 \x01(\x0b\x32 .spark.connect.RecoverPartitionsH\x00R\x11recoverPartitions\x12\x36\n\tis_cached\x18\x12 \x01(\x0b\x32\x17.spark.connect.IsCachedH\x00R\x08isCached\x12<\n\x0b\x63\x61\x63he_table\x18\x13 \x01(\x0b\x32\x19.spark.connect.CacheTableH\x00R\ncacheTable\x12\x42\n\runcache_table\x18\x14 \x01(\x0b\x32\x1b.spark.connect.UncacheTableH\x00R\x0cuncacheTable\x12<\n\x0b\x63lear_cache\x18\x15 \x01(\x0b\x32\x19.spark.connect.ClearCacheH\x00R\nclearCache\x12\x42\n\rrefresh_table\x18\x16 \x01(\x0b\x32\x1b.spark.connect.RefreshTableH\x00R\x0crefreshTable\x12\x46\n\x0frefresh_by_path\x18\x17 \x01(\x0b\x32\x1c.spark.connect.RefreshByPathH\x00R\rrefreshByPath\x12H\n\x0f\x63urrent_catalog\x18\x18 \x01(\x0b\x32\x1d.spark.connect.CurrentCatalogH\x00R\x0e\x63urrentCatalog\x12R\n\x13set_current_catalog\x18\x19 \x01(\x0b\x32 .spark.connect.SetCurrentCatalogH\x00R\x11setCurrentCatalog\x12\x42\n\rlist_catalogs\x18\x1a \x01(\x0b\x32\x1b.spark.connect.ListCatalogsH\x00R\x0clistCatalogsB\n\n\x08\x63\x61t_type"\x11\n\x0f\x43urrentDatabase"-\n\x12SetCurrentDatabase\x12\x17\n\x07\x64\x62_name\x18\x01 \x01(\tR\x06\x64\x62Name":\n\rListDatabases\x12\x1d\n\x07pattern\x18\x01 
\x01(\tH\x00R\x07pattern\x88\x01\x01\x42\n\n\x08_pattern"a\n\nListTables\x12\x1c\n\x07\x64\x62_name\x18\x01 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x12\x1d\n\x07pattern\x18\x02 \x01(\tH\x01R\x07pattern\x88\x01\x01\x42\n\n\x08_db_nameB\n\n\x08_pattern"d\n\rListFunctions\x12\x1c\n\x07\x64\x62_name\x18\x01 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x12\x1d\n\x07pattern\x18\x02 \x01(\tH\x01R\x07pattern\x88\x01\x01\x42\n\n\x08_db_nameB\n\n\x08_pattern"V\n\x0bListColumns\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x1c\n\x07\x64\x62_name\x18\x02 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x42\n\n\x08_db_name"&\n\x0bGetDatabase\x12\x17\n\x07\x64\x62_name\x18\x01 \x01(\tR\x06\x64\x62Name"S\n\x08GetTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x1c\n\x07\x64\x62_name\x18\x02 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x42\n\n\x08_db_name"\\\n\x0bGetFunction\x12#\n\rfunction_name\x18\x01 \x01(\tR\x0c\x66unctionName\x12\x1c\n\x07\x64\x62_name\x18\x02 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x42\n\n\x08_db_name")\n\x0e\x44\x61tabaseExists\x12\x17\n\x07\x64\x62_name\x18\x01 \x01(\tR\x06\x64\x62Name"V\n\x0bTableExists\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x1c\n\x07\x64\x62_name\x18\x02 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x42\n\n\x08_db_name"_\n\x0e\x46unctionExists\x12#\n\rfunction_name\x18\x01 \x01(\tR\x0c\x66unctionName\x12\x1c\n\x07\x64\x62_name\x18\x02 \x01(\tH\x00R\x06\x64\x62Name\x88\x01\x01\x42\n\n\x08_db_name"\xc6\x02\n\x13\x43reateExternalTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x17\n\x04path\x18\x02 \x01(\tH\x00R\x04path\x88\x01\x01\x12\x1b\n\x06source\x18\x03 \x01(\tH\x01R\x06source\x88\x01\x01\x12\x34\n\x06schema\x18\x04 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x02R\x06schema\x88\x01\x01\x12I\n\x07options\x18\x05 \x03(\x0b\x32/.spark.connect.CreateExternalTable.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x07\n\x05_pathB\t\n\x07_sourceB\t\n\x07_schema"\xed\x02\n\x0b\x43reateTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x17\n\x04path\x18\x02 \x01(\tH\x00R\x04path\x88\x01\x01\x12\x1b\n\x06source\x18\x03 \x01(\tH\x01R\x06source\x88\x01\x01\x12%\n\x0b\x64\x65scription\x18\x04 \x01(\tH\x02R\x0b\x64\x65scription\x88\x01\x01\x12\x34\n\x06schema\x18\x05 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x03R\x06schema\x88\x01\x01\x12\x41\n\x07options\x18\x06 \x03(\x0b\x32\'.spark.connect.CreateTable.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x07\n\x05_pathB\t\n\x07_sourceB\x0e\n\x0c_descriptionB\t\n\x07_schema"+\n\x0c\x44ropTempView\x12\x1b\n\tview_name\x18\x01 \x01(\tR\x08viewName"1\n\x12\x44ropGlobalTempView\x12\x1b\n\tview_name\x18\x01 \x01(\tR\x08viewName"2\n\x11RecoverPartitions\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName")\n\x08IsCached\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName"\x84\x01\n\nCacheTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12\x45\n\rstorage_level\x18\x02 \x01(\x0b\x32\x1b.spark.connect.StorageLevelH\x00R\x0cstorageLevel\x88\x01\x01\x42\x10\n\x0e_storage_level"-\n\x0cUncacheTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName"\x0c\n\nClearCache"-\n\x0cRefreshTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName"#\n\rRefreshByPath\x12\x12\n\x04path\x18\x01 \x01(\tR\x04path"\x10\n\x0e\x43urrentCatalog"6\n\x11SetCurrentCatalog\x12!\n\x0c\x63\x61talog_name\x18\x01 
\x01(\tR\x0b\x63\x61talogName"9\n\x0cListCatalogs\x12\x1d\n\x07pattern\x18\x01 \x01(\tH\x00R\x07pattern\x88\x01\x01\x42\n\n\x08_patternB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_CATALOG = DESCRIPTOR.message_types_by_name["Catalog"] -_CURRENTDATABASE = DESCRIPTOR.message_types_by_name["CurrentDatabase"] -_SETCURRENTDATABASE = DESCRIPTOR.message_types_by_name["SetCurrentDatabase"] -_LISTDATABASES = DESCRIPTOR.message_types_by_name["ListDatabases"] -_LISTTABLES = DESCRIPTOR.message_types_by_name["ListTables"] -_LISTFUNCTIONS = DESCRIPTOR.message_types_by_name["ListFunctions"] -_LISTCOLUMNS = DESCRIPTOR.message_types_by_name["ListColumns"] -_GETDATABASE = DESCRIPTOR.message_types_by_name["GetDatabase"] -_GETTABLE = DESCRIPTOR.message_types_by_name["GetTable"] -_GETFUNCTION = DESCRIPTOR.message_types_by_name["GetFunction"] -_DATABASEEXISTS = DESCRIPTOR.message_types_by_name["DatabaseExists"] -_TABLEEXISTS = DESCRIPTOR.message_types_by_name["TableExists"] -_FUNCTIONEXISTS = DESCRIPTOR.message_types_by_name["FunctionExists"] -_CREATEEXTERNALTABLE = DESCRIPTOR.message_types_by_name["CreateExternalTable"] -_CREATEEXTERNALTABLE_OPTIONSENTRY = _CREATEEXTERNALTABLE.nested_types_by_name["OptionsEntry"] -_CREATETABLE = DESCRIPTOR.message_types_by_name["CreateTable"] -_CREATETABLE_OPTIONSENTRY = _CREATETABLE.nested_types_by_name["OptionsEntry"] -_DROPTEMPVIEW = DESCRIPTOR.message_types_by_name["DropTempView"] -_DROPGLOBALTEMPVIEW = DESCRIPTOR.message_types_by_name["DropGlobalTempView"] -_RECOVERPARTITIONS = DESCRIPTOR.message_types_by_name["RecoverPartitions"] -_ISCACHED = DESCRIPTOR.message_types_by_name["IsCached"] -_CACHETABLE = DESCRIPTOR.message_types_by_name["CacheTable"] -_UNCACHETABLE = DESCRIPTOR.message_types_by_name["UncacheTable"] -_CLEARCACHE = DESCRIPTOR.message_types_by_name["ClearCache"] -_REFRESHTABLE = DESCRIPTOR.message_types_by_name["RefreshTable"] -_REFRESHBYPATH = DESCRIPTOR.message_types_by_name["RefreshByPath"] -_CURRENTCATALOG = DESCRIPTOR.message_types_by_name["CurrentCatalog"] -_SETCURRENTCATALOG = DESCRIPTOR.message_types_by_name["SetCurrentCatalog"] -_LISTCATALOGS = DESCRIPTOR.message_types_by_name["ListCatalogs"] -Catalog = _reflection.GeneratedProtocolMessageType( - "Catalog", - (_message.Message,), - { - "DESCRIPTOR": _CATALOG, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Catalog) - }, -) -_sym_db.RegisterMessage(Catalog) - -CurrentDatabase = _reflection.GeneratedProtocolMessageType( - "CurrentDatabase", - (_message.Message,), - { - "DESCRIPTOR": _CURRENTDATABASE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CurrentDatabase) - }, -) -_sym_db.RegisterMessage(CurrentDatabase) - -SetCurrentDatabase = _reflection.GeneratedProtocolMessageType( - "SetCurrentDatabase", - (_message.Message,), - { - "DESCRIPTOR": _SETCURRENTDATABASE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SetCurrentDatabase) - }, -) -_sym_db.RegisterMessage(SetCurrentDatabase) - -ListDatabases = _reflection.GeneratedProtocolMessageType( - "ListDatabases", - (_message.Message,), - { - "DESCRIPTOR": _LISTDATABASES, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ListDatabases) - }, -) -_sym_db.RegisterMessage(ListDatabases) - -ListTables = _reflection.GeneratedProtocolMessageType( - "ListTables", - (_message.Message,), - { - "DESCRIPTOR": _LISTTABLES, - 
"__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ListTables) - }, -) -_sym_db.RegisterMessage(ListTables) - -ListFunctions = _reflection.GeneratedProtocolMessageType( - "ListFunctions", - (_message.Message,), - { - "DESCRIPTOR": _LISTFUNCTIONS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ListFunctions) - }, -) -_sym_db.RegisterMessage(ListFunctions) - -ListColumns = _reflection.GeneratedProtocolMessageType( - "ListColumns", - (_message.Message,), - { - "DESCRIPTOR": _LISTCOLUMNS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ListColumns) - }, -) -_sym_db.RegisterMessage(ListColumns) - -GetDatabase = _reflection.GeneratedProtocolMessageType( - "GetDatabase", - (_message.Message,), - { - "DESCRIPTOR": _GETDATABASE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetDatabase) - }, -) -_sym_db.RegisterMessage(GetDatabase) - -GetTable = _reflection.GeneratedProtocolMessageType( - "GetTable", - (_message.Message,), - { - "DESCRIPTOR": _GETTABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetTable) - }, -) -_sym_db.RegisterMessage(GetTable) - -GetFunction = _reflection.GeneratedProtocolMessageType( - "GetFunction", - (_message.Message,), - { - "DESCRIPTOR": _GETFUNCTION, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetFunction) - }, -) -_sym_db.RegisterMessage(GetFunction) - -DatabaseExists = _reflection.GeneratedProtocolMessageType( - "DatabaseExists", - (_message.Message,), - { - "DESCRIPTOR": _DATABASEEXISTS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.DatabaseExists) - }, -) -_sym_db.RegisterMessage(DatabaseExists) - -TableExists = _reflection.GeneratedProtocolMessageType( - "TableExists", - (_message.Message,), - { - "DESCRIPTOR": _TABLEEXISTS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.TableExists) - }, -) -_sym_db.RegisterMessage(TableExists) - -FunctionExists = _reflection.GeneratedProtocolMessageType( - "FunctionExists", - (_message.Message,), - { - "DESCRIPTOR": _FUNCTIONEXISTS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.FunctionExists) - }, -) -_sym_db.RegisterMessage(FunctionExists) - -CreateExternalTable = _reflection.GeneratedProtocolMessageType( - "CreateExternalTable", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _CREATEEXTERNALTABLE_OPTIONSENTRY, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CreateExternalTable.OptionsEntry) - }, - ), - "DESCRIPTOR": _CREATEEXTERNALTABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CreateExternalTable) - }, -) -_sym_db.RegisterMessage(CreateExternalTable) -_sym_db.RegisterMessage(CreateExternalTable.OptionsEntry) - -CreateTable = _reflection.GeneratedProtocolMessageType( - "CreateTable", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _CREATETABLE_OPTIONSENTRY, - "__module__": "spark.connect.catalog_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.CreateTable.OptionsEntry) - }, - ), - "DESCRIPTOR": _CREATETABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CreateTable) - }, -) -_sym_db.RegisterMessage(CreateTable) -_sym_db.RegisterMessage(CreateTable.OptionsEntry) - -DropTempView = _reflection.GeneratedProtocolMessageType( - "DropTempView", - (_message.Message,), - { - "DESCRIPTOR": _DROPTEMPVIEW, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.DropTempView) - }, -) -_sym_db.RegisterMessage(DropTempView) - -DropGlobalTempView = _reflection.GeneratedProtocolMessageType( - "DropGlobalTempView", - (_message.Message,), - { - "DESCRIPTOR": _DROPGLOBALTEMPVIEW, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.DropGlobalTempView) - }, -) -_sym_db.RegisterMessage(DropGlobalTempView) - -RecoverPartitions = _reflection.GeneratedProtocolMessageType( - "RecoverPartitions", - (_message.Message,), - { - "DESCRIPTOR": _RECOVERPARTITIONS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.RecoverPartitions) - }, -) -_sym_db.RegisterMessage(RecoverPartitions) - -IsCached = _reflection.GeneratedProtocolMessageType( - "IsCached", - (_message.Message,), - { - "DESCRIPTOR": _ISCACHED, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.IsCached) - }, -) -_sym_db.RegisterMessage(IsCached) - -CacheTable = _reflection.GeneratedProtocolMessageType( - "CacheTable", - (_message.Message,), - { - "DESCRIPTOR": _CACHETABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CacheTable) - }, -) -_sym_db.RegisterMessage(CacheTable) - -UncacheTable = _reflection.GeneratedProtocolMessageType( - "UncacheTable", - (_message.Message,), - { - "DESCRIPTOR": _UNCACHETABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.UncacheTable) - }, -) -_sym_db.RegisterMessage(UncacheTable) - -ClearCache = _reflection.GeneratedProtocolMessageType( - "ClearCache", - (_message.Message,), - { - "DESCRIPTOR": _CLEARCACHE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ClearCache) - }, -) -_sym_db.RegisterMessage(ClearCache) - -RefreshTable = _reflection.GeneratedProtocolMessageType( - "RefreshTable", - (_message.Message,), - { - "DESCRIPTOR": _REFRESHTABLE, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.RefreshTable) - }, -) -_sym_db.RegisterMessage(RefreshTable) - -RefreshByPath = _reflection.GeneratedProtocolMessageType( - "RefreshByPath", - (_message.Message,), - { - "DESCRIPTOR": _REFRESHBYPATH, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.RefreshByPath) - }, -) -_sym_db.RegisterMessage(RefreshByPath) - -CurrentCatalog = _reflection.GeneratedProtocolMessageType( - "CurrentCatalog", - (_message.Message,), - { - "DESCRIPTOR": _CURRENTCATALOG, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CurrentCatalog) - }, -) -_sym_db.RegisterMessage(CurrentCatalog) - -SetCurrentCatalog = _reflection.GeneratedProtocolMessageType( - "SetCurrentCatalog", - (_message.Message,), - { - "DESCRIPTOR": _SETCURRENTCATALOG, - "__module__": "spark.connect.catalog_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.SetCurrentCatalog) - }, -) -_sym_db.RegisterMessage(SetCurrentCatalog) - -ListCatalogs = _reflection.GeneratedProtocolMessageType( - "ListCatalogs", - (_message.Message,), - { - "DESCRIPTOR": _LISTCATALOGS, - "__module__": "spark.connect.catalog_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ListCatalogs) - }, -) -_sym_db.RegisterMessage(ListCatalogs) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.catalog_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/commands_pb2.py b/python/pyspark/sql/connect/proto/commands_pb2.py index 088026d8d0b83..af5fdd247ca53 100644 --- a/python/pyspark/sql/connect/proto/commands_pb2.py +++ b/python/pyspark/sql/connect/proto/commands_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: spark/connect/commands.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -39,433 +38,8 @@ b'\n\x1cspark/connect/commands.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1aspark/connect/common.proto\x1a\x1fspark/connect/expressions.proto\x1a\x1dspark/connect/relations.proto"\x86\x07\n\x07\x43ommand\x12]\n\x11register_function\x18\x01 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x10registerFunction\x12H\n\x0fwrite_operation\x18\x02 \x01(\x0b\x32\x1d.spark.connect.WriteOperationH\x00R\x0ewriteOperation\x12_\n\x15\x63reate_dataframe_view\x18\x03 \x01(\x0b\x32).spark.connect.CreateDataFrameViewCommandH\x00R\x13\x63reateDataframeView\x12O\n\x12write_operation_v2\x18\x04 \x01(\x0b\x32\x1f.spark.connect.WriteOperationV2H\x00R\x10writeOperationV2\x12<\n\x0bsql_command\x18\x05 \x01(\x0b\x32\x19.spark.connect.SqlCommandH\x00R\nsqlCommand\x12k\n\x1cwrite_stream_operation_start\x18\x06 \x01(\x0b\x32(.spark.connect.WriteStreamOperationStartH\x00R\x19writeStreamOperationStart\x12^\n\x17streaming_query_command\x18\x07 \x01(\x0b\x32$.spark.connect.StreamingQueryCommandH\x00R\x15streamingQueryCommand\x12X\n\x15get_resources_command\x18\x08 \x01(\x0b\x32".spark.connect.GetResourcesCommandH\x00R\x13getResourcesCommand\x12t\n\x1fstreaming_query_manager_command\x18\t \x01(\x0b\x32+.spark.connect.StreamingQueryManagerCommandH\x00R\x1cstreamingQueryManagerCommand\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textensionB\x0e\n\x0c\x63ommand_type"\xf1\x01\n\nSqlCommand\x12\x10\n\x03sql\x18\x01 \x01(\tR\x03sql\x12\x37\n\x04\x61rgs\x18\x02 \x03(\x0b\x32#.spark.connect.SqlCommand.ArgsEntryR\x04\x61rgs\x12<\n\x08pos_args\x18\x03 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x07posArgs\x1aZ\n\tArgsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x05value:\x02\x38\x01"\x96\x01\n\x1a\x43reateDataFrameViewCommand\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1b\n\tis_global\x18\x03 \x01(\x08R\x08isGlobal\x12\x18\n\x07replace\x18\x04 
\x01(\x08R\x07replace"\x9b\x08\n\x0eWriteOperation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1b\n\x06source\x18\x02 \x01(\tH\x01R\x06source\x88\x01\x01\x12\x14\n\x04path\x18\x03 \x01(\tH\x00R\x04path\x12?\n\x05table\x18\x04 \x01(\x0b\x32\'.spark.connect.WriteOperation.SaveTableH\x00R\x05table\x12:\n\x04mode\x18\x05 \x01(\x0e\x32&.spark.connect.WriteOperation.SaveModeR\x04mode\x12*\n\x11sort_column_names\x18\x06 \x03(\tR\x0fsortColumnNames\x12\x31\n\x14partitioning_columns\x18\x07 \x03(\tR\x13partitioningColumns\x12\x43\n\tbucket_by\x18\x08 \x01(\x0b\x32&.spark.connect.WriteOperation.BucketByR\x08\x62ucketBy\x12\x44\n\x07options\x18\t \x03(\x0b\x32*.spark.connect.WriteOperation.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x82\x02\n\tSaveTable\x12\x1d\n\ntable_name\x18\x01 \x01(\tR\ttableName\x12X\n\x0bsave_method\x18\x02 \x01(\x0e\x32\x37.spark.connect.WriteOperation.SaveTable.TableSaveMethodR\nsaveMethod"|\n\x0fTableSaveMethod\x12!\n\x1dTABLE_SAVE_METHOD_UNSPECIFIED\x10\x00\x12#\n\x1fTABLE_SAVE_METHOD_SAVE_AS_TABLE\x10\x01\x12!\n\x1dTABLE_SAVE_METHOD_INSERT_INTO\x10\x02\x1a[\n\x08\x42ucketBy\x12.\n\x13\x62ucket_column_names\x18\x01 \x03(\tR\x11\x62ucketColumnNames\x12\x1f\n\x0bnum_buckets\x18\x02 \x01(\x05R\nnumBuckets"\x89\x01\n\x08SaveMode\x12\x19\n\x15SAVE_MODE_UNSPECIFIED\x10\x00\x12\x14\n\x10SAVE_MODE_APPEND\x10\x01\x12\x17\n\x13SAVE_MODE_OVERWRITE\x10\x02\x12\x1d\n\x19SAVE_MODE_ERROR_IF_EXISTS\x10\x03\x12\x14\n\x10SAVE_MODE_IGNORE\x10\x04\x42\x0b\n\tsave_typeB\t\n\x07_source"\xad\x06\n\x10WriteOperationV2\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1d\n\ntable_name\x18\x02 \x01(\tR\ttableName\x12\x1f\n\x08provider\x18\x03 \x01(\tH\x00R\x08provider\x88\x01\x01\x12L\n\x14partitioning_columns\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13partitioningColumns\x12\x46\n\x07options\x18\x05 \x03(\x0b\x32,.spark.connect.WriteOperationV2.OptionsEntryR\x07options\x12_\n\x10table_properties\x18\x06 \x03(\x0b\x32\x34.spark.connect.WriteOperationV2.TablePropertiesEntryR\x0ftableProperties\x12\x38\n\x04mode\x18\x07 \x01(\x0e\x32$.spark.connect.WriteOperationV2.ModeR\x04mode\x12J\n\x13overwrite_condition\x18\x08 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x12overwriteCondition\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x42\n\x14TablePropertiesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"\x9f\x01\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMODE_CREATE\x10\x01\x12\x12\n\x0eMODE_OVERWRITE\x10\x02\x12\x1d\n\x19MODE_OVERWRITE_PARTITIONS\x10\x03\x12\x0f\n\x0bMODE_APPEND\x10\x04\x12\x10\n\x0cMODE_REPLACE\x10\x05\x12\x1a\n\x16MODE_CREATE_OR_REPLACE\x10\x06\x42\x0b\n\t_provider"\xd0\x05\n\x19WriteStreamOperationStart\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x16\n\x06\x66ormat\x18\x02 \x01(\tR\x06\x66ormat\x12O\n\x07options\x18\x03 \x03(\x0b\x32\x35.spark.connect.WriteStreamOperationStart.OptionsEntryR\x07options\x12:\n\x19partitioning_column_names\x18\x04 \x03(\tR\x17partitioningColumnNames\x12:\n\x18processing_time_interval\x18\x05 \x01(\tH\x00R\x16processingTimeInterval\x12%\n\ravailable_now\x18\x06 \x01(\x08H\x00R\x0c\x61vailableNow\x12\x14\n\x04once\x18\x07 
\x01(\x08H\x00R\x04once\x12\x46\n\x1e\x63ontinuous_checkpoint_interval\x18\x08 \x01(\tH\x00R\x1c\x63ontinuousCheckpointInterval\x12\x1f\n\x0boutput_mode\x18\t \x01(\tR\noutputMode\x12\x1d\n\nquery_name\x18\n \x01(\tR\tqueryName\x12\x14\n\x04path\x18\x0b \x01(\tH\x01R\x04path\x12\x1f\n\ntable_name\x18\x0c \x01(\tH\x01R\ttableName\x12L\n\x0e\x66oreach_writer\x18\r \x01(\x0b\x32%.spark.connect.StreamingForeachWriterR\rforeachWriter\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\t\n\x07triggerB\x12\n\x10sink_destination"\xa7\x01\n\x16StreamingForeachWriter\x12?\n\rpython_writer\x18\x01 \x01(\x0b\x32\x18.spark.connect.PythonUDFH\x00R\x0cpythonWriter\x12\x42\n\x0cscala_writer\x18\x02 \x01(\x0b\x32\x1d.spark.connect.ScalarScalaUDFH\x00R\x0bscalaWriterB\x08\n\x06writer"y\n\x1fWriteStreamOperationStartResult\x12\x42\n\x08query_id\x18\x01 \x01(\x0b\x32\'.spark.connect.StreamingQueryInstanceIdR\x07queryId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name"A\n\x18StreamingQueryInstanceId\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x15\n\x06run_id\x18\x02 \x01(\tR\x05runId"\xf8\x04\n\x15StreamingQueryCommand\x12\x42\n\x08query_id\x18\x01 \x01(\x0b\x32\'.spark.connect.StreamingQueryInstanceIdR\x07queryId\x12\x18\n\x06status\x18\x02 \x01(\x08H\x00R\x06status\x12%\n\rlast_progress\x18\x03 \x01(\x08H\x00R\x0clastProgress\x12)\n\x0frecent_progress\x18\x04 \x01(\x08H\x00R\x0erecentProgress\x12\x14\n\x04stop\x18\x05 \x01(\x08H\x00R\x04stop\x12\x34\n\x15process_all_available\x18\x06 \x01(\x08H\x00R\x13processAllAvailable\x12O\n\x07\x65xplain\x18\x07 \x01(\x0b\x32\x33.spark.connect.StreamingQueryCommand.ExplainCommandH\x00R\x07\x65xplain\x12\x1e\n\texception\x18\x08 \x01(\x08H\x00R\texception\x12k\n\x11\x61wait_termination\x18\t \x01(\x0b\x32<.spark.connect.StreamingQueryCommand.AwaitTerminationCommandH\x00R\x10\x61waitTermination\x1a,\n\x0e\x45xplainCommand\x12\x1a\n\x08\x65xtended\x18\x01 \x01(\x08R\x08\x65xtended\x1aL\n\x17\x41waitTerminationCommand\x12"\n\ntimeout_ms\x18\x02 \x01(\x03H\x00R\ttimeoutMs\x88\x01\x01\x42\r\n\x0b_timeout_msB\t\n\x07\x63ommand"\xf5\x08\n\x1bStreamingQueryCommandResult\x12\x42\n\x08query_id\x18\x01 \x01(\x0b\x32\'.spark.connect.StreamingQueryInstanceIdR\x07queryId\x12Q\n\x06status\x18\x02 \x01(\x0b\x32\x37.spark.connect.StreamingQueryCommandResult.StatusResultH\x00R\x06status\x12j\n\x0frecent_progress\x18\x03 \x01(\x0b\x32?.spark.connect.StreamingQueryCommandResult.RecentProgressResultH\x00R\x0erecentProgress\x12T\n\x07\x65xplain\x18\x04 \x01(\x0b\x32\x38.spark.connect.StreamingQueryCommandResult.ExplainResultH\x00R\x07\x65xplain\x12Z\n\texception\x18\x05 \x01(\x0b\x32:.spark.connect.StreamingQueryCommandResult.ExceptionResultH\x00R\texception\x12p\n\x11\x61wait_termination\x18\x06 \x01(\x0b\x32\x41.spark.connect.StreamingQueryCommandResult.AwaitTerminationResultH\x00R\x10\x61waitTermination\x1a\xaa\x01\n\x0cStatusResult\x12%\n\x0estatus_message\x18\x01 \x01(\tR\rstatusMessage\x12*\n\x11is_data_available\x18\x02 \x01(\x08R\x0fisDataAvailable\x12*\n\x11is_trigger_active\x18\x03 \x01(\x08R\x0fisTriggerActive\x12\x1b\n\tis_active\x18\x04 \x01(\x08R\x08isActive\x1aH\n\x14RecentProgressResult\x12\x30\n\x14recent_progress_json\x18\x05 \x03(\tR\x12recentProgressJson\x1a\'\n\rExplainResult\x12\x16\n\x06result\x18\x01 \x01(\tR\x06result\x1a\xc5\x01\n\x0f\x45xceptionResult\x12\x30\n\x11\x65xception_message\x18\x01 \x01(\tH\x00R\x10\x65xceptionMessage\x88\x01\x01\x12$\n\x0b\x65rror_class\x18\x02 
\x01(\tH\x01R\nerrorClass\x88\x01\x01\x12$\n\x0bstack_trace\x18\x03 \x01(\tH\x02R\nstackTrace\x88\x01\x01\x42\x14\n\x12_exception_messageB\x0e\n\x0c_error_classB\x0e\n\x0c_stack_trace\x1a\x38\n\x16\x41waitTerminationResult\x12\x1e\n\nterminated\x18\x01 \x01(\x08R\nterminatedB\r\n\x0bresult_type"\xde\x02\n\x1cStreamingQueryManagerCommand\x12\x18\n\x06\x61\x63tive\x18\x01 \x01(\x08H\x00R\x06\x61\x63tive\x12\x1d\n\tget_query\x18\x02 \x01(\tH\x00R\x08getQuery\x12|\n\x15\x61wait_any_termination\x18\x03 \x01(\x0b\x32\x46.spark.connect.StreamingQueryManagerCommand.AwaitAnyTerminationCommandH\x00R\x13\x61waitAnyTermination\x12+\n\x10reset_terminated\x18\x04 \x01(\x08H\x00R\x0fresetTerminated\x1aO\n\x1a\x41waitAnyTerminationCommand\x12"\n\ntimeout_ms\x18\x01 \x01(\x03H\x00R\ttimeoutMs\x88\x01\x01\x42\r\n\x0b_timeout_msB\t\n\x07\x63ommand"\xd3\x05\n"StreamingQueryManagerCommandResult\x12X\n\x06\x61\x63tive\x18\x01 \x01(\x0b\x32>.spark.connect.StreamingQueryManagerCommandResult.ActiveResultH\x00R\x06\x61\x63tive\x12`\n\x05query\x18\x02 \x01(\x0b\x32H.spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstanceH\x00R\x05query\x12\x81\x01\n\x15\x61wait_any_termination\x18\x03 \x01(\x0b\x32K.spark.connect.StreamingQueryManagerCommandResult.AwaitAnyTerminationResultH\x00R\x13\x61waitAnyTermination\x12+\n\x10reset_terminated\x18\x04 \x01(\x08H\x00R\x0fresetTerminated\x1a\x7f\n\x0c\x41\x63tiveResult\x12o\n\x0e\x61\x63tive_queries\x18\x01 \x03(\x0b\x32H.spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstanceR\ractiveQueries\x1as\n\x16StreamingQueryInstance\x12\x37\n\x02id\x18\x01 \x01(\x0b\x32\'.spark.connect.StreamingQueryInstanceIdR\x02id\x12\x17\n\x04name\x18\x02 \x01(\tH\x00R\x04name\x88\x01\x01\x42\x07\n\x05_name\x1a;\n\x19\x41waitAnyTerminationResult\x12\x1e\n\nterminated\x18\x01 \x01(\x08R\nterminatedB\r\n\x0bresult_type"\x15\n\x13GetResourcesCommand"\xd4\x01\n\x19GetResourcesCommandResult\x12U\n\tresources\x18\x01 \x03(\x0b\x32\x37.spark.connect.GetResourcesCommandResult.ResourcesEntryR\tresources\x1a`\n\x0eResourcesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32".spark.connect.ResourceInformationR\x05value:\x02\x38\x01\x42\x36\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_COMMAND = DESCRIPTOR.message_types_by_name["Command"] -_SQLCOMMAND = DESCRIPTOR.message_types_by_name["SqlCommand"] -_SQLCOMMAND_ARGSENTRY = _SQLCOMMAND.nested_types_by_name["ArgsEntry"] -_CREATEDATAFRAMEVIEWCOMMAND = DESCRIPTOR.message_types_by_name["CreateDataFrameViewCommand"] -_WRITEOPERATION = DESCRIPTOR.message_types_by_name["WriteOperation"] -_WRITEOPERATION_OPTIONSENTRY = _WRITEOPERATION.nested_types_by_name["OptionsEntry"] -_WRITEOPERATION_SAVETABLE = _WRITEOPERATION.nested_types_by_name["SaveTable"] -_WRITEOPERATION_BUCKETBY = _WRITEOPERATION.nested_types_by_name["BucketBy"] -_WRITEOPERATIONV2 = DESCRIPTOR.message_types_by_name["WriteOperationV2"] -_WRITEOPERATIONV2_OPTIONSENTRY = _WRITEOPERATIONV2.nested_types_by_name["OptionsEntry"] -_WRITEOPERATIONV2_TABLEPROPERTIESENTRY = _WRITEOPERATIONV2.nested_types_by_name[ - "TablePropertiesEntry" -] -_WRITESTREAMOPERATIONSTART = DESCRIPTOR.message_types_by_name["WriteStreamOperationStart"] -_WRITESTREAMOPERATIONSTART_OPTIONSENTRY = _WRITESTREAMOPERATIONSTART.nested_types_by_name[ - "OptionsEntry" -] -_STREAMINGFOREACHWRITER = DESCRIPTOR.message_types_by_name["StreamingForeachWriter"] -_WRITESTREAMOPERATIONSTARTRESULT = DESCRIPTOR.message_types_by_name[ - 
"WriteStreamOperationStartResult" -] -_STREAMINGQUERYINSTANCEID = DESCRIPTOR.message_types_by_name["StreamingQueryInstanceId"] -_STREAMINGQUERYCOMMAND = DESCRIPTOR.message_types_by_name["StreamingQueryCommand"] -_STREAMINGQUERYCOMMAND_EXPLAINCOMMAND = _STREAMINGQUERYCOMMAND.nested_types_by_name[ - "ExplainCommand" -] -_STREAMINGQUERYCOMMAND_AWAITTERMINATIONCOMMAND = _STREAMINGQUERYCOMMAND.nested_types_by_name[ - "AwaitTerminationCommand" -] -_STREAMINGQUERYCOMMANDRESULT = DESCRIPTOR.message_types_by_name["StreamingQueryCommandResult"] -_STREAMINGQUERYCOMMANDRESULT_STATUSRESULT = _STREAMINGQUERYCOMMANDRESULT.nested_types_by_name[ - "StatusResult" -] -_STREAMINGQUERYCOMMANDRESULT_RECENTPROGRESSRESULT = ( - _STREAMINGQUERYCOMMANDRESULT.nested_types_by_name["RecentProgressResult"] -) -_STREAMINGQUERYCOMMANDRESULT_EXPLAINRESULT = _STREAMINGQUERYCOMMANDRESULT.nested_types_by_name[ - "ExplainResult" -] -_STREAMINGQUERYCOMMANDRESULT_EXCEPTIONRESULT = _STREAMINGQUERYCOMMANDRESULT.nested_types_by_name[ - "ExceptionResult" -] -_STREAMINGQUERYCOMMANDRESULT_AWAITTERMINATIONRESULT = ( - _STREAMINGQUERYCOMMANDRESULT.nested_types_by_name["AwaitTerminationResult"] -) -_STREAMINGQUERYMANAGERCOMMAND = DESCRIPTOR.message_types_by_name["StreamingQueryManagerCommand"] -_STREAMINGQUERYMANAGERCOMMAND_AWAITANYTERMINATIONCOMMAND = ( - _STREAMINGQUERYMANAGERCOMMAND.nested_types_by_name["AwaitAnyTerminationCommand"] -) -_STREAMINGQUERYMANAGERCOMMANDRESULT = DESCRIPTOR.message_types_by_name[ - "StreamingQueryManagerCommandResult" -] -_STREAMINGQUERYMANAGERCOMMANDRESULT_ACTIVERESULT = ( - _STREAMINGQUERYMANAGERCOMMANDRESULT.nested_types_by_name["ActiveResult"] -) -_STREAMINGQUERYMANAGERCOMMANDRESULT_STREAMINGQUERYINSTANCE = ( - _STREAMINGQUERYMANAGERCOMMANDRESULT.nested_types_by_name["StreamingQueryInstance"] -) -_STREAMINGQUERYMANAGERCOMMANDRESULT_AWAITANYTERMINATIONRESULT = ( - _STREAMINGQUERYMANAGERCOMMANDRESULT.nested_types_by_name["AwaitAnyTerminationResult"] -) -_GETRESOURCESCOMMAND = DESCRIPTOR.message_types_by_name["GetResourcesCommand"] -_GETRESOURCESCOMMANDRESULT = DESCRIPTOR.message_types_by_name["GetResourcesCommandResult"] -_GETRESOURCESCOMMANDRESULT_RESOURCESENTRY = _GETRESOURCESCOMMANDRESULT.nested_types_by_name[ - "ResourcesEntry" -] -_WRITEOPERATION_SAVETABLE_TABLESAVEMETHOD = _WRITEOPERATION_SAVETABLE.enum_types_by_name[ - "TableSaveMethod" -] -_WRITEOPERATION_SAVEMODE = _WRITEOPERATION.enum_types_by_name["SaveMode"] -_WRITEOPERATIONV2_MODE = _WRITEOPERATIONV2.enum_types_by_name["Mode"] -Command = _reflection.GeneratedProtocolMessageType( - "Command", - (_message.Message,), - { - "DESCRIPTOR": _COMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Command) - }, -) -_sym_db.RegisterMessage(Command) - -SqlCommand = _reflection.GeneratedProtocolMessageType( - "SqlCommand", - (_message.Message,), - { - "ArgsEntry": _reflection.GeneratedProtocolMessageType( - "ArgsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SQLCOMMAND_ARGSENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SqlCommand.ArgsEntry) - }, - ), - "DESCRIPTOR": _SQLCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SqlCommand) - }, -) -_sym_db.RegisterMessage(SqlCommand) -_sym_db.RegisterMessage(SqlCommand.ArgsEntry) - -CreateDataFrameViewCommand = _reflection.GeneratedProtocolMessageType( - "CreateDataFrameViewCommand", - (_message.Message,), - { - "DESCRIPTOR": 
_CREATEDATAFRAMEVIEWCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CreateDataFrameViewCommand) - }, -) -_sym_db.RegisterMessage(CreateDataFrameViewCommand) - -WriteOperation = _reflection.GeneratedProtocolMessageType( - "WriteOperation", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _WRITEOPERATION_OPTIONSENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation.OptionsEntry) - }, - ), - "SaveTable": _reflection.GeneratedProtocolMessageType( - "SaveTable", - (_message.Message,), - { - "DESCRIPTOR": _WRITEOPERATION_SAVETABLE, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation.SaveTable) - }, - ), - "BucketBy": _reflection.GeneratedProtocolMessageType( - "BucketBy", - (_message.Message,), - { - "DESCRIPTOR": _WRITEOPERATION_BUCKETBY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation.BucketBy) - }, - ), - "DESCRIPTOR": _WRITEOPERATION, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperation) - }, -) -_sym_db.RegisterMessage(WriteOperation) -_sym_db.RegisterMessage(WriteOperation.OptionsEntry) -_sym_db.RegisterMessage(WriteOperation.SaveTable) -_sym_db.RegisterMessage(WriteOperation.BucketBy) - -WriteOperationV2 = _reflection.GeneratedProtocolMessageType( - "WriteOperationV2", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _WRITEOPERATIONV2_OPTIONSENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperationV2.OptionsEntry) - }, - ), - "TablePropertiesEntry": _reflection.GeneratedProtocolMessageType( - "TablePropertiesEntry", - (_message.Message,), - { - "DESCRIPTOR": _WRITEOPERATIONV2_TABLEPROPERTIESENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperationV2.TablePropertiesEntry) - }, - ), - "DESCRIPTOR": _WRITEOPERATIONV2, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteOperationV2) - }, -) -_sym_db.RegisterMessage(WriteOperationV2) -_sym_db.RegisterMessage(WriteOperationV2.OptionsEntry) -_sym_db.RegisterMessage(WriteOperationV2.TablePropertiesEntry) - -WriteStreamOperationStart = _reflection.GeneratedProtocolMessageType( - "WriteStreamOperationStart", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _WRITESTREAMOPERATIONSTART_OPTIONSENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteStreamOperationStart.OptionsEntry) - }, - ), - "DESCRIPTOR": _WRITESTREAMOPERATIONSTART, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteStreamOperationStart) - }, -) -_sym_db.RegisterMessage(WriteStreamOperationStart) -_sym_db.RegisterMessage(WriteStreamOperationStart.OptionsEntry) - -StreamingForeachWriter = _reflection.GeneratedProtocolMessageType( - "StreamingForeachWriter", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGFOREACHWRITER, - "__module__": "spark.connect.commands_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.StreamingForeachWriter) - }, -) -_sym_db.RegisterMessage(StreamingForeachWriter) - -WriteStreamOperationStartResult = _reflection.GeneratedProtocolMessageType( - "WriteStreamOperationStartResult", - (_message.Message,), - { - "DESCRIPTOR": _WRITESTREAMOPERATIONSTARTRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WriteStreamOperationStartResult) - }, -) -_sym_db.RegisterMessage(WriteStreamOperationStartResult) - -StreamingQueryInstanceId = _reflection.GeneratedProtocolMessageType( - "StreamingQueryInstanceId", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYINSTANCEID, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryInstanceId) - }, -) -_sym_db.RegisterMessage(StreamingQueryInstanceId) - -StreamingQueryCommand = _reflection.GeneratedProtocolMessageType( - "StreamingQueryCommand", - (_message.Message,), - { - "ExplainCommand": _reflection.GeneratedProtocolMessageType( - "ExplainCommand", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMAND_EXPLAINCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommand.ExplainCommand) - }, - ), - "AwaitTerminationCommand": _reflection.GeneratedProtocolMessageType( - "AwaitTerminationCommand", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMAND_AWAITTERMINATIONCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommand.AwaitTerminationCommand) - }, - ), - "DESCRIPTOR": _STREAMINGQUERYCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommand) - }, -) -_sym_db.RegisterMessage(StreamingQueryCommand) -_sym_db.RegisterMessage(StreamingQueryCommand.ExplainCommand) -_sym_db.RegisterMessage(StreamingQueryCommand.AwaitTerminationCommand) - -StreamingQueryCommandResult = _reflection.GeneratedProtocolMessageType( - "StreamingQueryCommandResult", - (_message.Message,), - { - "StatusResult": _reflection.GeneratedProtocolMessageType( - "StatusResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT_STATUSRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult.StatusResult) - }, - ), - "RecentProgressResult": _reflection.GeneratedProtocolMessageType( - "RecentProgressResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT_RECENTPROGRESSRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult.RecentProgressResult) - }, - ), - "ExplainResult": _reflection.GeneratedProtocolMessageType( - "ExplainResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT_EXPLAINRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult.ExplainResult) - }, - ), - "ExceptionResult": _reflection.GeneratedProtocolMessageType( - "ExceptionResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT_EXCEPTIONRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult.ExceptionResult) - }, - ), - "AwaitTerminationResult": _reflection.GeneratedProtocolMessageType( - 
"AwaitTerminationResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT_AWAITTERMINATIONRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult.AwaitTerminationResult) - }, - ), - "DESCRIPTOR": _STREAMINGQUERYCOMMANDRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryCommandResult) - }, -) -_sym_db.RegisterMessage(StreamingQueryCommandResult) -_sym_db.RegisterMessage(StreamingQueryCommandResult.StatusResult) -_sym_db.RegisterMessage(StreamingQueryCommandResult.RecentProgressResult) -_sym_db.RegisterMessage(StreamingQueryCommandResult.ExplainResult) -_sym_db.RegisterMessage(StreamingQueryCommandResult.ExceptionResult) -_sym_db.RegisterMessage(StreamingQueryCommandResult.AwaitTerminationResult) - -StreamingQueryManagerCommand = _reflection.GeneratedProtocolMessageType( - "StreamingQueryManagerCommand", - (_message.Message,), - { - "AwaitAnyTerminationCommand": _reflection.GeneratedProtocolMessageType( - "AwaitAnyTerminationCommand", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMAND_AWAITANYTERMINATIONCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommand.AwaitAnyTerminationCommand) - }, - ), - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommand) - }, -) -_sym_db.RegisterMessage(StreamingQueryManagerCommand) -_sym_db.RegisterMessage(StreamingQueryManagerCommand.AwaitAnyTerminationCommand) - -StreamingQueryManagerCommandResult = _reflection.GeneratedProtocolMessageType( - "StreamingQueryManagerCommandResult", - (_message.Message,), - { - "ActiveResult": _reflection.GeneratedProtocolMessageType( - "ActiveResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMANDRESULT_ACTIVERESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommandResult.ActiveResult) - }, - ), - "StreamingQueryInstance": _reflection.GeneratedProtocolMessageType( - "StreamingQueryInstance", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMANDRESULT_STREAMINGQUERYINSTANCE, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommandResult.StreamingQueryInstance) - }, - ), - "AwaitAnyTerminationResult": _reflection.GeneratedProtocolMessageType( - "AwaitAnyTerminationResult", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMANDRESULT_AWAITANYTERMINATIONRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommandResult.AwaitAnyTerminationResult) - }, - ), - "DESCRIPTOR": _STREAMINGQUERYMANAGERCOMMANDRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StreamingQueryManagerCommandResult) - }, -) -_sym_db.RegisterMessage(StreamingQueryManagerCommandResult) -_sym_db.RegisterMessage(StreamingQueryManagerCommandResult.ActiveResult) -_sym_db.RegisterMessage(StreamingQueryManagerCommandResult.StreamingQueryInstance) -_sym_db.RegisterMessage(StreamingQueryManagerCommandResult.AwaitAnyTerminationResult) - -GetResourcesCommand = _reflection.GeneratedProtocolMessageType( - "GetResourcesCommand", - 
(_message.Message,), - { - "DESCRIPTOR": _GETRESOURCESCOMMAND, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetResourcesCommand) - }, -) -_sym_db.RegisterMessage(GetResourcesCommand) - -GetResourcesCommandResult = _reflection.GeneratedProtocolMessageType( - "GetResourcesCommandResult", - (_message.Message,), - { - "ResourcesEntry": _reflection.GeneratedProtocolMessageType( - "ResourcesEntry", - (_message.Message,), - { - "DESCRIPTOR": _GETRESOURCESCOMMANDRESULT_RESOURCESENTRY, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetResourcesCommandResult.ResourcesEntry) - }, - ), - "DESCRIPTOR": _GETRESOURCESCOMMANDRESULT, - "__module__": "spark.connect.commands_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GetResourcesCommandResult) - }, -) -_sym_db.RegisterMessage(GetResourcesCommandResult) -_sym_db.RegisterMessage(GetResourcesCommandResult.ResourcesEntry) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.commands_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/common_pb2.py b/python/pyspark/sql/connect/proto/common_pb2.py index cde2354c8533e..9564f634342e4 100644 --- a/python/pyspark/sql/connect/proto/common_pb2.py +++ b/python/pyspark/sql/connect/proto/common_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: spark/connect/common.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -33,31 +32,8 @@ b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddressesB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_STORAGELEVEL = DESCRIPTOR.message_types_by_name["StorageLevel"] -_RESOURCEINFORMATION = DESCRIPTOR.message_types_by_name["ResourceInformation"] -StorageLevel = _reflection.GeneratedProtocolMessageType( - "StorageLevel", - (_message.Message,), - { - "DESCRIPTOR": _STORAGELEVEL, - "__module__": "spark.connect.common_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StorageLevel) - }, -) -_sym_db.RegisterMessage(StorageLevel) - -ResourceInformation = _reflection.GeneratedProtocolMessageType( - "ResourceInformation", - (_message.Message,), - { - "DESCRIPTOR": _RESOURCEINFORMATION, - "__module__": "spark.connect.common_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ResourceInformation) - }, -) -_sym_db.RegisterMessage(ResourceInformation) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.common_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS 
== False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/example_plugins_pb2.py b/python/pyspark/sql/connect/proto/example_plugins_pb2.py index 625b1b0899f1e..285edd4210eca 100644 --- a/python/pyspark/sql/connect/proto/example_plugins_pb2.py +++ b/python/pyspark/sql/connect/proto/example_plugins_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: spark/connect/example_plugins.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -37,43 +36,8 @@ b'\n#spark/connect/example_plugins.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto"i\n\x15\x45xamplePluginRelation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"m\n\x17\x45xamplePluginExpression\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"9\n\x14\x45xamplePluginCommand\x12!\n\x0c\x63ustom_field\x18\x01 \x01(\tR\x0b\x63ustomFieldB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_EXAMPLEPLUGINRELATION = DESCRIPTOR.message_types_by_name["ExamplePluginRelation"] -_EXAMPLEPLUGINEXPRESSION = DESCRIPTOR.message_types_by_name["ExamplePluginExpression"] -_EXAMPLEPLUGINCOMMAND = DESCRIPTOR.message_types_by_name["ExamplePluginCommand"] -ExamplePluginRelation = _reflection.GeneratedProtocolMessageType( - "ExamplePluginRelation", - (_message.Message,), - { - "DESCRIPTOR": _EXAMPLEPLUGINRELATION, - "__module__": "spark.connect.example_plugins_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginRelation) - }, -) -_sym_db.RegisterMessage(ExamplePluginRelation) - -ExamplePluginExpression = _reflection.GeneratedProtocolMessageType( - "ExamplePluginExpression", - (_message.Message,), - { - "DESCRIPTOR": _EXAMPLEPLUGINEXPRESSION, - "__module__": "spark.connect.example_plugins_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginExpression) - }, -) -_sym_db.RegisterMessage(ExamplePluginExpression) - -ExamplePluginCommand = _reflection.GeneratedProtocolMessageType( - "ExamplePluginCommand", - (_message.Message,), - { - "DESCRIPTOR": _EXAMPLEPLUGINCOMMAND, - "__module__": "spark.connect.example_plugins_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginCommand) - }, -) -_sym_db.RegisterMessage(ExamplePluginCommand) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.example_plugins_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/expressions_pb2.py b/python/pyspark/sql/connect/proto/expressions_pb2.py index cbff74321f355..7a68d831a996c 100644 --- a/python/pyspark/sql/connect/proto/expressions_pb2.py +++ b/python/pyspark/sql/connect/proto/expressions_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: spark/connect/expressions.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -37,309 +36,8 @@ b'\n\x1fspark/connect/expressions.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x19spark/connect/types.proto"\x95+\n\nExpression\x12=\n\x07literal\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralH\x00R\x07literal\x12\x62\n\x14unresolved_attribute\x18\x02 \x01(\x0b\x32-.spark.connect.Expression.UnresolvedAttributeH\x00R\x13unresolvedAttribute\x12_\n\x13unresolved_function\x18\x03 \x01(\x0b\x32,.spark.connect.Expression.UnresolvedFunctionH\x00R\x12unresolvedFunction\x12Y\n\x11\x65xpression_string\x18\x04 \x01(\x0b\x32*.spark.connect.Expression.ExpressionStringH\x00R\x10\x65xpressionString\x12S\n\x0funresolved_star\x18\x05 \x01(\x0b\x32(.spark.connect.Expression.UnresolvedStarH\x00R\x0eunresolvedStar\x12\x37\n\x05\x61lias\x18\x06 \x01(\x0b\x32\x1f.spark.connect.Expression.AliasH\x00R\x05\x61lias\x12\x34\n\x04\x63\x61st\x18\x07 \x01(\x0b\x32\x1e.spark.connect.Expression.CastH\x00R\x04\x63\x61st\x12V\n\x10unresolved_regex\x18\x08 \x01(\x0b\x32).spark.connect.Expression.UnresolvedRegexH\x00R\x0funresolvedRegex\x12\x44\n\nsort_order\x18\t \x01(\x0b\x32#.spark.connect.Expression.SortOrderH\x00R\tsortOrder\x12S\n\x0flambda_function\x18\n \x01(\x0b\x32(.spark.connect.Expression.LambdaFunctionH\x00R\x0elambdaFunction\x12:\n\x06window\x18\x0b \x01(\x0b\x32 .spark.connect.Expression.WindowH\x00R\x06window\x12l\n\x18unresolved_extract_value\x18\x0c \x01(\x0b\x32\x30.spark.connect.Expression.UnresolvedExtractValueH\x00R\x16unresolvedExtractValue\x12M\n\rupdate_fields\x18\r \x01(\x0b\x32&.spark.connect.Expression.UpdateFieldsH\x00R\x0cupdateFields\x12\x82\x01\n unresolved_named_lambda_variable\x18\x0e \x01(\x0b\x32\x37.spark.connect.Expression.UnresolvedNamedLambdaVariableH\x00R\x1dunresolvedNamedLambdaVariable\x12~\n#common_inline_user_defined_function\x18\x0f \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionH\x00R\x1f\x63ommonInlineUserDefinedFunction\x12\x35\n\textension\x18\xe7\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textension\x1a\x8f\x06\n\x06Window\x12\x42\n\x0fwindow_function\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x0ewindowFunction\x12@\n\x0epartition_spec\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\rpartitionSpec\x12\x42\n\norder_spec\x18\x03 \x03(\x0b\x32#.spark.connect.Expression.SortOrderR\torderSpec\x12K\n\nframe_spec\x18\x04 \x01(\x0b\x32,.spark.connect.Expression.Window.WindowFrameR\tframeSpec\x1a\xed\x03\n\x0bWindowFrame\x12U\n\nframe_type\x18\x01 \x01(\x0e\x32\x36.spark.connect.Expression.Window.WindowFrame.FrameTypeR\tframeType\x12P\n\x05lower\x18\x02 \x01(\x0b\x32:.spark.connect.Expression.Window.WindowFrame.FrameBoundaryR\x05lower\x12P\n\x05upper\x18\x03 \x01(\x0b\x32:.spark.connect.Expression.Window.WindowFrame.FrameBoundaryR\x05upper\x1a\x91\x01\n\rFrameBoundary\x12!\n\x0b\x63urrent_row\x18\x01 \x01(\x08H\x00R\ncurrentRow\x12\x1e\n\tunbounded\x18\x02 \x01(\x08H\x00R\tunbounded\x12\x31\n\x05value\x18\x03 
\x01(\x0b\x32\x19.spark.connect.ExpressionH\x00R\x05valueB\n\n\x08\x62oundary"O\n\tFrameType\x12\x18\n\x14\x46RAME_TYPE_UNDEFINED\x10\x00\x12\x12\n\x0e\x46RAME_TYPE_ROW\x10\x01\x12\x14\n\x10\x46RAME_TYPE_RANGE\x10\x02\x1a\xa9\x03\n\tSortOrder\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12O\n\tdirection\x18\x02 \x01(\x0e\x32\x31.spark.connect.Expression.SortOrder.SortDirectionR\tdirection\x12U\n\rnull_ordering\x18\x03 \x01(\x0e\x32\x30.spark.connect.Expression.SortOrder.NullOrderingR\x0cnullOrdering"l\n\rSortDirection\x12\x1e\n\x1aSORT_DIRECTION_UNSPECIFIED\x10\x00\x12\x1c\n\x18SORT_DIRECTION_ASCENDING\x10\x01\x12\x1d\n\x19SORT_DIRECTION_DESCENDING\x10\x02"U\n\x0cNullOrdering\x12\x1a\n\x16SORT_NULLS_UNSPECIFIED\x10\x00\x12\x14\n\x10SORT_NULLS_FIRST\x10\x01\x12\x13\n\x0fSORT_NULLS_LAST\x10\x02\x1a\x91\x01\n\x04\x43\x61st\x12-\n\x04\x65xpr\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x04\x65xpr\x12-\n\x04type\x18\x02 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x00R\x04type\x12\x1b\n\x08type_str\x18\x03 \x01(\tH\x00R\x07typeStrB\x0e\n\x0c\x63\x61st_to_type\x1a\x9b\x0c\n\x07Literal\x12-\n\x04null\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x00R\x04null\x12\x18\n\x06\x62inary\x18\x02 \x01(\x0cH\x00R\x06\x62inary\x12\x1a\n\x07\x62oolean\x18\x03 \x01(\x08H\x00R\x07\x62oolean\x12\x14\n\x04\x62yte\x18\x04 \x01(\x05H\x00R\x04\x62yte\x12\x16\n\x05short\x18\x05 \x01(\x05H\x00R\x05short\x12\x1a\n\x07integer\x18\x06 \x01(\x05H\x00R\x07integer\x12\x14\n\x04long\x18\x07 \x01(\x03H\x00R\x04long\x12\x16\n\x05\x66loat\x18\n \x01(\x02H\x00R\x05\x66loat\x12\x18\n\x06\x64ouble\x18\x0b \x01(\x01H\x00R\x06\x64ouble\x12\x45\n\x07\x64\x65\x63imal\x18\x0c \x01(\x0b\x32).spark.connect.Expression.Literal.DecimalH\x00R\x07\x64\x65\x63imal\x12\x18\n\x06string\x18\r \x01(\tH\x00R\x06string\x12\x14\n\x04\x64\x61te\x18\x10 \x01(\x05H\x00R\x04\x64\x61te\x12\x1e\n\ttimestamp\x18\x11 \x01(\x03H\x00R\ttimestamp\x12%\n\rtimestamp_ntz\x18\x12 \x01(\x03H\x00R\x0ctimestampNtz\x12\x61\n\x11\x63\x61lendar_interval\x18\x13 \x01(\x0b\x32\x32.spark.connect.Expression.Literal.CalendarIntervalH\x00R\x10\x63\x61lendarInterval\x12\x30\n\x13year_month_interval\x18\x14 \x01(\x05H\x00R\x11yearMonthInterval\x12,\n\x11\x64\x61y_time_interval\x18\x15 \x01(\x03H\x00R\x0f\x64\x61yTimeInterval\x12?\n\x05\x61rray\x18\x16 \x01(\x0b\x32\'.spark.connect.Expression.Literal.ArrayH\x00R\x05\x61rray\x12\x39\n\x03map\x18\x17 \x01(\x0b\x32%.spark.connect.Expression.Literal.MapH\x00R\x03map\x12\x42\n\x06struct\x18\x18 \x01(\x0b\x32(.spark.connect.Expression.Literal.StructH\x00R\x06struct\x1au\n\x07\x44\x65\x63imal\x12\x14\n\x05value\x18\x01 \x01(\tR\x05value\x12!\n\tprecision\x18\x02 \x01(\x05H\x00R\tprecision\x88\x01\x01\x12\x19\n\x05scale\x18\x03 \x01(\x05H\x01R\x05scale\x88\x01\x01\x42\x0c\n\n_precisionB\x08\n\x06_scale\x1a\x62\n\x10\x43\x61lendarInterval\x12\x16\n\x06months\x18\x01 \x01(\x05R\x06months\x12\x12\n\x04\x64\x61ys\x18\x02 \x01(\x05R\x04\x64\x61ys\x12"\n\x0cmicroseconds\x18\x03 \x01(\x03R\x0cmicroseconds\x1a\x82\x01\n\x05\x41rray\x12:\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x0b\x65lementType\x12=\n\x08\x65lements\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x08\x65lements\x1a\xe3\x01\n\x03Map\x12\x32\n\x08key_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x07keyType\x12\x36\n\nvalue_type\x18\x02 \x01(\x0b\x32\x17.spark.connect.DataTypeR\tvalueType\x12\x35\n\x04keys\x18\x03 
\x03(\x0b\x32!.spark.connect.Expression.LiteralR\x04keys\x12\x39\n\x06values\x18\x04 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06values\x1a\x81\x01\n\x06Struct\x12\x38\n\x0bstruct_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\nstructType\x12=\n\x08\x65lements\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x08\x65lementsB\x0e\n\x0cliteral_type\x1ap\n\x13UnresolvedAttribute\x12/\n\x13unparsed_identifier\x18\x01 \x01(\tR\x12unparsedIdentifier\x12\x1c\n\x07plan_id\x18\x02 \x01(\x03H\x00R\x06planId\x88\x01\x01\x42\n\n\x08_plan_id\x1a\xcc\x01\n\x12UnresolvedFunction\x12#\n\rfunction_name\x18\x01 \x01(\tR\x0c\x66unctionName\x12\x37\n\targuments\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\targuments\x12\x1f\n\x0bis_distinct\x18\x03 \x01(\x08R\nisDistinct\x12\x37\n\x18is_user_defined_function\x18\x04 \x01(\x08R\x15isUserDefinedFunction\x1a\x32\n\x10\x45xpressionString\x12\x1e\n\nexpression\x18\x01 \x01(\tR\nexpression\x1aR\n\x0eUnresolvedStar\x12,\n\x0funparsed_target\x18\x01 \x01(\tH\x00R\x0eunparsedTarget\x88\x01\x01\x42\x12\n\x10_unparsed_target\x1aV\n\x0fUnresolvedRegex\x12\x19\n\x08\x63ol_name\x18\x01 \x01(\tR\x07\x63olName\x12\x1c\n\x07plan_id\x18\x02 \x01(\x03H\x00R\x06planId\x88\x01\x01\x42\n\n\x08_plan_id\x1a\x84\x01\n\x16UnresolvedExtractValue\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12\x39\n\nextraction\x18\x02 \x01(\x0b\x32\x19.spark.connect.ExpressionR\nextraction\x1a\xbb\x01\n\x0cUpdateFields\x12\x46\n\x11struct_expression\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x10structExpression\x12\x1d\n\nfield_name\x18\x02 \x01(\tR\tfieldName\x12\x44\n\x10value_expression\x18\x03 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x0fvalueExpression\x1ax\n\x05\x41lias\x12-\n\x04\x65xpr\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x04\x65xpr\x12\x12\n\x04name\x18\x02 \x03(\tR\x04name\x12\x1f\n\x08metadata\x18\x03 \x01(\tH\x00R\x08metadata\x88\x01\x01\x42\x0b\n\t_metadata\x1a\x9e\x01\n\x0eLambdaFunction\x12\x35\n\x08\x66unction\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x08\x66unction\x12U\n\targuments\x18\x02 \x03(\x0b\x32\x37.spark.connect.Expression.UnresolvedNamedLambdaVariableR\targuments\x1a>\n\x1dUnresolvedNamedLambdaVariable\x12\x1d\n\nname_parts\x18\x01 \x03(\tR\tnamePartsB\x0b\n\texpr_type"\xec\x02\n\x1f\x43ommonInlineUserDefinedFunction\x12#\n\rfunction_name\x18\x01 \x01(\tR\x0c\x66unctionName\x12$\n\rdeterministic\x18\x02 \x01(\x08R\rdeterministic\x12\x37\n\targuments\x18\x03 \x03(\x0b\x32\x19.spark.connect.ExpressionR\targuments\x12\x39\n\npython_udf\x18\x04 \x01(\x0b\x32\x18.spark.connect.PythonUDFH\x00R\tpythonUdf\x12I\n\x10scalar_scala_udf\x18\x05 \x01(\x0b\x32\x1d.spark.connect.ScalarScalaUDFH\x00R\x0escalarScalaUdf\x12\x33\n\x08java_udf\x18\x06 \x01(\x0b\x32\x16.spark.connect.JavaUDFH\x00R\x07javaUdfB\n\n\x08\x66unction"\x9b\x01\n\tPythonUDF\x12\x38\n\x0boutput_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\noutputType\x12\x1b\n\teval_type\x18\x02 \x01(\x05R\x08\x65valType\x12\x18\n\x07\x63ommand\x18\x03 \x01(\x0cR\x07\x63ommand\x12\x1d\n\npython_ver\x18\x04 \x01(\tR\tpythonVer"\xb8\x01\n\x0eScalarScalaUDF\x12\x18\n\x07payload\x18\x01 \x01(\x0cR\x07payload\x12\x37\n\ninputTypes\x18\x02 \x03(\x0b\x32\x17.spark.connect.DataTypeR\ninputTypes\x12\x37\n\noutputType\x18\x03 \x01(\x0b\x32\x17.spark.connect.DataTypeR\noutputType\x12\x1a\n\x08nullable\x18\x04 \x01(\x08R\x08nullable"\x95\x01\n\x07JavaUDF\x12\x1d\n\nclass_name\x18\x01 \x01(\tR\tclassName\x12=\n\x0boutput_type\x18\x02 
\x01(\x0b\x32\x17.spark.connect.DataTypeH\x00R\noutputType\x88\x01\x01\x12\x1c\n\taggregate\x18\x03 \x01(\x08R\taggregateB\x0e\n\x0c_output_typeB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_EXPRESSION = DESCRIPTOR.message_types_by_name["Expression"] -_EXPRESSION_WINDOW = _EXPRESSION.nested_types_by_name["Window"] -_EXPRESSION_WINDOW_WINDOWFRAME = _EXPRESSION_WINDOW.nested_types_by_name["WindowFrame"] -_EXPRESSION_WINDOW_WINDOWFRAME_FRAMEBOUNDARY = _EXPRESSION_WINDOW_WINDOWFRAME.nested_types_by_name[ - "FrameBoundary" -] -_EXPRESSION_SORTORDER = _EXPRESSION.nested_types_by_name["SortOrder"] -_EXPRESSION_CAST = _EXPRESSION.nested_types_by_name["Cast"] -_EXPRESSION_LITERAL = _EXPRESSION.nested_types_by_name["Literal"] -_EXPRESSION_LITERAL_DECIMAL = _EXPRESSION_LITERAL.nested_types_by_name["Decimal"] -_EXPRESSION_LITERAL_CALENDARINTERVAL = _EXPRESSION_LITERAL.nested_types_by_name["CalendarInterval"] -_EXPRESSION_LITERAL_ARRAY = _EXPRESSION_LITERAL.nested_types_by_name["Array"] -_EXPRESSION_LITERAL_MAP = _EXPRESSION_LITERAL.nested_types_by_name["Map"] -_EXPRESSION_LITERAL_STRUCT = _EXPRESSION_LITERAL.nested_types_by_name["Struct"] -_EXPRESSION_UNRESOLVEDATTRIBUTE = _EXPRESSION.nested_types_by_name["UnresolvedAttribute"] -_EXPRESSION_UNRESOLVEDFUNCTION = _EXPRESSION.nested_types_by_name["UnresolvedFunction"] -_EXPRESSION_EXPRESSIONSTRING = _EXPRESSION.nested_types_by_name["ExpressionString"] -_EXPRESSION_UNRESOLVEDSTAR = _EXPRESSION.nested_types_by_name["UnresolvedStar"] -_EXPRESSION_UNRESOLVEDREGEX = _EXPRESSION.nested_types_by_name["UnresolvedRegex"] -_EXPRESSION_UNRESOLVEDEXTRACTVALUE = _EXPRESSION.nested_types_by_name["UnresolvedExtractValue"] -_EXPRESSION_UPDATEFIELDS = _EXPRESSION.nested_types_by_name["UpdateFields"] -_EXPRESSION_ALIAS = _EXPRESSION.nested_types_by_name["Alias"] -_EXPRESSION_LAMBDAFUNCTION = _EXPRESSION.nested_types_by_name["LambdaFunction"] -_EXPRESSION_UNRESOLVEDNAMEDLAMBDAVARIABLE = _EXPRESSION.nested_types_by_name[ - "UnresolvedNamedLambdaVariable" -] -_COMMONINLINEUSERDEFINEDFUNCTION = DESCRIPTOR.message_types_by_name[ - "CommonInlineUserDefinedFunction" -] -_PYTHONUDF = DESCRIPTOR.message_types_by_name["PythonUDF"] -_SCALARSCALAUDF = DESCRIPTOR.message_types_by_name["ScalarScalaUDF"] -_JAVAUDF = DESCRIPTOR.message_types_by_name["JavaUDF"] -_EXPRESSION_WINDOW_WINDOWFRAME_FRAMETYPE = _EXPRESSION_WINDOW_WINDOWFRAME.enum_types_by_name[ - "FrameType" -] -_EXPRESSION_SORTORDER_SORTDIRECTION = _EXPRESSION_SORTORDER.enum_types_by_name["SortDirection"] -_EXPRESSION_SORTORDER_NULLORDERING = _EXPRESSION_SORTORDER.enum_types_by_name["NullOrdering"] -Expression = _reflection.GeneratedProtocolMessageType( - "Expression", - (_message.Message,), - { - "Window": _reflection.GeneratedProtocolMessageType( - "Window", - (_message.Message,), - { - "WindowFrame": _reflection.GeneratedProtocolMessageType( - "WindowFrame", - (_message.Message,), - { - "FrameBoundary": _reflection.GeneratedProtocolMessageType( - "FrameBoundary", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_WINDOW_WINDOWFRAME_FRAMEBOUNDARY, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Window.WindowFrame.FrameBoundary) - }, - ), - "DESCRIPTOR": _EXPRESSION_WINDOW_WINDOWFRAME, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Window.WindowFrame) - }, - ), - "DESCRIPTOR": _EXPRESSION_WINDOW, - "__module__": "spark.connect.expressions_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.Expression.Window) - }, - ), - "SortOrder": _reflection.GeneratedProtocolMessageType( - "SortOrder", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_SORTORDER, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.SortOrder) - }, - ), - "Cast": _reflection.GeneratedProtocolMessageType( - "Cast", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_CAST, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Cast) - }, - ), - "Literal": _reflection.GeneratedProtocolMessageType( - "Literal", - (_message.Message,), - { - "Decimal": _reflection.GeneratedProtocolMessageType( - "Decimal", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LITERAL_DECIMAL, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Decimal) - }, - ), - "CalendarInterval": _reflection.GeneratedProtocolMessageType( - "CalendarInterval", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LITERAL_CALENDARINTERVAL, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.CalendarInterval) - }, - ), - "Array": _reflection.GeneratedProtocolMessageType( - "Array", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LITERAL_ARRAY, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Array) - }, - ), - "Map": _reflection.GeneratedProtocolMessageType( - "Map", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LITERAL_MAP, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Map) - }, - ), - "Struct": _reflection.GeneratedProtocolMessageType( - "Struct", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LITERAL_STRUCT, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal.Struct) - }, - ), - "DESCRIPTOR": _EXPRESSION_LITERAL, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Literal) - }, - ), - "UnresolvedAttribute": _reflection.GeneratedProtocolMessageType( - "UnresolvedAttribute", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDATTRIBUTE, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedAttribute) - }, - ), - "UnresolvedFunction": _reflection.GeneratedProtocolMessageType( - "UnresolvedFunction", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDFUNCTION, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedFunction) - }, - ), - "ExpressionString": _reflection.GeneratedProtocolMessageType( - "ExpressionString", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_EXPRESSIONSTRING, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.ExpressionString) - }, - ), - "UnresolvedStar": _reflection.GeneratedProtocolMessageType( - "UnresolvedStar", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDSTAR, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedStar) - }, - ), - "UnresolvedRegex": 
_reflection.GeneratedProtocolMessageType( - "UnresolvedRegex", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDREGEX, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedRegex) - }, - ), - "UnresolvedExtractValue": _reflection.GeneratedProtocolMessageType( - "UnresolvedExtractValue", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDEXTRACTVALUE, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedExtractValue) - }, - ), - "UpdateFields": _reflection.GeneratedProtocolMessageType( - "UpdateFields", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UPDATEFIELDS, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UpdateFields) - }, - ), - "Alias": _reflection.GeneratedProtocolMessageType( - "Alias", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_ALIAS, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.Alias) - }, - ), - "LambdaFunction": _reflection.GeneratedProtocolMessageType( - "LambdaFunction", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_LAMBDAFUNCTION, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.LambdaFunction) - }, - ), - "UnresolvedNamedLambdaVariable": _reflection.GeneratedProtocolMessageType( - "UnresolvedNamedLambdaVariable", - (_message.Message,), - { - "DESCRIPTOR": _EXPRESSION_UNRESOLVEDNAMEDLAMBDAVARIABLE, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression.UnresolvedNamedLambdaVariable) - }, - ), - "DESCRIPTOR": _EXPRESSION, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Expression) - }, -) -_sym_db.RegisterMessage(Expression) -_sym_db.RegisterMessage(Expression.Window) -_sym_db.RegisterMessage(Expression.Window.WindowFrame) -_sym_db.RegisterMessage(Expression.Window.WindowFrame.FrameBoundary) -_sym_db.RegisterMessage(Expression.SortOrder) -_sym_db.RegisterMessage(Expression.Cast) -_sym_db.RegisterMessage(Expression.Literal) -_sym_db.RegisterMessage(Expression.Literal.Decimal) -_sym_db.RegisterMessage(Expression.Literal.CalendarInterval) -_sym_db.RegisterMessage(Expression.Literal.Array) -_sym_db.RegisterMessage(Expression.Literal.Map) -_sym_db.RegisterMessage(Expression.Literal.Struct) -_sym_db.RegisterMessage(Expression.UnresolvedAttribute) -_sym_db.RegisterMessage(Expression.UnresolvedFunction) -_sym_db.RegisterMessage(Expression.ExpressionString) -_sym_db.RegisterMessage(Expression.UnresolvedStar) -_sym_db.RegisterMessage(Expression.UnresolvedRegex) -_sym_db.RegisterMessage(Expression.UnresolvedExtractValue) -_sym_db.RegisterMessage(Expression.UpdateFields) -_sym_db.RegisterMessage(Expression.Alias) -_sym_db.RegisterMessage(Expression.LambdaFunction) -_sym_db.RegisterMessage(Expression.UnresolvedNamedLambdaVariable) - -CommonInlineUserDefinedFunction = _reflection.GeneratedProtocolMessageType( - "CommonInlineUserDefinedFunction", - (_message.Message,), - { - "DESCRIPTOR": _COMMONINLINEUSERDEFINEDFUNCTION, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CommonInlineUserDefinedFunction) - }, -) -_sym_db.RegisterMessage(CommonInlineUserDefinedFunction) - -PythonUDF = _reflection.GeneratedProtocolMessageType( - 
"PythonUDF", - (_message.Message,), - { - "DESCRIPTOR": _PYTHONUDF, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.PythonUDF) - }, -) -_sym_db.RegisterMessage(PythonUDF) - -ScalarScalaUDF = _reflection.GeneratedProtocolMessageType( - "ScalarScalaUDF", - (_message.Message,), - { - "DESCRIPTOR": _SCALARSCALAUDF, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ScalarScalaUDF) - }, -) -_sym_db.RegisterMessage(ScalarScalaUDF) - -JavaUDF = _reflection.GeneratedProtocolMessageType( - "JavaUDF", - (_message.Message,), - { - "DESCRIPTOR": _JAVAUDF, - "__module__": "spark.connect.expressions_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.JavaUDF) - }, -) -_sym_db.RegisterMessage(JavaUDF) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.expressions_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py index ce36df6f81e08..9535d4f071bd4 100644 --- a/python/pyspark/sql/connect/proto/relations_pb2.py +++ b/python/pyspark/sql/connect/proto/relations_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: spark/connect/relations.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -39,745 +38,8 @@ b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x19google/protobuf/any.proto\x1a\x1fspark/connect/expressions.proto\x1a\x19spark/connect/types.proto\x1a\x1bspark/connect/catalog.proto"\xd0\x17\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0b\x32\x13.spark.connect.JoinH\x00R\x04join\x12\x34\n\x06set_op\x18\x06 \x01(\x0b\x32\x1b.spark.connect.SetOperationH\x00R\x05setOp\x12)\n\x04sort\x18\x07 \x01(\x0b\x32\x13.spark.connect.SortH\x00R\x04sort\x12,\n\x05limit\x18\x08 \x01(\x0b\x32\x14.spark.connect.LimitH\x00R\x05limit\x12\x38\n\taggregate\x18\t \x01(\x0b\x32\x18.spark.connect.AggregateH\x00R\taggregate\x12&\n\x03sql\x18\n \x01(\x0b\x32\x12.spark.connect.SQLH\x00R\x03sql\x12\x45\n\x0elocal_relation\x18\x0b \x01(\x0b\x32\x1c.spark.connect.LocalRelationH\x00R\rlocalRelation\x12/\n\x06sample\x18\x0c \x01(\x0b\x32\x15.spark.connect.SampleH\x00R\x06sample\x12/\n\x06offset\x18\r \x01(\x0b\x32\x15.spark.connect.OffsetH\x00R\x06offset\x12>\n\x0b\x64\x65\x64uplicate\x18\x0e \x01(\x0b\x32\x1a.spark.connect.DeduplicateH\x00R\x0b\x64\x65\x64uplicate\x12,\n\x05range\x18\x0f \x01(\x0b\x32\x14.spark.connect.RangeH\x00R\x05range\x12\x45\n\x0esubquery_alias\x18\x10 \x01(\x0b\x32\x1c.spark.connect.SubqueryAliasH\x00R\rsubqueryAlias\x12>\n\x0brepartition\x18\x11 
\x01(\x0b\x32\x1a.spark.connect.RepartitionH\x00R\x0brepartition\x12*\n\x05to_df\x18\x12 \x01(\x0b\x32\x13.spark.connect.ToDFH\x00R\x04toDf\x12U\n\x14with_columns_renamed\x18\x13 \x01(\x0b\x32!.spark.connect.WithColumnsRenamedH\x00R\x12withColumnsRenamed\x12<\n\x0bshow_string\x18\x14 \x01(\x0b\x32\x19.spark.connect.ShowStringH\x00R\nshowString\x12)\n\x04\x64rop\x18\x15 \x01(\x0b\x32\x13.spark.connect.DropH\x00R\x04\x64rop\x12)\n\x04tail\x18\x16 \x01(\x0b\x32\x13.spark.connect.TailH\x00R\x04tail\x12?\n\x0cwith_columns\x18\x17 \x01(\x0b\x32\x1a.spark.connect.WithColumnsH\x00R\x0bwithColumns\x12)\n\x04hint\x18\x18 \x01(\x0b\x32\x13.spark.connect.HintH\x00R\x04hint\x12\x32\n\x07unpivot\x18\x19 \x01(\x0b\x32\x16.spark.connect.UnpivotH\x00R\x07unpivot\x12\x36\n\tto_schema\x18\x1a \x01(\x0b\x32\x17.spark.connect.ToSchemaH\x00R\x08toSchema\x12\x64\n\x19repartition_by_expression\x18\x1b \x01(\x0b\x32&.spark.connect.RepartitionByExpressionH\x00R\x17repartitionByExpression\x12\x45\n\x0emap_partitions\x18\x1c \x01(\x0b\x32\x1c.spark.connect.MapPartitionsH\x00R\rmapPartitions\x12H\n\x0f\x63ollect_metrics\x18\x1d \x01(\x0b\x32\x1d.spark.connect.CollectMetricsH\x00R\x0e\x63ollectMetrics\x12,\n\x05parse\x18\x1e \x01(\x0b\x32\x14.spark.connect.ParseH\x00R\x05parse\x12\x36\n\tgroup_map\x18\x1f \x01(\x0b\x32\x17.spark.connect.GroupMapH\x00R\x08groupMap\x12=\n\x0c\x63o_group_map\x18 \x01(\x0b\x32\x19.spark.connect.CoGroupMapH\x00R\ncoGroupMap\x12\x45\n\x0ewith_watermark\x18! \x01(\x0b\x32\x1c.spark.connect.WithWatermarkH\x00R\rwithWatermark\x12\x63\n\x1a\x61pply_in_pandas_with_state\x18" \x01(\x0b\x32%.spark.connect.ApplyInPandasWithStateH\x00R\x16\x61pplyInPandasWithState\x12<\n\x0bhtml_string\x18# \x01(\x0b\x32\x19.spark.connect.HtmlStringH\x00R\nhtmlString\x12X\n\x15\x63\x61\x63hed_local_relation\x18$ \x01(\x0b\x32".spark.connect.CachedLocalRelationH\x00R\x13\x63\x61\x63hedLocalRelation\x12[\n\x16\x63\x61\x63hed_remote_relation\x18% \x01(\x0b\x32#.spark.connect.CachedRemoteRelationH\x00R\x14\x63\x61\x63hedRemoteRelation\x12\x30\n\x07\x66ill_na\x18Z \x01(\x0b\x32\x15.spark.connect.NAFillH\x00R\x06\x66illNa\x12\x30\n\x07\x64rop_na\x18[ \x01(\x0b\x32\x15.spark.connect.NADropH\x00R\x06\x64ropNa\x12\x34\n\x07replace\x18\\ \x01(\x0b\x32\x18.spark.connect.NAReplaceH\x00R\x07replace\x12\x36\n\x07summary\x18\x64 \x01(\x0b\x32\x1a.spark.connect.StatSummaryH\x00R\x07summary\x12\x39\n\x08\x63rosstab\x18\x65 \x01(\x0b\x32\x1b.spark.connect.StatCrosstabH\x00R\x08\x63rosstab\x12\x39\n\x08\x64\x65scribe\x18\x66 \x01(\x0b\x32\x1b.spark.connect.StatDescribeH\x00R\x08\x64\x65scribe\x12*\n\x03\x63ov\x18g \x01(\x0b\x32\x16.spark.connect.StatCovH\x00R\x03\x63ov\x12-\n\x04\x63orr\x18h \x01(\x0b\x32\x17.spark.connect.StatCorrH\x00R\x04\x63orr\x12L\n\x0f\x61pprox_quantile\x18i \x01(\x0b\x32!.spark.connect.StatApproxQuantileH\x00R\x0e\x61pproxQuantile\x12=\n\nfreq_items\x18j \x01(\x0b\x32\x1c.spark.connect.StatFreqItemsH\x00R\tfreqItems\x12:\n\tsample_by\x18k \x01(\x0b\x32\x1b.spark.connect.StatSampleByH\x00R\x08sampleBy\x12\x33\n\x07\x63\x61talog\x18\xc8\x01 \x01(\x0b\x32\x16.spark.connect.CatalogH\x00R\x07\x63\x61talog\x12\x35\n\textension\x18\xe6\x07 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00R\textension\x12\x33\n\x07unknown\x18\xe7\x07 \x01(\x0b\x32\x16.spark.connect.UnknownH\x00R\x07unknownB\n\n\x08rel_type"\t\n\x07Unknown"[\n\x0eRelationCommon\x12\x1f\n\x0bsource_info\x18\x01 \x01(\tR\nsourceInfo\x12\x1c\n\x07plan_id\x18\x02 \x01(\x03H\x00R\x06planId\x88\x01\x01\x42\n\n\x08_plan_id"\xe7\x01\n\x03SQL\x12\x14\n\x05query\x18\x01 
\x01(\tR\x05query\x12\x30\n\x04\x61rgs\x18\x02 \x03(\x0b\x32\x1c.spark.connect.SQL.ArgsEntryR\x04\x61rgs\x12<\n\x08pos_args\x18\x03 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x07posArgs\x1aZ\n\tArgsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x05value:\x02\x38\x01"\x97\x05\n\x04Read\x12\x41\n\x0bnamed_table\x18\x01 \x01(\x0b\x32\x1e.spark.connect.Read.NamedTableH\x00R\nnamedTable\x12\x41\n\x0b\x64\x61ta_source\x18\x02 \x01(\x0b\x32\x1e.spark.connect.Read.DataSourceH\x00R\ndataSource\x12!\n\x0cis_streaming\x18\x03 \x01(\x08R\x0bisStreaming\x1a\xc0\x01\n\nNamedTable\x12/\n\x13unparsed_identifier\x18\x01 \x01(\tR\x12unparsedIdentifier\x12\x45\n\x07options\x18\x02 \x03(\x0b\x32+.spark.connect.Read.NamedTable.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1a\x95\x02\n\nDataSource\x12\x1b\n\x06\x66ormat\x18\x01 \x01(\tH\x00R\x06\x66ormat\x88\x01\x01\x12\x1b\n\x06schema\x18\x02 \x01(\tH\x01R\x06schema\x88\x01\x01\x12\x45\n\x07options\x18\x03 \x03(\x0b\x32+.spark.connect.Read.DataSource.OptionsEntryR\x07options\x12\x14\n\x05paths\x18\x04 \x03(\tR\x05paths\x12\x1e\n\npredicates\x18\x05 \x03(\tR\npredicates\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\t\n\x07_formatB\t\n\x07_schemaB\x0b\n\tread_type"u\n\x07Project\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12;\n\x0b\x65xpressions\x18\x03 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x0b\x65xpressions"p\n\x06\x46ilter\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x37\n\tcondition\x18\x02 \x01(\x0b\x32\x19.spark.connect.ExpressionR\tcondition"\xc2\x05\n\x04Join\x12+\n\x04left\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x04left\x12-\n\x05right\x18\x02 \x01(\x0b\x32\x17.spark.connect.RelationR\x05right\x12@\n\x0ejoin_condition\x18\x03 \x01(\x0b\x32\x19.spark.connect.ExpressionR\rjoinCondition\x12\x39\n\tjoin_type\x18\x04 \x01(\x0e\x32\x1c.spark.connect.Join.JoinTypeR\x08joinType\x12#\n\rusing_columns\x18\x05 \x03(\tR\x0cusingColumns\x12K\n\x0ejoin_data_type\x18\x06 \x01(\x0b\x32 .spark.connect.Join.JoinDataTypeH\x00R\x0cjoinDataType\x88\x01\x01\x1a\x88\x01\n\x0cJoinDataType\x12:\n\x1ais_left_flattenable_to_row\x18\x01 \x01(\x08R\x16isLeftFlattenableToRow\x12<\n\x1bis_right_flattenable_to_row\x18\x02 \x01(\x08R\x17isRightFlattenableToRow"\xd0\x01\n\x08JoinType\x12\x19\n\x15JOIN_TYPE_UNSPECIFIED\x10\x00\x12\x13\n\x0fJOIN_TYPE_INNER\x10\x01\x12\x18\n\x14JOIN_TYPE_FULL_OUTER\x10\x02\x12\x18\n\x14JOIN_TYPE_LEFT_OUTER\x10\x03\x12\x19\n\x15JOIN_TYPE_RIGHT_OUTER\x10\x04\x12\x17\n\x13JOIN_TYPE_LEFT_ANTI\x10\x05\x12\x17\n\x13JOIN_TYPE_LEFT_SEMI\x10\x06\x12\x13\n\x0fJOIN_TYPE_CROSS\x10\x07\x42\x11\n\x0f_join_data_type"\xdf\x03\n\x0cSetOperation\x12\x36\n\nleft_input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\tleftInput\x12\x38\n\x0bright_input\x18\x02 \x01(\x0b\x32\x17.spark.connect.RelationR\nrightInput\x12\x45\n\x0bset_op_type\x18\x03 \x01(\x0e\x32%.spark.connect.SetOperation.SetOpTypeR\tsetOpType\x12\x1a\n\x06is_all\x18\x04 \x01(\x08H\x00R\x05isAll\x88\x01\x01\x12\x1c\n\x07\x62y_name\x18\x05 \x01(\x08H\x01R\x06\x62yName\x88\x01\x01\x12\x37\n\x15\x61llow_missing_columns\x18\x06 
\x01(\x08H\x02R\x13\x61llowMissingColumns\x88\x01\x01"r\n\tSetOpType\x12\x1b\n\x17SET_OP_TYPE_UNSPECIFIED\x10\x00\x12\x19\n\x15SET_OP_TYPE_INTERSECT\x10\x01\x12\x15\n\x11SET_OP_TYPE_UNION\x10\x02\x12\x16\n\x12SET_OP_TYPE_EXCEPT\x10\x03\x42\t\n\x07_is_allB\n\n\x08_by_nameB\x18\n\x16_allow_missing_columns"L\n\x05Limit\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x14\n\x05limit\x18\x02 \x01(\x05R\x05limit"O\n\x06Offset\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x16\n\x06offset\x18\x02 \x01(\x05R\x06offset"K\n\x04Tail\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x14\n\x05limit\x18\x02 \x01(\x05R\x05limit"\xc6\x04\n\tAggregate\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x41\n\ngroup_type\x18\x02 \x01(\x0e\x32".spark.connect.Aggregate.GroupTypeR\tgroupType\x12L\n\x14grouping_expressions\x18\x03 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13groupingExpressions\x12N\n\x15\x61ggregate_expressions\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x14\x61ggregateExpressions\x12\x34\n\x05pivot\x18\x05 \x01(\x0b\x32\x1e.spark.connect.Aggregate.PivotR\x05pivot\x1ao\n\x05Pivot\x12+\n\x03\x63ol\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x03\x63ol\x12\x39\n\x06values\x18\x02 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06values"\x81\x01\n\tGroupType\x12\x1a\n\x16GROUP_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12GROUP_TYPE_GROUPBY\x10\x01\x12\x15\n\x11GROUP_TYPE_ROLLUP\x10\x02\x12\x13\n\x0fGROUP_TYPE_CUBE\x10\x03\x12\x14\n\x10GROUP_TYPE_PIVOT\x10\x04"\xa0\x01\n\x04Sort\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x39\n\x05order\x18\x02 \x03(\x0b\x32#.spark.connect.Expression.SortOrderR\x05order\x12 \n\tis_global\x18\x03 \x01(\x08H\x00R\x08isGlobal\x88\x01\x01\x42\x0c\n\n_is_global"\x8d\x01\n\x04\x44rop\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x33\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x07\x63olumns\x12!\n\x0c\x63olumn_names\x18\x03 \x03(\tR\x0b\x63olumnNames"\xf0\x01\n\x0b\x44\x65\x64uplicate\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63olumn_names\x18\x02 \x03(\tR\x0b\x63olumnNames\x12\x32\n\x13\x61ll_columns_as_keys\x18\x03 \x01(\x08H\x00R\x10\x61llColumnsAsKeys\x88\x01\x01\x12.\n\x10within_watermark\x18\x04 \x01(\x08H\x01R\x0fwithinWatermark\x88\x01\x01\x42\x16\n\x14_all_columns_as_keysB\x13\n\x11_within_watermark"Y\n\rLocalRelation\x12\x17\n\x04\x64\x61ta\x18\x01 \x01(\x0cH\x00R\x04\x64\x61ta\x88\x01\x01\x12\x1b\n\x06schema\x18\x02 \x01(\tH\x01R\x06schema\x88\x01\x01\x42\x07\n\x05_dataB\t\n\x07_schema"_\n\x13\x43\x61\x63hedLocalRelation\x12\x16\n\x06userId\x18\x01 \x01(\tR\x06userId\x12\x1c\n\tsessionId\x18\x02 \x01(\tR\tsessionId\x12\x12\n\x04hash\x18\x03 \x01(\tR\x04hash"7\n\x14\x43\x61\x63hedRemoteRelation\x12\x1f\n\x0brelation_id\x18\x01 \x01(\tR\nrelationId"\x91\x02\n\x06Sample\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1f\n\x0blower_bound\x18\x02 \x01(\x01R\nlowerBound\x12\x1f\n\x0bupper_bound\x18\x03 \x01(\x01R\nupperBound\x12.\n\x10with_replacement\x18\x04 \x01(\x08H\x00R\x0fwithReplacement\x88\x01\x01\x12\x17\n\x04seed\x18\x05 \x01(\x03H\x01R\x04seed\x88\x01\x01\x12/\n\x13\x64\x65terministic_order\x18\x06 \x01(\x08R\x12\x64\x65terministicOrderB\x13\n\x11_with_replacementB\x07\n\x05_seed"\x91\x01\n\x05Range\x12\x19\n\x05start\x18\x01 
\x01(\x03H\x00R\x05start\x88\x01\x01\x12\x10\n\x03\x65nd\x18\x02 \x01(\x03R\x03\x65nd\x12\x12\n\x04step\x18\x03 \x01(\x03R\x04step\x12*\n\x0enum_partitions\x18\x04 \x01(\x05H\x01R\rnumPartitions\x88\x01\x01\x42\x08\n\x06_startB\x11\n\x0f_num_partitions"r\n\rSubqueryAlias\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x14\n\x05\x61lias\x18\x02 \x01(\tR\x05\x61lias\x12\x1c\n\tqualifier\x18\x03 \x03(\tR\tqualifier"\x8e\x01\n\x0bRepartition\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12%\n\x0enum_partitions\x18\x02 \x01(\x05R\rnumPartitions\x12\x1d\n\x07shuffle\x18\x03 \x01(\x08H\x00R\x07shuffle\x88\x01\x01\x42\n\n\x08_shuffle"\x8e\x01\n\nShowString\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x19\n\x08num_rows\x18\x02 \x01(\x05R\x07numRows\x12\x1a\n\x08truncate\x18\x03 \x01(\x05R\x08truncate\x12\x1a\n\x08vertical\x18\x04 \x01(\x08R\x08vertical"r\n\nHtmlString\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x19\n\x08num_rows\x18\x02 \x01(\x05R\x07numRows\x12\x1a\n\x08truncate\x18\x03 \x01(\x05R\x08truncate"\\\n\x0bStatSummary\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1e\n\nstatistics\x18\x02 \x03(\tR\nstatistics"Q\n\x0cStatDescribe\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols"e\n\x0cStatCrosstab\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ol1\x18\x02 \x01(\tR\x04\x63ol1\x12\x12\n\x04\x63ol2\x18\x03 \x01(\tR\x04\x63ol2"`\n\x07StatCov\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ol1\x18\x02 \x01(\tR\x04\x63ol1\x12\x12\n\x04\x63ol2\x18\x03 \x01(\tR\x04\x63ol2"\x89\x01\n\x08StatCorr\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ol1\x18\x02 \x01(\tR\x04\x63ol1\x12\x12\n\x04\x63ol2\x18\x03 \x01(\tR\x04\x63ol2\x12\x1b\n\x06method\x18\x04 \x01(\tH\x00R\x06method\x88\x01\x01\x42\t\n\x07_method"\xa4\x01\n\x12StatApproxQuantile\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols\x12$\n\rprobabilities\x18\x03 \x03(\x01R\rprobabilities\x12%\n\x0erelative_error\x18\x04 \x01(\x01R\rrelativeError"}\n\rStatFreqItems\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols\x12\x1d\n\x07support\x18\x03 \x01(\x01H\x00R\x07support\x88\x01\x01\x42\n\n\x08_support"\xb5\x02\n\x0cStatSampleBy\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12+\n\x03\x63ol\x18\x02 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x03\x63ol\x12\x42\n\tfractions\x18\x03 \x03(\x0b\x32$.spark.connect.StatSampleBy.FractionR\tfractions\x12\x17\n\x04seed\x18\x05 \x01(\x03H\x00R\x04seed\x88\x01\x01\x1a\x63\n\x08\x46raction\x12;\n\x07stratum\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x07stratum\x12\x1a\n\x08\x66raction\x18\x02 \x01(\x01R\x08\x66ractionB\x07\n\x05_seed"\x86\x01\n\x06NAFill\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols\x12\x39\n\x06values\x18\x03 \x03(\x0b\x32!.spark.connect.Expression.LiteralR\x06values"\x86\x01\n\x06NADrop\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols\x12\'\n\rmin_non_nulls\x18\x03 
\x01(\x05H\x00R\x0bminNonNulls\x88\x01\x01\x42\x10\n\x0e_min_non_nulls"\xa8\x02\n\tNAReplace\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04\x63ols\x18\x02 \x03(\tR\x04\x63ols\x12H\n\x0creplacements\x18\x03 \x03(\x0b\x32$.spark.connect.NAReplace.ReplacementR\x0creplacements\x1a\x8d\x01\n\x0bReplacement\x12>\n\told_value\x18\x01 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x08oldValue\x12>\n\tnew_value\x18\x02 \x01(\x0b\x32!.spark.connect.Expression.LiteralR\x08newValue"X\n\x04ToDF\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63olumn_names\x18\x02 \x03(\tR\x0b\x63olumnNames"\xef\x01\n\x12WithColumnsRenamed\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x65\n\x12rename_columns_map\x18\x02 \x03(\x0b\x32\x37.spark.connect.WithColumnsRenamed.RenameColumnsMapEntryR\x10renameColumnsMap\x1a\x43\n\x15RenameColumnsMapEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"w\n\x0bWithColumns\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x39\n\x07\x61liases\x18\x02 \x03(\x0b\x32\x1f.spark.connect.Expression.AliasR\x07\x61liases"\x86\x01\n\rWithWatermark\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x1d\n\nevent_time\x18\x02 \x01(\tR\teventTime\x12\'\n\x0f\x64\x65lay_threshold\x18\x03 \x01(\tR\x0e\x64\x65layThreshold"\x84\x01\n\x04Hint\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x39\n\nparameters\x18\x03 \x03(\x0b\x32\x19.spark.connect.ExpressionR\nparameters"\xc7\x02\n\x07Unpivot\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12+\n\x03ids\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x03ids\x12:\n\x06values\x18\x03 \x01(\x0b\x32\x1d.spark.connect.Unpivot.ValuesH\x00R\x06values\x88\x01\x01\x12\x30\n\x14variable_column_name\x18\x04 \x01(\tR\x12variableColumnName\x12*\n\x11value_column_name\x18\x05 \x01(\tR\x0fvalueColumnName\x1a;\n\x06Values\x12\x31\n\x06values\x18\x01 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x06valuesB\t\n\x07_values"j\n\x08ToSchema\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12/\n\x06schema\x18\x02 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x06schema"\xcb\x01\n\x17RepartitionByExpression\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x42\n\x0fpartition_exprs\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x0epartitionExprs\x12*\n\x0enum_partitions\x18\x03 \x01(\x05H\x00R\rnumPartitions\x88\x01\x01\x42\x11\n\x0f_num_partitions"\xb5\x01\n\rMapPartitions\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x42\n\x04\x66unc\x18\x02 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionR\x04\x66unc\x12"\n\nis_barrier\x18\x03 \x01(\x08H\x00R\tisBarrier\x88\x01\x01\x42\r\n\x0b_is_barrier"\xfb\x04\n\x08GroupMap\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12L\n\x14grouping_expressions\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13groupingExpressions\x12\x42\n\x04\x66unc\x18\x03 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionR\x04\x66unc\x12J\n\x13sorting_expressions\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x12sortingExpressions\x12<\n\rinitial_input\x18\x05 \x01(\x0b\x32\x17.spark.connect.RelationR\x0cinitialInput\x12[\n\x1cinitial_grouping_expressions\x18\x06 
\x03(\x0b\x32\x19.spark.connect.ExpressionR\x1ainitialGroupingExpressions\x12;\n\x18is_map_groups_with_state\x18\x07 \x01(\x08H\x00R\x14isMapGroupsWithState\x88\x01\x01\x12$\n\x0boutput_mode\x18\x08 \x01(\tH\x01R\noutputMode\x88\x01\x01\x12&\n\x0ctimeout_conf\x18\t \x01(\tH\x02R\x0btimeoutConf\x88\x01\x01\x42\x1b\n\x19_is_map_groups_with_stateB\x0e\n\x0c_output_modeB\x0f\n\r_timeout_conf"\x8e\x04\n\nCoGroupMap\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12W\n\x1ainput_grouping_expressions\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x18inputGroupingExpressions\x12-\n\x05other\x18\x03 \x01(\x0b\x32\x17.spark.connect.RelationR\x05other\x12W\n\x1aother_grouping_expressions\x18\x04 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x18otherGroupingExpressions\x12\x42\n\x04\x66unc\x18\x05 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionR\x04\x66unc\x12U\n\x19input_sorting_expressions\x18\x06 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x17inputSortingExpressions\x12U\n\x19other_sorting_expressions\x18\x07 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x17otherSortingExpressions"\xe5\x02\n\x16\x41pplyInPandasWithState\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12L\n\x14grouping_expressions\x18\x02 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x13groupingExpressions\x12\x42\n\x04\x66unc\x18\x03 \x01(\x0b\x32..spark.connect.CommonInlineUserDefinedFunctionR\x04\x66unc\x12#\n\routput_schema\x18\x04 \x01(\tR\x0coutputSchema\x12!\n\x0cstate_schema\x18\x05 \x01(\tR\x0bstateSchema\x12\x1f\n\x0boutput_mode\x18\x06 \x01(\tR\noutputMode\x12!\n\x0ctimeout_conf\x18\x07 \x01(\tR\x0btimeoutConf"\x88\x01\n\x0e\x43ollectMetrics\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x33\n\x07metrics\x18\x03 \x03(\x0b\x32\x19.spark.connect.ExpressionR\x07metrics"\x84\x03\n\x05Parse\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12\x38\n\x06\x66ormat\x18\x02 \x01(\x0e\x32 .spark.connect.Parse.ParseFormatR\x06\x66ormat\x12\x34\n\x06schema\x18\x03 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x00R\x06schema\x88\x01\x01\x12;\n\x07options\x18\x04 \x03(\x0b\x32!.spark.connect.Parse.OptionsEntryR\x07options\x1a:\n\x0cOptionsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01"X\n\x0bParseFormat\x12\x1c\n\x18PARSE_FORMAT_UNSPECIFIED\x10\x00\x12\x14\n\x10PARSE_FORMAT_CSV\x10\x01\x12\x15\n\x11PARSE_FORMAT_JSON\x10\x02\x42\t\n\x07_schemaB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3' ) - -_RELATION = DESCRIPTOR.message_types_by_name["Relation"] -_UNKNOWN = DESCRIPTOR.message_types_by_name["Unknown"] -_RELATIONCOMMON = DESCRIPTOR.message_types_by_name["RelationCommon"] -_SQL = DESCRIPTOR.message_types_by_name["SQL"] -_SQL_ARGSENTRY = _SQL.nested_types_by_name["ArgsEntry"] -_READ = DESCRIPTOR.message_types_by_name["Read"] -_READ_NAMEDTABLE = _READ.nested_types_by_name["NamedTable"] -_READ_NAMEDTABLE_OPTIONSENTRY = _READ_NAMEDTABLE.nested_types_by_name["OptionsEntry"] -_READ_DATASOURCE = _READ.nested_types_by_name["DataSource"] -_READ_DATASOURCE_OPTIONSENTRY = _READ_DATASOURCE.nested_types_by_name["OptionsEntry"] -_PROJECT = DESCRIPTOR.message_types_by_name["Project"] -_FILTER = DESCRIPTOR.message_types_by_name["Filter"] -_JOIN = DESCRIPTOR.message_types_by_name["Join"] -_JOIN_JOINDATATYPE = _JOIN.nested_types_by_name["JoinDataType"] -_SETOPERATION = 
DESCRIPTOR.message_types_by_name["SetOperation"] -_LIMIT = DESCRIPTOR.message_types_by_name["Limit"] -_OFFSET = DESCRIPTOR.message_types_by_name["Offset"] -_TAIL = DESCRIPTOR.message_types_by_name["Tail"] -_AGGREGATE = DESCRIPTOR.message_types_by_name["Aggregate"] -_AGGREGATE_PIVOT = _AGGREGATE.nested_types_by_name["Pivot"] -_SORT = DESCRIPTOR.message_types_by_name["Sort"] -_DROP = DESCRIPTOR.message_types_by_name["Drop"] -_DEDUPLICATE = DESCRIPTOR.message_types_by_name["Deduplicate"] -_LOCALRELATION = DESCRIPTOR.message_types_by_name["LocalRelation"] -_CACHEDLOCALRELATION = DESCRIPTOR.message_types_by_name["CachedLocalRelation"] -_CACHEDREMOTERELATION = DESCRIPTOR.message_types_by_name["CachedRemoteRelation"] -_SAMPLE = DESCRIPTOR.message_types_by_name["Sample"] -_RANGE = DESCRIPTOR.message_types_by_name["Range"] -_SUBQUERYALIAS = DESCRIPTOR.message_types_by_name["SubqueryAlias"] -_REPARTITION = DESCRIPTOR.message_types_by_name["Repartition"] -_SHOWSTRING = DESCRIPTOR.message_types_by_name["ShowString"] -_HTMLSTRING = DESCRIPTOR.message_types_by_name["HtmlString"] -_STATSUMMARY = DESCRIPTOR.message_types_by_name["StatSummary"] -_STATDESCRIBE = DESCRIPTOR.message_types_by_name["StatDescribe"] -_STATCROSSTAB = DESCRIPTOR.message_types_by_name["StatCrosstab"] -_STATCOV = DESCRIPTOR.message_types_by_name["StatCov"] -_STATCORR = DESCRIPTOR.message_types_by_name["StatCorr"] -_STATAPPROXQUANTILE = DESCRIPTOR.message_types_by_name["StatApproxQuantile"] -_STATFREQITEMS = DESCRIPTOR.message_types_by_name["StatFreqItems"] -_STATSAMPLEBY = DESCRIPTOR.message_types_by_name["StatSampleBy"] -_STATSAMPLEBY_FRACTION = _STATSAMPLEBY.nested_types_by_name["Fraction"] -_NAFILL = DESCRIPTOR.message_types_by_name["NAFill"] -_NADROP = DESCRIPTOR.message_types_by_name["NADrop"] -_NAREPLACE = DESCRIPTOR.message_types_by_name["NAReplace"] -_NAREPLACE_REPLACEMENT = _NAREPLACE.nested_types_by_name["Replacement"] -_TODF = DESCRIPTOR.message_types_by_name["ToDF"] -_WITHCOLUMNSRENAMED = DESCRIPTOR.message_types_by_name["WithColumnsRenamed"] -_WITHCOLUMNSRENAMED_RENAMECOLUMNSMAPENTRY = _WITHCOLUMNSRENAMED.nested_types_by_name[ - "RenameColumnsMapEntry" -] -_WITHCOLUMNS = DESCRIPTOR.message_types_by_name["WithColumns"] -_WITHWATERMARK = DESCRIPTOR.message_types_by_name["WithWatermark"] -_HINT = DESCRIPTOR.message_types_by_name["Hint"] -_UNPIVOT = DESCRIPTOR.message_types_by_name["Unpivot"] -_UNPIVOT_VALUES = _UNPIVOT.nested_types_by_name["Values"] -_TOSCHEMA = DESCRIPTOR.message_types_by_name["ToSchema"] -_REPARTITIONBYEXPRESSION = DESCRIPTOR.message_types_by_name["RepartitionByExpression"] -_MAPPARTITIONS = DESCRIPTOR.message_types_by_name["MapPartitions"] -_GROUPMAP = DESCRIPTOR.message_types_by_name["GroupMap"] -_COGROUPMAP = DESCRIPTOR.message_types_by_name["CoGroupMap"] -_APPLYINPANDASWITHSTATE = DESCRIPTOR.message_types_by_name["ApplyInPandasWithState"] -_COLLECTMETRICS = DESCRIPTOR.message_types_by_name["CollectMetrics"] -_PARSE = DESCRIPTOR.message_types_by_name["Parse"] -_PARSE_OPTIONSENTRY = _PARSE.nested_types_by_name["OptionsEntry"] -_JOIN_JOINTYPE = _JOIN.enum_types_by_name["JoinType"] -_SETOPERATION_SETOPTYPE = _SETOPERATION.enum_types_by_name["SetOpType"] -_AGGREGATE_GROUPTYPE = _AGGREGATE.enum_types_by_name["GroupType"] -_PARSE_PARSEFORMAT = _PARSE.enum_types_by_name["ParseFormat"] -Relation = _reflection.GeneratedProtocolMessageType( - "Relation", - (_message.Message,), - { - "DESCRIPTOR": _RELATION, - "__module__": "spark.connect.relations_pb2" - # 
@@protoc_insertion_point(class_scope:spark.connect.Relation) - }, -) -_sym_db.RegisterMessage(Relation) - -Unknown = _reflection.GeneratedProtocolMessageType( - "Unknown", - (_message.Message,), - { - "DESCRIPTOR": _UNKNOWN, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Unknown) - }, -) -_sym_db.RegisterMessage(Unknown) - -RelationCommon = _reflection.GeneratedProtocolMessageType( - "RelationCommon", - (_message.Message,), - { - "DESCRIPTOR": _RELATIONCOMMON, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.RelationCommon) - }, -) -_sym_db.RegisterMessage(RelationCommon) - -SQL = _reflection.GeneratedProtocolMessageType( - "SQL", - (_message.Message,), - { - "ArgsEntry": _reflection.GeneratedProtocolMessageType( - "ArgsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SQL_ARGSENTRY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SQL.ArgsEntry) - }, - ), - "DESCRIPTOR": _SQL, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SQL) - }, -) -_sym_db.RegisterMessage(SQL) -_sym_db.RegisterMessage(SQL.ArgsEntry) - -Read = _reflection.GeneratedProtocolMessageType( - "Read", - (_message.Message,), - { - "NamedTable": _reflection.GeneratedProtocolMessageType( - "NamedTable", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _READ_NAMEDTABLE_OPTIONSENTRY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Read.NamedTable.OptionsEntry) - }, - ), - "DESCRIPTOR": _READ_NAMEDTABLE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Read.NamedTable) - }, - ), - "DataSource": _reflection.GeneratedProtocolMessageType( - "DataSource", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _READ_DATASOURCE_OPTIONSENTRY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Read.DataSource.OptionsEntry) - }, - ), - "DESCRIPTOR": _READ_DATASOURCE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Read.DataSource) - }, - ), - "DESCRIPTOR": _READ, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Read) - }, -) -_sym_db.RegisterMessage(Read) -_sym_db.RegisterMessage(Read.NamedTable) -_sym_db.RegisterMessage(Read.NamedTable.OptionsEntry) -_sym_db.RegisterMessage(Read.DataSource) -_sym_db.RegisterMessage(Read.DataSource.OptionsEntry) - -Project = _reflection.GeneratedProtocolMessageType( - "Project", - (_message.Message,), - { - "DESCRIPTOR": _PROJECT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Project) - }, -) -_sym_db.RegisterMessage(Project) - -Filter = _reflection.GeneratedProtocolMessageType( - "Filter", - (_message.Message,), - { - "DESCRIPTOR": _FILTER, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Filter) - }, -) -_sym_db.RegisterMessage(Filter) - -Join = _reflection.GeneratedProtocolMessageType( - "Join", - (_message.Message,), - { - "JoinDataType": _reflection.GeneratedProtocolMessageType( - "JoinDataType", - (_message.Message,), - { - "DESCRIPTOR": 
_JOIN_JOINDATATYPE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Join.JoinDataType) - }, - ), - "DESCRIPTOR": _JOIN, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Join) - }, -) -_sym_db.RegisterMessage(Join) -_sym_db.RegisterMessage(Join.JoinDataType) - -SetOperation = _reflection.GeneratedProtocolMessageType( - "SetOperation", - (_message.Message,), - { - "DESCRIPTOR": _SETOPERATION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SetOperation) - }, -) -_sym_db.RegisterMessage(SetOperation) - -Limit = _reflection.GeneratedProtocolMessageType( - "Limit", - (_message.Message,), - { - "DESCRIPTOR": _LIMIT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Limit) - }, -) -_sym_db.RegisterMessage(Limit) - -Offset = _reflection.GeneratedProtocolMessageType( - "Offset", - (_message.Message,), - { - "DESCRIPTOR": _OFFSET, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Offset) - }, -) -_sym_db.RegisterMessage(Offset) - -Tail = _reflection.GeneratedProtocolMessageType( - "Tail", - (_message.Message,), - { - "DESCRIPTOR": _TAIL, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Tail) - }, -) -_sym_db.RegisterMessage(Tail) - -Aggregate = _reflection.GeneratedProtocolMessageType( - "Aggregate", - (_message.Message,), - { - "Pivot": _reflection.GeneratedProtocolMessageType( - "Pivot", - (_message.Message,), - { - "DESCRIPTOR": _AGGREGATE_PIVOT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Aggregate.Pivot) - }, - ), - "DESCRIPTOR": _AGGREGATE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Aggregate) - }, -) -_sym_db.RegisterMessage(Aggregate) -_sym_db.RegisterMessage(Aggregate.Pivot) - -Sort = _reflection.GeneratedProtocolMessageType( - "Sort", - (_message.Message,), - { - "DESCRIPTOR": _SORT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Sort) - }, -) -_sym_db.RegisterMessage(Sort) - -Drop = _reflection.GeneratedProtocolMessageType( - "Drop", - (_message.Message,), - { - "DESCRIPTOR": _DROP, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Drop) - }, -) -_sym_db.RegisterMessage(Drop) - -Deduplicate = _reflection.GeneratedProtocolMessageType( - "Deduplicate", - (_message.Message,), - { - "DESCRIPTOR": _DEDUPLICATE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Deduplicate) - }, -) -_sym_db.RegisterMessage(Deduplicate) - -LocalRelation = _reflection.GeneratedProtocolMessageType( - "LocalRelation", - (_message.Message,), - { - "DESCRIPTOR": _LOCALRELATION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.LocalRelation) - }, -) -_sym_db.RegisterMessage(LocalRelation) - -CachedLocalRelation = _reflection.GeneratedProtocolMessageType( - "CachedLocalRelation", - (_message.Message,), - { - "DESCRIPTOR": _CACHEDLOCALRELATION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CachedLocalRelation) - }, -) -_sym_db.RegisterMessage(CachedLocalRelation) - -CachedRemoteRelation = _reflection.GeneratedProtocolMessageType( - 
"CachedRemoteRelation", - (_message.Message,), - { - "DESCRIPTOR": _CACHEDREMOTERELATION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CachedRemoteRelation) - }, -) -_sym_db.RegisterMessage(CachedRemoteRelation) - -Sample = _reflection.GeneratedProtocolMessageType( - "Sample", - (_message.Message,), - { - "DESCRIPTOR": _SAMPLE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Sample) - }, -) -_sym_db.RegisterMessage(Sample) - -Range = _reflection.GeneratedProtocolMessageType( - "Range", - (_message.Message,), - { - "DESCRIPTOR": _RANGE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Range) - }, -) -_sym_db.RegisterMessage(Range) - -SubqueryAlias = _reflection.GeneratedProtocolMessageType( - "SubqueryAlias", - (_message.Message,), - { - "DESCRIPTOR": _SUBQUERYALIAS, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.SubqueryAlias) - }, -) -_sym_db.RegisterMessage(SubqueryAlias) - -Repartition = _reflection.GeneratedProtocolMessageType( - "Repartition", - (_message.Message,), - { - "DESCRIPTOR": _REPARTITION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Repartition) - }, -) -_sym_db.RegisterMessage(Repartition) - -ShowString = _reflection.GeneratedProtocolMessageType( - "ShowString", - (_message.Message,), - { - "DESCRIPTOR": _SHOWSTRING, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ShowString) - }, -) -_sym_db.RegisterMessage(ShowString) - -HtmlString = _reflection.GeneratedProtocolMessageType( - "HtmlString", - (_message.Message,), - { - "DESCRIPTOR": _HTMLSTRING, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.HtmlString) - }, -) -_sym_db.RegisterMessage(HtmlString) - -StatSummary = _reflection.GeneratedProtocolMessageType( - "StatSummary", - (_message.Message,), - { - "DESCRIPTOR": _STATSUMMARY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatSummary) - }, -) -_sym_db.RegisterMessage(StatSummary) - -StatDescribe = _reflection.GeneratedProtocolMessageType( - "StatDescribe", - (_message.Message,), - { - "DESCRIPTOR": _STATDESCRIBE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatDescribe) - }, -) -_sym_db.RegisterMessage(StatDescribe) - -StatCrosstab = _reflection.GeneratedProtocolMessageType( - "StatCrosstab", - (_message.Message,), - { - "DESCRIPTOR": _STATCROSSTAB, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatCrosstab) - }, -) -_sym_db.RegisterMessage(StatCrosstab) - -StatCov = _reflection.GeneratedProtocolMessageType( - "StatCov", - (_message.Message,), - { - "DESCRIPTOR": _STATCOV, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatCov) - }, -) -_sym_db.RegisterMessage(StatCov) - -StatCorr = _reflection.GeneratedProtocolMessageType( - "StatCorr", - (_message.Message,), - { - "DESCRIPTOR": _STATCORR, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatCorr) - }, -) -_sym_db.RegisterMessage(StatCorr) - -StatApproxQuantile = _reflection.GeneratedProtocolMessageType( - "StatApproxQuantile", - (_message.Message,), - { - "DESCRIPTOR": 
_STATAPPROXQUANTILE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatApproxQuantile) - }, -) -_sym_db.RegisterMessage(StatApproxQuantile) - -StatFreqItems = _reflection.GeneratedProtocolMessageType( - "StatFreqItems", - (_message.Message,), - { - "DESCRIPTOR": _STATFREQITEMS, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatFreqItems) - }, -) -_sym_db.RegisterMessage(StatFreqItems) - -StatSampleBy = _reflection.GeneratedProtocolMessageType( - "StatSampleBy", - (_message.Message,), - { - "Fraction": _reflection.GeneratedProtocolMessageType( - "Fraction", - (_message.Message,), - { - "DESCRIPTOR": _STATSAMPLEBY_FRACTION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatSampleBy.Fraction) - }, - ), - "DESCRIPTOR": _STATSAMPLEBY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.StatSampleBy) - }, -) -_sym_db.RegisterMessage(StatSampleBy) -_sym_db.RegisterMessage(StatSampleBy.Fraction) - -NAFill = _reflection.GeneratedProtocolMessageType( - "NAFill", - (_message.Message,), - { - "DESCRIPTOR": _NAFILL, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.NAFill) - }, -) -_sym_db.RegisterMessage(NAFill) - -NADrop = _reflection.GeneratedProtocolMessageType( - "NADrop", - (_message.Message,), - { - "DESCRIPTOR": _NADROP, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.NADrop) - }, -) -_sym_db.RegisterMessage(NADrop) - -NAReplace = _reflection.GeneratedProtocolMessageType( - "NAReplace", - (_message.Message,), - { - "Replacement": _reflection.GeneratedProtocolMessageType( - "Replacement", - (_message.Message,), - { - "DESCRIPTOR": _NAREPLACE_REPLACEMENT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.NAReplace.Replacement) - }, - ), - "DESCRIPTOR": _NAREPLACE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.NAReplace) - }, -) -_sym_db.RegisterMessage(NAReplace) -_sym_db.RegisterMessage(NAReplace.Replacement) - -ToDF = _reflection.GeneratedProtocolMessageType( - "ToDF", - (_message.Message,), - { - "DESCRIPTOR": _TODF, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ToDF) - }, -) -_sym_db.RegisterMessage(ToDF) - -WithColumnsRenamed = _reflection.GeneratedProtocolMessageType( - "WithColumnsRenamed", - (_message.Message,), - { - "RenameColumnsMapEntry": _reflection.GeneratedProtocolMessageType( - "RenameColumnsMapEntry", - (_message.Message,), - { - "DESCRIPTOR": _WITHCOLUMNSRENAMED_RENAMECOLUMNSMAPENTRY, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WithColumnsRenamed.RenameColumnsMapEntry) - }, - ), - "DESCRIPTOR": _WITHCOLUMNSRENAMED, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WithColumnsRenamed) - }, -) -_sym_db.RegisterMessage(WithColumnsRenamed) -_sym_db.RegisterMessage(WithColumnsRenamed.RenameColumnsMapEntry) - -WithColumns = _reflection.GeneratedProtocolMessageType( - "WithColumns", - (_message.Message,), - { - "DESCRIPTOR": _WITHCOLUMNS, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WithColumns) - }, -) -_sym_db.RegisterMessage(WithColumns) - 
-WithWatermark = _reflection.GeneratedProtocolMessageType( - "WithWatermark", - (_message.Message,), - { - "DESCRIPTOR": _WITHWATERMARK, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.WithWatermark) - }, -) -_sym_db.RegisterMessage(WithWatermark) - -Hint = _reflection.GeneratedProtocolMessageType( - "Hint", - (_message.Message,), - { - "DESCRIPTOR": _HINT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Hint) - }, -) -_sym_db.RegisterMessage(Hint) - -Unpivot = _reflection.GeneratedProtocolMessageType( - "Unpivot", - (_message.Message,), - { - "Values": _reflection.GeneratedProtocolMessageType( - "Values", - (_message.Message,), - { - "DESCRIPTOR": _UNPIVOT_VALUES, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Unpivot.Values) - }, - ), - "DESCRIPTOR": _UNPIVOT, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Unpivot) - }, -) -_sym_db.RegisterMessage(Unpivot) -_sym_db.RegisterMessage(Unpivot.Values) - -ToSchema = _reflection.GeneratedProtocolMessageType( - "ToSchema", - (_message.Message,), - { - "DESCRIPTOR": _TOSCHEMA, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ToSchema) - }, -) -_sym_db.RegisterMessage(ToSchema) - -RepartitionByExpression = _reflection.GeneratedProtocolMessageType( - "RepartitionByExpression", - (_message.Message,), - { - "DESCRIPTOR": _REPARTITIONBYEXPRESSION, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.RepartitionByExpression) - }, -) -_sym_db.RegisterMessage(RepartitionByExpression) - -MapPartitions = _reflection.GeneratedProtocolMessageType( - "MapPartitions", - (_message.Message,), - { - "DESCRIPTOR": _MAPPARTITIONS, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.MapPartitions) - }, -) -_sym_db.RegisterMessage(MapPartitions) - -GroupMap = _reflection.GeneratedProtocolMessageType( - "GroupMap", - (_message.Message,), - { - "DESCRIPTOR": _GROUPMAP, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.GroupMap) - }, -) -_sym_db.RegisterMessage(GroupMap) - -CoGroupMap = _reflection.GeneratedProtocolMessageType( - "CoGroupMap", - (_message.Message,), - { - "DESCRIPTOR": _COGROUPMAP, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CoGroupMap) - }, -) -_sym_db.RegisterMessage(CoGroupMap) - -ApplyInPandasWithState = _reflection.GeneratedProtocolMessageType( - "ApplyInPandasWithState", - (_message.Message,), - { - "DESCRIPTOR": _APPLYINPANDASWITHSTATE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.ApplyInPandasWithState) - }, -) -_sym_db.RegisterMessage(ApplyInPandasWithState) - -CollectMetrics = _reflection.GeneratedProtocolMessageType( - "CollectMetrics", - (_message.Message,), - { - "DESCRIPTOR": _COLLECTMETRICS, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.CollectMetrics) - }, -) -_sym_db.RegisterMessage(CollectMetrics) - -Parse = _reflection.GeneratedProtocolMessageType( - "Parse", - (_message.Message,), - { - "OptionsEntry": _reflection.GeneratedProtocolMessageType( - "OptionsEntry", - (_message.Message,), - { - "DESCRIPTOR": _PARSE_OPTIONSENTRY, - "__module__": 
"spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Parse.OptionsEntry) - }, - ), - "DESCRIPTOR": _PARSE, - "__module__": "spark.connect.relations_pb2" - # @@protoc_insertion_point(class_scope:spark.connect.Parse) - }, -) -_sym_db.RegisterMessage(Parse) -_sym_db.RegisterMessage(Parse.OptionsEntry) - +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.relations_pb2", globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None diff --git a/python/pyspark/sql/connect/proto/types_pb2.py b/python/pyspark/sql/connect/proto/types_pb2.py index d844d2da6fc7f..3619703348015 100644 --- a/python/pyspark/sql/connect/proto/types_pb2.py +++ b/python/pyspark/sql/connect/proto/types_pb2.py @@ -18,10 +18,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: spark/connect/types.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -33,294 +32,8 @@ b"\n\x19spark/connect/types.proto\x12\rspark.connect\"\xc7 \n\x08\x44\x61taType\x12\x32\n\x04null\x18\x01 \x01(\x0b\x32\x1c.spark.connect.DataType.NULLH\x00R\x04null\x12\x38\n\x06\x62inary\x18\x02 \x01(\x0b\x32\x1e.spark.connect.DataType.BinaryH\x00R\x06\x62inary\x12;\n\x07\x62oolean\x18\x03 \x01(\x0b\x32\x1f.spark.connect.DataType.BooleanH\x00R\x07\x62oolean\x12\x32\n\x04\x62yte\x18\x04 \x01(\x0b\x32\x1c.spark.connect.DataType.ByteH\x00R\x04\x62yte\x12\x35\n\x05short\x18\x05 \x01(\x0b\x32\x1d.spark.connect.DataType.ShortH\x00R\x05short\x12;\n\x07integer\x18\x06 \x01(\x0b\x32\x1f.spark.connect.DataType.IntegerH\x00R\x07integer\x12\x32\n\x04long\x18\x07 \x01(\x0b\x32\x1c.spark.connect.DataType.LongH\x00R\x04long\x12\x35\n\x05\x66loat\x18\x08 \x01(\x0b\x32\x1d.spark.connect.DataType.FloatH\x00R\x05\x66loat\x12\x38\n\x06\x64ouble\x18\t \x01(\x0b\x32\x1e.spark.connect.DataType.DoubleH\x00R\x06\x64ouble\x12;\n\x07\x64\x65\x63imal\x18\n \x01(\x0b\x32\x1f.spark.connect.DataType.DecimalH\x00R\x07\x64\x65\x63imal\x12\x38\n\x06string\x18\x0b \x01(\x0b\x32\x1e.spark.connect.DataType.StringH\x00R\x06string\x12\x32\n\x04\x63har\x18\x0c \x01(\x0b\x32\x1c.spark.connect.DataType.CharH\x00R\x04\x63har\x12<\n\x08var_char\x18\r \x01(\x0b\x32\x1f.spark.connect.DataType.VarCharH\x00R\x07varChar\x12\x32\n\x04\x64\x61te\x18\x0e \x01(\x0b\x32\x1c.spark.connect.DataType.DateH\x00R\x04\x64\x61te\x12\x41\n\ttimestamp\x18\x0f \x01(\x0b\x32!.spark.connect.DataType.TimestampH\x00R\ttimestamp\x12K\n\rtimestamp_ntz\x18\x10 \x01(\x0b\x32$.spark.connect.DataType.TimestampNTZH\x00R\x0ctimestampNtz\x12W\n\x11\x63\x61lendar_interval\x18\x11 \x01(\x0b\x32(.spark.connect.DataType.CalendarIntervalH\x00R\x10\x63\x61lendarInterval\x12[\n\x13year_month_interval\x18\x12 \x01(\x0b\x32).spark.connect.DataType.YearMonthIntervalH\x00R\x11yearMonthInterval\x12U\n\x11\x64\x61y_time_interval\x18\x13 \x01(\x0b\x32'.spark.connect.DataType.DayTimeIntervalH\x00R\x0f\x64\x61yTimeInterval\x12\x35\n\x05\x61rray\x18\x14 \x01(\x0b\x32\x1d.spark.connect.DataType.ArrayH\x00R\x05\x61rray\x12\x38\n\x06struct\x18\x15 \x01(\x0b\x32\x1e.spark.connect.DataType.StructH\x00R\x06struct\x12/\n\x03map\x18\x16 
\x01(\x0b\x32\x1b.spark.connect.DataType.MapH\x00R\x03map\x12/\n\x03udt\x18\x17 \x01(\x0b\x32\x1b.spark.connect.DataType.UDTH\x00R\x03udt\x12>\n\x08unparsed\x18\x18 \x01(\x0b\x32 .spark.connect.DataType.UnparsedH\x00R\x08unparsed\x1a\x43\n\x07\x42oolean\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a@\n\x04\x42yte\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x41\n\x05Short\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x43\n\x07Integer\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a@\n\x04Long\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x41\n\x05\x46loat\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x42\n\x06\x44ouble\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x42\n\x06String\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x42\n\x06\x42inary\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a@\n\x04NULL\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\x45\n\tTimestamp\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a@\n\x04\x44\x61te\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1aH\n\x0cTimestampNTZ\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1aL\n\x10\x43\x61lendarInterval\x12\x38\n\x18type_variation_reference\x18\x01 \x01(\rR\x16typeVariationReference\x1a\xb3\x01\n\x11YearMonthInterval\x12$\n\x0bstart_field\x18\x01 \x01(\x05H\x00R\nstartField\x88\x01\x01\x12 \n\tend_field\x18\x02 \x01(\x05H\x01R\x08\x65ndField\x88\x01\x01\x12\x38\n\x18type_variation_reference\x18\x03 \x01(\rR\x16typeVariationReferenceB\x0e\n\x0c_start_fieldB\x0c\n\n_end_field\x1a\xb1\x01\n\x0f\x44\x61yTimeInterval\x12$\n\x0bstart_field\x18\x01 \x01(\x05H\x00R\nstartField\x88\x01\x01\x12 \n\tend_field\x18\x02 \x01(\x05H\x01R\x08\x65ndField\x88\x01\x01\x12\x38\n\x18type_variation_reference\x18\x03 \x01(\rR\x16typeVariationReferenceB\x0e\n\x0c_start_fieldB\x0c\n\n_end_field\x1aX\n\x04\x43har\x12\x16\n\x06length\x18\x01 \x01(\x05R\x06length\x12\x38\n\x18type_variation_reference\x18\x02 \x01(\rR\x16typeVariationReference\x1a[\n\x07VarChar\x12\x16\n\x06length\x18\x01 \x01(\x05R\x06length\x12\x38\n\x18type_variation_reference\x18\x02 \x01(\rR\x16typeVariationReference\x1a\x99\x01\n\x07\x44\x65\x63imal\x12\x19\n\x05scale\x18\x01 \x01(\x05H\x00R\x05scale\x88\x01\x01\x12!\n\tprecision\x18\x02 \x01(\x05H\x01R\tprecision\x88\x01\x01\x12\x38\n\x18type_variation_reference\x18\x03 \x01(\rR\x16typeVariationReferenceB\x08\n\x06_scaleB\x0c\n\n_precision\x1a\xa1\x01\n\x0bStructField\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x34\n\tdata_type\x18\x02 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x08\x64\x61taType\x12\x1a\n\x08nullable\x18\x03 \x01(\x08R\x08nullable\x12\x1f\n\x08metadata\x18\x04 \x01(\tH\x00R\x08metadata\x88\x01\x01\x42\x0b\n\t_metadata\x1a\x7f\n\x06Struct\x12;\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.spark.connect.DataType.StructFieldR\x06\x66ields\x12\x38\n\x18type_variation_reference\x18\x02 \x01(\rR\x16typeVariationReference\x1a\xa2\x01\n\x05\x41rray\x12:\n\x0c\x65lement_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x0b\x65lementType\x12#\n\rcontains_null\x18\x02 
\x01(\x08R\x0c\x63ontainsNull\x12\x38\n\x18type_variation_reference\x18\x03 \x01(\rR\x16typeVariationReference\x1a\xdb\x01\n\x03Map\x12\x32\n\x08key_type\x18\x01 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x07keyType\x12\x36\n\nvalue_type\x18\x02 \x01(\x0b\x32\x17.spark.connect.DataTypeR\tvalueType\x12.\n\x13value_contains_null\x18\x03 \x01(\x08R\x11valueContainsNull\x12\x38\n\x18type_variation_reference\x18\x04 \x01(\rR\x16typeVariationReference\x1a\x8f\x02\n\x03UDT\x12\x12\n\x04type\x18\x01 \x01(\tR\x04type\x12 \n\tjvm_class\x18\x02 \x01(\tH\x00R\x08jvmClass\x88\x01\x01\x12&\n\x0cpython_class\x18\x03 \x01(\tH\x01R\x0bpythonClass\x88\x01\x01\x12;\n\x17serialized_python_class\x18\x04 \x01(\tH\x02R\x15serializedPythonClass\x88\x01\x01\x12\x32\n\x08sql_type\x18\x05 \x01(\x0b\x32\x17.spark.connect.DataTypeR\x07sqlTypeB\x0c\n\n_jvm_classB\x0f\n\r_python_classB\x1a\n\x18_serialized_python_class\x1a\x34\n\x08Unparsed\x12(\n\x10\x64\x61ta_type_string\x18\x01 \x01(\tR\x0e\x64\x61taTypeStringB\x06\n\x04kindB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3"
 )
-
-_DATATYPE = DESCRIPTOR.message_types_by_name["DataType"]
-_DATATYPE_BOOLEAN = _DATATYPE.nested_types_by_name["Boolean"]
-_DATATYPE_BYTE = _DATATYPE.nested_types_by_name["Byte"]
-_DATATYPE_SHORT = _DATATYPE.nested_types_by_name["Short"]
-_DATATYPE_INTEGER = _DATATYPE.nested_types_by_name["Integer"]
-_DATATYPE_LONG = _DATATYPE.nested_types_by_name["Long"]
-_DATATYPE_FLOAT = _DATATYPE.nested_types_by_name["Float"]
-_DATATYPE_DOUBLE = _DATATYPE.nested_types_by_name["Double"]
-_DATATYPE_STRING = _DATATYPE.nested_types_by_name["String"]
-_DATATYPE_BINARY = _DATATYPE.nested_types_by_name["Binary"]
-_DATATYPE_NULL = _DATATYPE.nested_types_by_name["NULL"]
-_DATATYPE_TIMESTAMP = _DATATYPE.nested_types_by_name["Timestamp"]
-_DATATYPE_DATE = _DATATYPE.nested_types_by_name["Date"]
-_DATATYPE_TIMESTAMPNTZ = _DATATYPE.nested_types_by_name["TimestampNTZ"]
-_DATATYPE_CALENDARINTERVAL = _DATATYPE.nested_types_by_name["CalendarInterval"]
-_DATATYPE_YEARMONTHINTERVAL = _DATATYPE.nested_types_by_name["YearMonthInterval"]
-_DATATYPE_DAYTIMEINTERVAL = _DATATYPE.nested_types_by_name["DayTimeInterval"]
-_DATATYPE_CHAR = _DATATYPE.nested_types_by_name["Char"]
-_DATATYPE_VARCHAR = _DATATYPE.nested_types_by_name["VarChar"]
-_DATATYPE_DECIMAL = _DATATYPE.nested_types_by_name["Decimal"]
-_DATATYPE_STRUCTFIELD = _DATATYPE.nested_types_by_name["StructField"]
-_DATATYPE_STRUCT = _DATATYPE.nested_types_by_name["Struct"]
-_DATATYPE_ARRAY = _DATATYPE.nested_types_by_name["Array"]
-_DATATYPE_MAP = _DATATYPE.nested_types_by_name["Map"]
-_DATATYPE_UDT = _DATATYPE.nested_types_by_name["UDT"]
-_DATATYPE_UNPARSED = _DATATYPE.nested_types_by_name["Unparsed"]
-DataType = _reflection.GeneratedProtocolMessageType(
-    "DataType",
-    (_message.Message,),
-    {
-        "Boolean": _reflection.GeneratedProtocolMessageType(
-            "Boolean",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_BOOLEAN,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Boolean)
-            },
-        ),
-        "Byte": _reflection.GeneratedProtocolMessageType(
-            "Byte",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_BYTE,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Byte)
-            },
-        ),
-        "Short": _reflection.GeneratedProtocolMessageType(
-            "Short",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_SHORT,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Short)
-            },
-        ),
-        "Integer": _reflection.GeneratedProtocolMessageType(
-            "Integer",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_INTEGER,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Integer)
-            },
-        ),
-        "Long": _reflection.GeneratedProtocolMessageType(
-            "Long",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_LONG,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Long)
-            },
-        ),
-        "Float": _reflection.GeneratedProtocolMessageType(
-            "Float",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_FLOAT,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Float)
-            },
-        ),
-        "Double": _reflection.GeneratedProtocolMessageType(
-            "Double",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_DOUBLE,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Double)
-            },
-        ),
-        "String": _reflection.GeneratedProtocolMessageType(
-            "String",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_STRING,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.String)
-            },
-        ),
-        "Binary": _reflection.GeneratedProtocolMessageType(
-            "Binary",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_BINARY,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Binary)
-            },
-        ),
-        "NULL": _reflection.GeneratedProtocolMessageType(
-            "NULL",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_NULL,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.NULL)
-            },
-        ),
-        "Timestamp": _reflection.GeneratedProtocolMessageType(
-            "Timestamp",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_TIMESTAMP,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Timestamp)
-            },
-        ),
-        "Date": _reflection.GeneratedProtocolMessageType(
-            "Date",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_DATE,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Date)
-            },
-        ),
-        "TimestampNTZ": _reflection.GeneratedProtocolMessageType(
-            "TimestampNTZ",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_TIMESTAMPNTZ,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.TimestampNTZ)
-            },
-        ),
-        "CalendarInterval": _reflection.GeneratedProtocolMessageType(
-            "CalendarInterval",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_CALENDARINTERVAL,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.CalendarInterval)
-            },
-        ),
-        "YearMonthInterval": _reflection.GeneratedProtocolMessageType(
-            "YearMonthInterval",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_YEARMONTHINTERVAL,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.YearMonthInterval)
-            },
-        ),
-        "DayTimeInterval": _reflection.GeneratedProtocolMessageType(
-            "DayTimeInterval",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_DAYTIMEINTERVAL,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.DayTimeInterval)
-            },
-        ),
-        "Char": _reflection.GeneratedProtocolMessageType(
-            "Char",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_CHAR,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Char)
-            },
-        ),
-        "VarChar": _reflection.GeneratedProtocolMessageType(
-            "VarChar",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_VARCHAR,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.VarChar)
-            },
-        ),
-        "Decimal": _reflection.GeneratedProtocolMessageType(
-            "Decimal",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_DECIMAL,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Decimal)
-            },
-        ),
-        "StructField": _reflection.GeneratedProtocolMessageType(
-            "StructField",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_STRUCTFIELD,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.StructField)
-            },
-        ),
-        "Struct": _reflection.GeneratedProtocolMessageType(
-            "Struct",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_STRUCT,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Struct)
-            },
-        ),
-        "Array": _reflection.GeneratedProtocolMessageType(
-            "Array",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_ARRAY,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Array)
-            },
-        ),
-        "Map": _reflection.GeneratedProtocolMessageType(
-            "Map",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_MAP,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Map)
-            },
-        ),
-        "UDT": _reflection.GeneratedProtocolMessageType(
-            "UDT",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_UDT,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.UDT)
-            },
-        ),
-        "Unparsed": _reflection.GeneratedProtocolMessageType(
-            "Unparsed",
-            (_message.Message,),
-            {
-                "DESCRIPTOR": _DATATYPE_UNPARSED,
-                "__module__": "spark.connect.types_pb2"
-                # @@protoc_insertion_point(class_scope:spark.connect.DataType.Unparsed)
-            },
-        ),
-        "DESCRIPTOR": _DATATYPE,
-        "__module__": "spark.connect.types_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.DataType)
-    },
-)
-_sym_db.RegisterMessage(DataType)
-_sym_db.RegisterMessage(DataType.Boolean)
-_sym_db.RegisterMessage(DataType.Byte)
-_sym_db.RegisterMessage(DataType.Short)
-_sym_db.RegisterMessage(DataType.Integer)
-_sym_db.RegisterMessage(DataType.Long)
-_sym_db.RegisterMessage(DataType.Float)
-_sym_db.RegisterMessage(DataType.Double)
-_sym_db.RegisterMessage(DataType.String)
-_sym_db.RegisterMessage(DataType.Binary)
-_sym_db.RegisterMessage(DataType.NULL)
-_sym_db.RegisterMessage(DataType.Timestamp)
-_sym_db.RegisterMessage(DataType.Date)
-_sym_db.RegisterMessage(DataType.TimestampNTZ)
-_sym_db.RegisterMessage(DataType.CalendarInterval)
-_sym_db.RegisterMessage(DataType.YearMonthInterval)
-_sym_db.RegisterMessage(DataType.DayTimeInterval)
-_sym_db.RegisterMessage(DataType.Char)
-_sym_db.RegisterMessage(DataType.VarChar)
-_sym_db.RegisterMessage(DataType.Decimal)
-_sym_db.RegisterMessage(DataType.StructField)
-_sym_db.RegisterMessage(DataType.Struct)
-_sym_db.RegisterMessage(DataType.Array)
-_sym_db.RegisterMessage(DataType.Map)
-_sym_db.RegisterMessage(DataType.UDT)
-_sym_db.RegisterMessage(DataType.Unparsed)
-
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.types_pb2", globals())
 if _descriptor._USE_C_DESCRIPTORS == False:

     DESCRIPTOR._options = None