[SPARK-44370][CONNECT] Migrate Buf remote generation alpha to remote plugins #41933

Closed · wants to merge 1 commit
10 changes: 5 additions & 5 deletions connector/connect/common/src/main/buf.gen.yaml
@@ -16,18 +16,18 @@
#
version: v1
plugins:
-  - remote: buf.build/protocolbuffers/plugins/cpp:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/cpp:v21.7
Member (Author) commented:

I'm not sure the version should be updated to this, but this (and v3.14.0) is the oldest version still maintained by the Buf team.
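Editor's note (not part of the PR thread): the change under discussion is the reference format itself. The deprecated alpha remote generation used a remote: key with a plugins/ path segment, while Buf's remote plugins use a plugin: key without it; only the version tag is in question above. A minimal before/after sketch, using only names that already appear in this diff:

# buf.gen.yaml (sketch, not the full file)
version: v1
plugins:
  # old style: alpha remote generation (deprecated by Buf)
  # - remote: buf.build/protocolbuffers/plugins/cpp:v3.20.0-1
  #   out: gen/proto/cpp
  # new style: remote plugin reference
  - plugin: buf.build/protocolbuffers/cpp:v21.7
    out: gen/proto/cpp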

Contributor replied:

Maybe as follows:

  - plugin: buf.build/grpc/cpp:v1.56.0
    out: gen/proto/cpp
  - plugin: buf.build/protoco
    out: gen/proto/cpp

That would keep the writing consistent with the Ruby plugin entries in the original.
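Editor's note (not part of the PR thread): the suggestion appears to pair a gRPC code-generation plugin with a protocolbuffers plugin for C++, the way the Ruby section below pairs buf.build/grpc/ruby with buf.build/protocolbuffers/ruby. The second entry above is truncated; assuming it was meant to be the protocolbuffers C++ plugin already used in this diff, the pairing would look roughly like this (a sketch, not the PR's actual content):

  # hypothetical C++ pairing, mirroring the Ruby entries
  - plugin: buf.build/grpc/cpp:v1.56.0             # gRPC C++ stubs (version as suggested above)
    out: gen/proto/cpp
  - plugin: buf.build/protocolbuffers/cpp:v21.7    # protobuf C++ messages (assumed completion of the truncated entry)
    out: gen/proto/cpp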

    out: gen/proto/cpp
-  - remote: buf.build/protocolbuffers/plugins/csharp:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/csharp:v21.7
    out: gen/proto/csharp
-  - remote: buf.build/protocolbuffers/plugins/java:v3.20.0-1
+  - plugin: buf.build/protocolbuffers/java:v21.7
    out: gen/proto/java
  - plugin: buf.build/grpc/ruby:v1.56.0
    out: gen/proto/ruby
-  - remote: buf.build/protocolbuffers/plugins/ruby:v21.2.0-1
+  - plugin: buf.build/protocolbuffers/ruby:v21.7
    out: gen/proto/ruby
  # Building the Python build and building the mypy interfaces.
-  - remote: buf.build/protocolbuffers/plugins/python:v3.19.3-1
+  - plugin: buf.build/protocolbuffers/python:v21.7
    out: gen/proto/python
  - plugin: buf.build/grpc/python:v1.56.0
    out: gen/proto/python
748 changes: 3 additions & 745 deletions python/pyspark/sql/connect/proto/base_pb2.py

Large diffs are not rendered by default.

352 changes: 3 additions & 349 deletions python/pyspark/sql/connect/proto/catalog_pb2.py

Large diffs are not rendered by default.

432 changes: 3 additions & 429 deletions python/pyspark/sql/connect/proto/commands_pb2.py

Large diffs are not rendered by default.

30 changes: 3 additions & 27 deletions python/pyspark/sql/connect/proto/common_pb2.py
@@ -18,10 +18,9 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/common.proto
"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)
@@ -33,31 +32,8 @@
b'\n\x1aspark/connect/common.proto\x12\rspark.connect"\xb0\x01\n\x0cStorageLevel\x12\x19\n\x08use_disk\x18\x01 \x01(\x08R\x07useDisk\x12\x1d\n\nuse_memory\x18\x02 \x01(\x08R\tuseMemory\x12 \n\x0cuse_off_heap\x18\x03 \x01(\x08R\nuseOffHeap\x12"\n\x0c\x64\x65serialized\x18\x04 \x01(\x08R\x0c\x64\x65serialized\x12 \n\x0breplication\x18\x05 \x01(\x05R\x0breplication"G\n\x13ResourceInformation\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12\x1c\n\taddresses\x18\x02 \x03(\tR\taddressesB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)


-_STORAGELEVEL = DESCRIPTOR.message_types_by_name["StorageLevel"]
-_RESOURCEINFORMATION = DESCRIPTOR.message_types_by_name["ResourceInformation"]
-StorageLevel = _reflection.GeneratedProtocolMessageType(
-    "StorageLevel",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _STORAGELEVEL,
-        "__module__": "spark.connect.common_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.StorageLevel)
-    },
-)
-_sym_db.RegisterMessage(StorageLevel)
-
-ResourceInformation = _reflection.GeneratedProtocolMessageType(
-    "ResourceInformation",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _RESOURCEINFORMATION,
-        "__module__": "spark.connect.common_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.ResourceInformation)
-    },
-)
-_sym_db.RegisterMessage(ResourceInformation)

+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.common_pb2", globals())
if _descriptor._USE_C_DESCRIPTORS == False:

DESCRIPTOR._options = None
42 changes: 3 additions & 39 deletions python/pyspark/sql/connect/proto/example_plugins_pb2.py
@@ -18,10 +18,9 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: spark/connect/example_plugins.proto
"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database

# @@protoc_insertion_point(imports)
@@ -37,43 +36,8 @@
b'\n#spark/connect/example_plugins.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x1fspark/connect/expressions.proto"i\n\x15\x45xamplePluginRelation\x12-\n\x05input\x18\x01 \x01(\x0b\x32\x17.spark.connect.RelationR\x05input\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"m\n\x17\x45xamplePluginExpression\x12/\n\x05\x63hild\x18\x01 \x01(\x0b\x32\x19.spark.connect.ExpressionR\x05\x63hild\x12!\n\x0c\x63ustom_field\x18\x02 \x01(\tR\x0b\x63ustomField"9\n\x14\x45xamplePluginCommand\x12!\n\x0c\x63ustom_field\x18\x01 \x01(\tR\x0b\x63ustomFieldB6\n\x1eorg.apache.spark.connect.protoP\x01Z\x12internal/generatedb\x06proto3'
)


-_EXAMPLEPLUGINRELATION = DESCRIPTOR.message_types_by_name["ExamplePluginRelation"]
-_EXAMPLEPLUGINEXPRESSION = DESCRIPTOR.message_types_by_name["ExamplePluginExpression"]
-_EXAMPLEPLUGINCOMMAND = DESCRIPTOR.message_types_by_name["ExamplePluginCommand"]
-ExamplePluginRelation = _reflection.GeneratedProtocolMessageType(
-    "ExamplePluginRelation",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _EXAMPLEPLUGINRELATION,
-        "__module__": "spark.connect.example_plugins_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginRelation)
-    },
-)
-_sym_db.RegisterMessage(ExamplePluginRelation)
-
-ExamplePluginExpression = _reflection.GeneratedProtocolMessageType(
-    "ExamplePluginExpression",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _EXAMPLEPLUGINEXPRESSION,
-        "__module__": "spark.connect.example_plugins_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginExpression)
-    },
-)
-_sym_db.RegisterMessage(ExamplePluginExpression)
-
-ExamplePluginCommand = _reflection.GeneratedProtocolMessageType(
-    "ExamplePluginCommand",
-    (_message.Message,),
-    {
-        "DESCRIPTOR": _EXAMPLEPLUGINCOMMAND,
-        "__module__": "spark.connect.example_plugins_pb2"
-        # @@protoc_insertion_point(class_scope:spark.connect.ExamplePluginCommand)
-    },
-)
-_sym_db.RegisterMessage(ExamplePluginCommand)

+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "spark.connect.example_plugins_pb2", globals())
if _descriptor._USE_C_DESCRIPTORS == False:

DESCRIPTOR._options = None