This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c09c7793376 [SPARK-40938][CONNECT] Support Alias for every type of Relation
c09c7793376 is described below

commit c09c77933764053b15dfd5a9c2f8826dfb9cd6e7
Author: Rui Wang <rui.w...@databricks.com>
AuthorDate: Tue Nov 1 12:41:37 2022 +0800

    [SPARK-40938][CONNECT] Support Alias for every type of Relation
    
    ### What changes were proposed in this pull request?
    
    In the past, the Connect server could only handle an `alias` on `Read`
    and `Project` relations. In the Spark DataFrame API, however, any
    DataFrame can be chained with `as(alias: String)`, so every
    Relation/LogicalPlan should be able to carry an `alias`. This PR
    refactors the proto and the planner to make that work: the `alias`
    field moves out of `RelationCommon` into a dedicated `SubqueryAlias`
    relation that wraps its input relation.
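    
    For example, with the Python client (a sketch based on the unit test
    updated in this patch; `self.connect` and `self.tbl_name` come from the
    test fixture):
    
    ```python
    # Any DataFrame can now be aliased, not just reads and projections.
    df = self.connect.readTable(table_name=self.tbl_name)
    plan = df.alias("table_alias")._plan.to_proto(self.connect)
    # The alias is carried by a dedicated SubqueryAlias relation that wraps
    # its input, instead of the old RelationCommon.alias string field.
    assert plan.root.subquery_alias.alias == "table_alias"
    ```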
    
    ### Why are the changes needed?
    
    Improve API coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    UT: `test_relation_alias` in `test_connect_plan_only.py` is updated to
    assert the new `subquery_alias` field.
    
    Closes #38415 from amaliujia/every_relation_has_alias.
    
    Authored-by: Rui Wang <rui.w...@databricks.com>
    Signed-off-by: Wenchen Fan <wenc...@databricks.com>
---
 .../main/protobuf/spark/connect/relations.proto    |  13 ++-
 .../org/apache/spark/sql/connect/dsl/package.scala |   2 +-
 .../sql/connect/planner/SparkConnectPlanner.scala  |  52 ++++-----
 python/pyspark/sql/connect/dataframe.py            |   2 +-
 python/pyspark/sql/connect/plan.py                 |  36 +++++--
 python/pyspark/sql/connect/proto/relations_pb2.py  | 118 +++++++++++----------
 python/pyspark/sql/connect/proto/relations_pb2.pyi |  53 ++++++++-
 .../sql/tests/connect/test_connect_plan_only.py    |   2 +-
 8 files changed, 173 insertions(+), 105 deletions(-)

diff --git a/connector/connect/src/main/protobuf/spark/connect/relations.proto b/connector/connect/src/main/protobuf/spark/connect/relations.proto
index bf4fb1f031a..e519a564d5c 100644
--- a/connector/connect/src/main/protobuf/spark/connect/relations.proto
+++ b/connector/connect/src/main/protobuf/spark/connect/relations.proto
@@ -45,6 +45,7 @@ message Relation {
     Offset offset = 13;
     Deduplicate deduplicate = 14;
     Range range = 15;
+    SubqueryAlias subquery_alias = 16;
 
     Unknown unknown = 999;
   }
@@ -56,7 +57,6 @@ message Unknown {}
 // Common metadata of all relations.
 message RelationCommon {
   string source_info = 1;
-  string alias = 2;
 }
 
 // Relation that uses a SQL query to generate the output.
@@ -223,6 +223,7 @@ message Sample {
 message Range {
   // Optional. Default value = 0
   int32 start = 1;
+  // Required.
   int32 end = 2;
   // Optional. Default value = 1
   Step step = 3;
@@ -238,3 +239,13 @@ message Range {
     int32 num_partitions = 1;
   }
 }
+
+// Relation alias.
+message SubqueryAlias {
+  // Required. The input relation.
+  Relation input = 1;
+  // Required. The alias.
+  string alias = 2;
+  // Optional. Qualifier of the alias.
+  repeated string qualifier = 3;
+}
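
The aliased input is itself a `Relation`, so an alias can wrap any relation type. A minimal sketch using the Python classes generated from this file (the table name is illustrative):

```python
from pyspark.sql.connect.proto import relations_pb2 as proto

# Build Relation(subquery_alias=SubqueryAlias(input=Read(named_table), alias="t")).
rel = proto.Relation()
rel.subquery_alias.alias = "t"
rel.subquery_alias.input.read.named_table.unparsed_identifier = "my_table"
```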
diff --git a/connector/connect/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala b/connector/connect/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
index 4d3df49dc72..ae884864006 100644
--- a/connector/connect/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
+++ b/connector/connect/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
@@ -308,7 +308,7 @@ package object dsl {
       def as(alias: String): Relation = {
         Relation
           .newBuilder(logicalPlan)
-          .setCommon(RelationCommon.newBuilder().setAlias(alias))
+          .setSubqueryAlias(SubqueryAlias.newBuilder().setAlias(alias).setInput(logicalPlan))
           .build()
       }
 
diff --git a/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index 481dbff10ee..cb04d6eaf29 100644
--- a/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++ b/connector/connect/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -22,6 +22,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.spark.connect.proto
 import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.AliasIdentifier
 import org.apache.spark.sql.catalyst.analysis.{UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction, UnresolvedRelation, UnresolvedStar}
 import org.apache.spark.sql.catalyst.expressions
 import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeReference, Expression}
@@ -54,8 +55,8 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
     }
 
     rel.getRelTypeCase match {
-      case proto.Relation.RelTypeCase.READ => transformReadRel(rel.getRead, common)
-      case proto.Relation.RelTypeCase.PROJECT => transformProject(rel.getProject, common)
+      case proto.Relation.RelTypeCase.READ => transformReadRel(rel.getRead)
+      case proto.Relation.RelTypeCase.PROJECT => transformProject(rel.getProject)
       case proto.Relation.RelTypeCase.FILTER => transformFilter(rel.getFilter)
       case proto.Relation.RelTypeCase.LIMIT => transformLimit(rel.getLimit)
       case proto.Relation.RelTypeCase.OFFSET => transformOffset(rel.getOffset)
@@ -66,9 +67,11 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
       case proto.Relation.RelTypeCase.AGGREGATE => transformAggregate(rel.getAggregate)
       case proto.Relation.RelTypeCase.SQL => transformSql(rel.getSql)
       case proto.Relation.RelTypeCase.LOCAL_RELATION =>
-        transformLocalRelation(rel.getLocalRelation, common)
+        transformLocalRelation(rel.getLocalRelation)
       case proto.Relation.RelTypeCase.SAMPLE => transformSample(rel.getSample)
       case proto.Relation.RelTypeCase.RANGE => transformRange(rel.getRange)
+      case proto.Relation.RelTypeCase.SUBQUERY_ALIAS =>
+        transformSubqueryAlias(rel.getSubqueryAlias)
       case proto.Relation.RelTypeCase.RELTYPE_NOT_SET =>
         throw new IndexOutOfBoundsException("Expected Relation to be set, but is empty.")
       case _ => throw InvalidPlanInput(s"${rel.getUnknown} not supported.")
@@ -79,6 +82,16 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
     session.sessionState.sqlParser.parsePlan(sql.getQuery)
   }
 
+  private def transformSubqueryAlias(alias: proto.SubqueryAlias): LogicalPlan = {
+    val aliasIdentifier =
+      if (alias.getQualifierCount > 0) {
+        AliasIdentifier.apply(alias.getAlias, alias.getQualifierList.asScala.toSeq)
+      } else {
+        AliasIdentifier.apply(alias.getAlias)
+      }
+    SubqueryAlias(aliasIdentifier, transformRelation(alias.getInput))
+  }
+
   /**
    * All fields of [[proto.Sample]] are optional. However, given those are proto primitive types,
    * we cannot differentiate if the field is not set or set when the field's value equals to the type
@@ -141,35 +154,21 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
     }
   }
 
-  private def transformLocalRelation(
-      rel: proto.LocalRelation,
-      common: Option[proto.RelationCommon]): LogicalPlan = {
+  private def transformLocalRelation(rel: proto.LocalRelation): LogicalPlan = {
     val attributes = rel.getAttributesList.asScala.map(transformAttribute(_)).toSeq
-    val relation = new org.apache.spark.sql.catalyst.plans.logical.LocalRelation(attributes)
-    if (common.nonEmpty && common.get.getAlias.nonEmpty) {
-      logical.SubqueryAlias(identifier = common.get.getAlias, child = relation)
-    } else {
-      relation
-    }
+    new org.apache.spark.sql.catalyst.plans.logical.LocalRelation(attributes)
   }
 
   private def transformAttribute(exp: proto.Expression.QualifiedAttribute): Attribute = {
     AttributeReference(exp.getName, DataTypeProtoConverter.toCatalystType(exp.getType))()
   }
 
-  private def transformReadRel(
-      rel: proto.Read,
-      common: Option[proto.RelationCommon]): LogicalPlan = {
+  private def transformReadRel(rel: proto.Read): LogicalPlan = {
     val baseRelation = rel.getReadTypeCase match {
       case proto.Read.ReadTypeCase.NAMED_TABLE =>
         val multipartIdentifier =
           CatalystSqlParser.parseMultipartIdentifier(rel.getNamedTable.getUnparsedIdentifier)
-        val child = UnresolvedRelation(multipartIdentifier)
-        if (common.nonEmpty && common.get.getAlias.nonEmpty) {
-          SubqueryAlias(identifier = common.get.getAlias, child = child)
-        } else {
-          child
-        }
+        UnresolvedRelation(multipartIdentifier)
       case proto.Read.ReadTypeCase.DATA_SOURCE =>
         if (rel.getDataSource.getFormat == "") {
           throw InvalidPlanInput("DataSource requires a format")
@@ -193,9 +192,7 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
     logical.Filter(condition = transformExpression(rel.getCondition), child = baseRel)
   }
 
-  private def transformProject(
-      rel: proto.Project,
-      common: Option[proto.RelationCommon]): LogicalPlan = {
+  private def transformProject(rel: proto.Project): LogicalPlan = {
     val baseRel = transformRelation(rel.getInput)
     // TODO: support the target field for *.
     val projection =
@@ -204,12 +201,7 @@ class SparkConnectPlanner(plan: proto.Relation, session: SparkSession) {
       } else {
         rel.getExpressionsList.asScala.map(transformExpression).map(UnresolvedAlias(_))
       }
-    val project = logical.Project(projectList = projection.toSeq, child = baseRel)
-    if (common.nonEmpty && common.get.getAlias.nonEmpty) {
-      logical.SubqueryAlias(identifier = common.get.getAlias, child = project)
-    } else {
-      project
-    }
+    logical.Project(projectList = projection.toSeq, child = baseRel)
   }
 
   private def transformUnresolvedExpression(exp: proto.Expression): UnresolvedAttribute = {
diff --git a/python/pyspark/sql/connect/dataframe.py b/python/pyspark/sql/connect/dataframe.py
index 1ec105f5afd..ea355e150bd 100644
--- a/python/pyspark/sql/connect/dataframe.py
+++ b/python/pyspark/sql/connect/dataframe.py
@@ -121,7 +121,7 @@ class DataFrame(object):
         return self.groupBy().agg(exprs)
 
     def alias(self, alias: str) -> "DataFrame":
-        return DataFrame.withPlan(plan.Project(self._plan).withAlias(alias), session=self._session)
+        return DataFrame.withPlan(plan.SubqueryAlias(self._plan, alias), session=self._session)
 
     def approxQuantile(self, col: ColumnRef, probabilities: Any, relativeError: Any) -> "DataFrame":
         ...
diff --git a/python/pyspark/sql/connect/plan.py b/python/pyspark/sql/connect/plan.py
index 07fecfb47f3..f919b5156cf 100644
--- a/python/pyspark/sql/connect/plan.py
+++ b/python/pyspark/sql/connect/plan.py
@@ -201,10 +201,6 @@ class Project(LogicalPlan):
                     f"Only Expressions or String can be used for projections: 
'{c}'."
                 )
 
-    def withAlias(self, alias: str) -> LogicalPlan:
-        self.alias = alias
-        return self
-
     def plan(self, session: Optional["RemoteSparkSession"]) -> proto.Relation:
         assert self._child is not None
         proj_exprs = []
@@ -217,14 +213,10 @@ class Project(LogicalPlan):
                 proj_exprs.append(exp)
             else:
                 proj_exprs.append(self.unresolved_attr(c))
-        common = proto.RelationCommon()
-        if self.alias is not None:
-            common.alias = self.alias
 
         plan = proto.Relation()
         plan.project.input.CopyFrom(self._child.plan(session))
         plan.project.expressions.extend(proj_exprs)
-        plan.common.CopyFrom(common)
         return plan
 
     def print(self, indent: int = 0) -> str:
@@ -648,6 +640,34 @@ class UnionAll(LogicalPlan):
         """
 
 
+class SubqueryAlias(LogicalPlan):
+    """Alias for a relation."""
+
+    def __init__(self, child: Optional["LogicalPlan"], alias: str) -> None:
+        super().__init__(child)
+        self._alias = alias
+
+    def plan(self, session: Optional["RemoteSparkSession"]) -> proto.Relation:
+        assert self._child is not None
+        rel = proto.Relation()
+        # Attach the child plan as the aliased input; without it the alias
+        # relation would drop its subtree.
+        rel.subquery_alias.input.CopyFrom(self._child.plan(session))
+        rel.subquery_alias.alias = self._alias
+        return rel
+
+    def print(self, indent: int = 0) -> str:
+        c_buf = self._child.print(indent + LogicalPlan.INDENT) if self._child else ""
+        return f"{' ' * indent}<SubqueryAlias alias={self._alias}>\n{c_buf}"
+
+    def _repr_html_(self) -> str:
+        return f"""
+        <ul>
+           <li>
+              <b>SubqueryAlias</b><br />
+              Child: {self._child_repr_()}
+              Alias: {self._alias}
+           </li>
+        </ul>
+        """
+
+
 class SQL(LogicalPlan):
     def __init__(self, query: str) -> None:
         super().__init__(None)
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.py b/python/pyspark/sql/connect/proto/relations_pb2.py
index bf242a5d7ca..17b4d6fa8b6 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.py
+++ b/python/pyspark/sql/connect/proto/relations_pb2.py
@@ -32,7 +32,7 @@ from pyspark.sql.connect.proto import expressions_pb2 as spark_dot_connect_dot_e
 
 
 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
-    b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\xc5\x06\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0 [...]
+    b'\n\x1dspark/connect/relations.proto\x12\rspark.connect\x1a\x1fspark/connect/expressions.proto"\x8c\x07\n\x08Relation\x12\x35\n\x06\x63ommon\x18\x01 \x01(\x0b\x32\x1d.spark.connect.RelationCommonR\x06\x63ommon\x12)\n\x04read\x18\x02 \x01(\x0b\x32\x13.spark.connect.ReadH\x00R\x04read\x12\x32\n\x07project\x18\x03 \x01(\x0b\x32\x16.spark.connect.ProjectH\x00R\x07project\x12/\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x15.spark.connect.FilterH\x00R\x06\x66ilter\x12)\n\x04join\x18\x05 \x01(\x0 [...]
 )
 
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
@@ -44,61 +44,63 @@ if _descriptor._USE_C_DESCRIPTORS == False:
     _READ_DATASOURCE_OPTIONSENTRY._options = None
     _READ_DATASOURCE_OPTIONSENTRY._serialized_options = b"8\001"
     _RELATION._serialized_start = 82
-    _RELATION._serialized_end = 919
-    _UNKNOWN._serialized_start = 921
-    _UNKNOWN._serialized_end = 930
-    _RELATIONCOMMON._serialized_start = 932
-    _RELATIONCOMMON._serialized_end = 1003
-    _SQL._serialized_start = 1005
-    _SQL._serialized_end = 1032
-    _READ._serialized_start = 1035
-    _READ._serialized_end = 1445
-    _READ_NAMEDTABLE._serialized_start = 1177
-    _READ_NAMEDTABLE._serialized_end = 1238
-    _READ_DATASOURCE._serialized_start = 1241
-    _READ_DATASOURCE._serialized_end = 1432
-    _READ_DATASOURCE_OPTIONSENTRY._serialized_start = 1374
-    _READ_DATASOURCE_OPTIONSENTRY._serialized_end = 1432
-    _PROJECT._serialized_start = 1447
-    _PROJECT._serialized_end = 1564
-    _FILTER._serialized_start = 1566
-    _FILTER._serialized_end = 1678
-    _JOIN._serialized_start = 1681
-    _JOIN._serialized_end = 2131
-    _JOIN_JOINTYPE._serialized_start = 1944
-    _JOIN_JOINTYPE._serialized_end = 2131
-    _SETOPERATION._serialized_start = 2134
-    _SETOPERATION._serialized_end = 2497
-    _SETOPERATION_SETOPTYPE._serialized_start = 2383
-    _SETOPERATION_SETOPTYPE._serialized_end = 2497
-    _LIMIT._serialized_start = 2499
-    _LIMIT._serialized_end = 2575
-    _OFFSET._serialized_start = 2577
-    _OFFSET._serialized_end = 2656
-    _AGGREGATE._serialized_start = 2659
-    _AGGREGATE._serialized_end = 2984
-    _AGGREGATE_AGGREGATEFUNCTION._serialized_start = 2888
-    _AGGREGATE_AGGREGATEFUNCTION._serialized_end = 2984
-    _SORT._serialized_start = 2987
-    _SORT._serialized_end = 3489
-    _SORT_SORTFIELD._serialized_start = 3107
-    _SORT_SORTFIELD._serialized_end = 3295
-    _SORT_SORTDIRECTION._serialized_start = 3297
-    _SORT_SORTDIRECTION._serialized_end = 3405
-    _SORT_SORTNULLS._serialized_start = 3407
-    _SORT_SORTNULLS._serialized_end = 3489
-    _DEDUPLICATE._serialized_start = 3492
-    _DEDUPLICATE._serialized_end = 3634
-    _LOCALRELATION._serialized_start = 3636
-    _LOCALRELATION._serialized_end = 3729
-    _SAMPLE._serialized_start = 3732
-    _SAMPLE._serialized_end = 3972
-    _SAMPLE_SEED._serialized_start = 3946
-    _SAMPLE_SEED._serialized_end = 3972
-    _RANGE._serialized_start = 3975
-    _RANGE._serialized_end = 4228
-    _RANGE_STEP._serialized_start = 4146
-    _RANGE_STEP._serialized_end = 4172
-    _RANGE_NUMPARTITIONS._serialized_start = 4174
-    _RANGE_NUMPARTITIONS._serialized_end = 4228
+    _RELATION._serialized_end = 990
+    _UNKNOWN._serialized_start = 992
+    _UNKNOWN._serialized_end = 1001
+    _RELATIONCOMMON._serialized_start = 1003
+    _RELATIONCOMMON._serialized_end = 1052
+    _SQL._serialized_start = 1054
+    _SQL._serialized_end = 1081
+    _READ._serialized_start = 1084
+    _READ._serialized_end = 1494
+    _READ_NAMEDTABLE._serialized_start = 1226
+    _READ_NAMEDTABLE._serialized_end = 1287
+    _READ_DATASOURCE._serialized_start = 1290
+    _READ_DATASOURCE._serialized_end = 1481
+    _READ_DATASOURCE_OPTIONSENTRY._serialized_start = 1423
+    _READ_DATASOURCE_OPTIONSENTRY._serialized_end = 1481
+    _PROJECT._serialized_start = 1496
+    _PROJECT._serialized_end = 1613
+    _FILTER._serialized_start = 1615
+    _FILTER._serialized_end = 1727
+    _JOIN._serialized_start = 1730
+    _JOIN._serialized_end = 2180
+    _JOIN_JOINTYPE._serialized_start = 1993
+    _JOIN_JOINTYPE._serialized_end = 2180
+    _SETOPERATION._serialized_start = 2183
+    _SETOPERATION._serialized_end = 2546
+    _SETOPERATION_SETOPTYPE._serialized_start = 2432
+    _SETOPERATION_SETOPTYPE._serialized_end = 2546
+    _LIMIT._serialized_start = 2548
+    _LIMIT._serialized_end = 2624
+    _OFFSET._serialized_start = 2626
+    _OFFSET._serialized_end = 2705
+    _AGGREGATE._serialized_start = 2708
+    _AGGREGATE._serialized_end = 3033
+    _AGGREGATE_AGGREGATEFUNCTION._serialized_start = 2937
+    _AGGREGATE_AGGREGATEFUNCTION._serialized_end = 3033
+    _SORT._serialized_start = 3036
+    _SORT._serialized_end = 3538
+    _SORT_SORTFIELD._serialized_start = 3156
+    _SORT_SORTFIELD._serialized_end = 3344
+    _SORT_SORTDIRECTION._serialized_start = 3346
+    _SORT_SORTDIRECTION._serialized_end = 3454
+    _SORT_SORTNULLS._serialized_start = 3456
+    _SORT_SORTNULLS._serialized_end = 3538
+    _DEDUPLICATE._serialized_start = 3541
+    _DEDUPLICATE._serialized_end = 3683
+    _LOCALRELATION._serialized_start = 3685
+    _LOCALRELATION._serialized_end = 3778
+    _SAMPLE._serialized_start = 3781
+    _SAMPLE._serialized_end = 4021
+    _SAMPLE_SEED._serialized_start = 3995
+    _SAMPLE_SEED._serialized_end = 4021
+    _RANGE._serialized_start = 4024
+    _RANGE._serialized_end = 4277
+    _RANGE_STEP._serialized_start = 4195
+    _RANGE_STEP._serialized_end = 4221
+    _RANGE_NUMPARTITIONS._serialized_start = 4223
+    _RANGE_NUMPARTITIONS._serialized_end = 4277
+    _SUBQUERYALIAS._serialized_start = 4279
+    _SUBQUERYALIAS._serialized_end = 4393
 # @@protoc_insertion_point(module_scope)
diff --git a/python/pyspark/sql/connect/proto/relations_pb2.pyi b/python/pyspark/sql/connect/proto/relations_pb2.pyi
index 7618ed230e7..720a3bc99be 100644
--- a/python/pyspark/sql/connect/proto/relations_pb2.pyi
+++ b/python/pyspark/sql/connect/proto/relations_pb2.pyi
@@ -74,6 +74,7 @@ class Relation(google.protobuf.message.Message):
     OFFSET_FIELD_NUMBER: builtins.int
     DEDUPLICATE_FIELD_NUMBER: builtins.int
     RANGE_FIELD_NUMBER: builtins.int
+    SUBQUERY_ALIAS_FIELD_NUMBER: builtins.int
     UNKNOWN_FIELD_NUMBER: builtins.int
     @property
     def common(self) -> global___RelationCommon: ...
@@ -106,6 +107,8 @@ class Relation(google.protobuf.message.Message):
     @property
     def range(self) -> global___Range: ...
     @property
+    def subquery_alias(self) -> global___SubqueryAlias: ...
+    @property
     def unknown(self) -> global___Unknown: ...
     def __init__(
         self,
@@ -125,6 +128,7 @@ class Relation(google.protobuf.message.Message):
         offset: global___Offset | None = ...,
         deduplicate: global___Deduplicate | None = ...,
         range: global___Range | None = ...,
+        subquery_alias: global___SubqueryAlias | None = ...,
         unknown: global___Unknown | None = ...,
     ) -> None: ...
     def HasField(
@@ -162,6 +166,8 @@ class Relation(google.protobuf.message.Message):
             b"sort",
             "sql",
             b"sql",
+            "subquery_alias",
+            b"subquery_alias",
             "unknown",
             b"unknown",
         ],
@@ -201,6 +207,8 @@ class Relation(google.protobuf.message.Message):
             b"sort",
             "sql",
             b"sql",
+            "subquery_alias",
+            b"subquery_alias",
             "unknown",
             b"unknown",
         ],
@@ -222,6 +230,7 @@ class Relation(google.protobuf.message.Message):
         "offset",
         "deduplicate",
         "range",
+        "subquery_alias",
         "unknown",
     ] | None: ...
 
@@ -244,18 +253,14 @@ class RelationCommon(google.protobuf.message.Message):
     DESCRIPTOR: google.protobuf.descriptor.Descriptor
 
     SOURCE_INFO_FIELD_NUMBER: builtins.int
-    ALIAS_FIELD_NUMBER: builtins.int
     source_info: builtins.str
-    alias: builtins.str
     def __init__(
         self,
         *,
         source_info: builtins.str = ...,
-        alias: builtins.str = ...,
     ) -> None: ...
     def ClearField(
-        self,
-        field_name: typing_extensions.Literal["alias", b"alias", "source_info", b"source_info"],
+        self, field_name: typing_extensions.Literal["source_info", b"source_info"]
     ) -> None: ...
 
 global___RelationCommon = RelationCommon
@@ -983,6 +988,7 @@ class Range(google.protobuf.message.Message):
     start: builtins.int
     """Optional. Default value = 0"""
     end: builtins.int
+    """Required."""
     @property
     def step(self) -> global___Range.Step:
         """Optional. Default value = 1"""
@@ -1011,3 +1017,40 @@ class Range(google.protobuf.message.Message):
     ) -> None: ...
 
 global___Range = Range
+
+class SubqueryAlias(google.protobuf.message.Message):
+    """Relation alias."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    INPUT_FIELD_NUMBER: builtins.int
+    ALIAS_FIELD_NUMBER: builtins.int
+    QUALIFIER_FIELD_NUMBER: builtins.int
+    @property
+    def input(self) -> global___Relation:
+        """Required. The input relation."""
+    alias: builtins.str
+    """Required. The alias."""
+    @property
+    def qualifier(
+        self,
+    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """Optional. Qualifier of the alias."""
+    def __init__(
+        self,
+        *,
+        input: global___Relation | None = ...,
+        alias: builtins.str = ...,
+        qualifier: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def HasField(
+        self, field_name: typing_extensions.Literal["input", b"input"]
+    ) -> builtins.bool: ...
+    def ClearField(
+        self,
+        field_name: typing_extensions.Literal[
+            "alias", b"alias", "input", b"input", "qualifier", b"qualifier"
+        ],
+    ) -> None: ...
+
+global___SubqueryAlias = SubqueryAlias
diff --git a/python/pyspark/sql/tests/connect/test_connect_plan_only.py b/python/pyspark/sql/tests/connect/test_connect_plan_only.py
index 622340a3ef1..05fcedd5c14 100644
--- a/python/pyspark/sql/tests/connect/test_connect_plan_only.py
+++ b/python/pyspark/sql/tests/connect/test_connect_plan_only.py
@@ -116,7 +116,7 @@ class SparkConnectTestsPlanOnly(PlanOnlyTestFixture):
     def test_relation_alias(self):
         df = self.connect.readTable(table_name=self.tbl_name)
         plan = df.alias("table_alias")._plan.to_proto(self.connect)
-        self.assertEqual(plan.root.common.alias, "table_alias")
+        self.assertEqual(plan.root.subquery_alias.alias, "table_alias")
 
     def test_datasource_read(self):
         reader = DataFrameReader(self.connect)

