anishshri-db commented on code in PR #45674:
URL: https://github.com/apache/spark/pull/45674#discussion_r1546642636


##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -17,17 +17,22 @@
 
 package org.apache.spark.sql.execution.streaming
 
+import org.apache.spark.internal.Logging

Review Comment:
   same here ?



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -49,12 +54,17 @@ object StateKeyValueRowSchema {
 class StateTypesEncoder[GK, V](
     keySerializer: Serializer[GK],
     valEncoder: Encoder[V],
-    stateName: String) {
-  import org.apache.spark.sql.execution.streaming.StateKeyValueRowSchema._
+    stateName: String,
+    hasTtl: Boolean) extends Logging {
+  import org.apache.spark.sql.execution.streaming.TransformWithStateKeyValueRowSchema._
 
   /** Variables reused for conversions between byte array and UnsafeRow */
   private val keyProjection = UnsafeProjection.create(KEY_ROW_SCHEMA)
-  private val valueProjection = UnsafeProjection.create(VALUE_ROW_SCHEMA)
+  private val valueProjection = if (hasTtl) {
+      UnsafeProjection.create(VALUE_ROW_SCHEMA_WITH_TTL)

Review Comment:
   nit: indent seems off ?
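
  A sketch of how this might read with the usual two-space body indentation; note the `else` branch is assumed here, since the quoted hunk cuts off before it:

  ```scala
  // Sketch only: the else branch is assumed (the quoted hunk ends before it).
  private val valueProjection = if (hasTtl) {
    UnsafeProjection.create(VALUE_ROW_SCHEMA_WITH_TTL)
  } else {
    UnsafeProjection.create(VALUE_ROW_SCHEMA)
  }
  ```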



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -49,12 +54,17 @@ object StateKeyValueRowSchema {
 class StateTypesEncoder[GK, V](
     keySerializer: Serializer[GK],
     valEncoder: Encoder[V],
-    stateName: String) {
-  import org.apache.spark.sql.execution.streaming.StateKeyValueRowSchema._
+    stateName: String,
+    hasTtl: Boolean) extends Logging {

Review Comment:
   nit: do we use `Logging` somewhere ?
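
  If `Logging` ends up unused, the mixin and the `import org.apache.spark.internal.Logging` above can both be dropped. If it is kept, a hypothetical usage (not taken from this PR) would look something like:

  ```scala
  // Hypothetical sketch, not from the PR: only keep `extends Logging`
  // if a call along these lines actually exists somewhere in the class.
  logDebug(s"Created StateTypesEncoder for state variable $stateName with hasTtl=$hasTtl")
  ```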



##########
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StateTypesEncoderUtils.scala:
##########
@@ -65,22 +75,49 @@ class StateTypesEncoder[GK, V](
   // TODO: validate places that are trying to encode the key and check if we can eliminate/
   // add caching for some of these calls.
   def encodeGroupingKey(): UnsafeRow = {
+    val keyRow = keyProjection(InternalRow(serializeGroupingKey()))
+    keyRow
+  }
+
+  /**
+   * Encodes the provided grouping key into Spark UnsafeRow.
+   *
+   * @param groupingKeyBytes serialized grouping key byte array
+   * @return encoded UnsafeRow
+   */
+  def encodeSerializedGroupingKey(
+      groupingKeyBytes: Array[Byte]): UnsafeRow = {

Review Comment:
   nit: could we move to same line above ?
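
  For reference, the signature fits on one line within the 100-character limit; a sketch of the suggested formatting (the body is elided here because the quoted hunk ends at the opening brace):

  ```scala
  // Formatting sketch only; the method body is unchanged and elided here.
  def encodeSerializedGroupingKey(groupingKeyBytes: Array[Byte]): UnsafeRow = {
    // ... existing body ...
  }
  ```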


