Github user maropu commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22316#discussion_r214761811
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/DataFramePivotSuite.scala ---
    @@ -308,4 +308,27 @@ class DataFramePivotSuite extends QueryTest with 
SharedSQLContext {
     
         assert(exception.getMessage.contains("aggregate functions are not 
allowed"))
       }
    +
    +  test("pivoting column list with values") {
    +    val expected = Row(2012, 10000.0, null) :: Row(2013, 48000.0, 30000.0) 
:: Nil
    +    val df = trainingSales
    +      .groupBy($"sales.year")
    +      .pivot(struct(lower($"sales.course"), $"training"), Seq(
    +        struct(lit("dotnet"), lit("Experts")),
    +        struct(lit("java"), lit("Dummies")))
    +      ).agg(sum($"sales.earnings"))
    +
    +    checkAnswer(df, expected)
    +  }
    +
    +  test("pivoting column list") {
    +    val exception = intercept[RuntimeException] {
    +      trainingSales
    +        .groupBy($"sales.year")
    +        .pivot(struct(lower($"sales.course"), $"training"))
    +        .agg(sum($"sales.earnings"))
    +        .collect()
    --- End diff --
    
    I tried this in your branch:
    ```
    scala> df.show
    +--------+--------------------+
    |training|               sales|
    +--------+--------------------+
    | Experts|[dotNET, 2012, 10...|
    | Experts|[JAVA, 2012, 2000...|
    | Dummies|[dotNet, 2012, 50...|
    | Experts|[dotNET, 2013, 48...|
    | Dummies|[Java, 2013, 3000...|
    +--------+--------------------+
    
    scala> df.groupBy($"sales.year").pivot(struct(lower($"sales.course"), 
$"training")).agg(sum($"sales.earnings"))
    java.lang.RuntimeException: Unsupported literal type class 
org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema [dotnet,Dummies]
      at 
org.apache.spark.sql.catalyst.expressions.Literal$.apply(literals.scala:78)
      at 
org.apache.spark.sql.catalyst.expressions.Literal$$anonfun$create$2.apply(literals.scala:164)
      at 
org.apache.spark.sql.catalyst.expressions.Literal$$anonfun$create$2.apply(literals.scala:164)
      at scala.util.Try.getOrElse(Try.scala:79)
      at 
org.apache.spark.sql.catalyst.expressions.Literal$.create(literals.scala:163)
      at org.apache.spark.sql.functions$.typedLit(functions.scala:127)
    ```
    Am I missing something?


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to