GideonPotok commented on code in PR #46041:
URL: https://github.com/apache/spark/pull/46041#discussion_r1567244086
########## sql/core/src/test/scala/org/apache/spark/sql/CollationStringExpressionsSuite.scala:
##########
@@ -212,6 +212,48 @@ class CollationStringExpressionsSuite
     })
   }
 
+  test("Support StringRPad string expressions with collation") {
+    // Supported collations
+    case class StringRPadTestCase[R](s: String, len: Int, pad: String, c: String, result: R)
+    val testCases = Seq(
+      StringRPadTestCase("", 5, " ", "UTF8_BINARY", "     "),
+      StringRPadTestCase("abc", 5, " ", "UNICODE", "abc  "),
+      StringRPadTestCase("Hello", 7, "Wörld", "UTF8_BINARY_LCASE", "HelloWö"), // scalastyle:ignore
+      StringRPadTestCase("1234567890", 5, "aaaAAa", "UNICODE_CI", "12345"),
+      StringRPadTestCase("aaAA", 2, " ", "UTF8_BINARY", "aa"),
+      StringRPadTestCase("ÀÃÂĀĂȦÄäåäáâãȻȻȻȻȻǢǼÆ℀℃", 2, "1", "UNICODE", "ÀÃ"), // scalastyle:ignore
+      StringRPadTestCase("ĂȦÄäåäá", 20, "ÀÃÂĀĂȦÄäåäáâãȻȻȻȻȻǢǼÆ", "UTF8_BINARY_LCASE", "ĂȦÄäåäáÀÃÂĀĂȦÄäåäáâã"), // scalastyle:ignore
+      StringRPadTestCase("aȦÄä", 8, "a1", "UNICODE_CI", "aȦÄäa1a1") // scalastyle:ignore
+    )
+    testCases.foreach(t => {
+      val query = s"SELECT rpad(collate('${t.s}', '${t.c}'), ${t.len}, '${t.pad}')"
+      // Result & data type
+      checkAnswer(sql(query), Row(t.result))
+      assert(sql(query).schema.fields.head.dataType.sameType(StringType(t.c)))
+    })
+  }
+
+  test("Support StringLPad string expressions with collation") {
+    // Supported collations
+    case class StringLPadTestCase[R](s: String, len: Int, pad: String, c: String, result: R)
+    val testCases = Seq(
+      StringLPadTestCase("", 5, " ", "UTF8_BINARY", "     "),
+      StringLPadTestCase("abc", 5, " ", "UNICODE", "  abc"),
+      StringLPadTestCase("Hello", 7, "Wörld", "UTF8_BINARY_LCASE", "WöHello"), // scalastyle:ignore
+      StringLPadTestCase("1234567890", 5, "aaaAAa", "UNICODE_CI", "12345"),
+      StringLPadTestCase("aaAA", 2, " ", "UTF8_BINARY", "aa"),
+      StringLPadTestCase("ÀÃÂĀĂȦÄäåäáâãȻȻȻȻȻǢǼÆ℀℃", 2, "1", "UNICODE", "ÀÃ"), // scalastyle:ignore
+      StringLPadTestCase("ĂȦÄäåäá", 20, "ÀÃÂĀĂȦÄäåäáâãȻȻȻȻȻǢǼÆ", "UTF8_BINARY_LCASE", "ÀÃÂĀĂȦÄäåäáâãĂȦÄäåäá"), // scalastyle:ignore
+      StringLPadTestCase("aȦÄä", 8, "a1", "UNICODE_CI", "a1a1aȦÄä") // scalastyle:ignore
+    )
+    testCases.foreach(t => {
+      val query = s"SELECT lpad(collate('${t.s}', '${t.c}'), ${t.len}, '${t.pad}')"

Review Comment:
@uros-db Sure, but I think stringExpressions will have to change, and I need some guidance on how to change it. If we leave it as is and I add the tests proposed here, `assert(sql(query).schema.fields.head.dataType.sameType(StringType(t.c)))` will fail for `SELECT lpad('${t.s}', ${t.len}, collate('${t.pad}', '${t.c}'))`.

If nothing else, the implementation of `override def dataType: DataType = str.dataType` will likely need to change. I could try changing `dataType` to `= StringTypeAnyCollation`, but in my experience that won't work, so `dataType` will have to be defined as `= someOperation(str.dataType, pad.dataType)`.

When are we planning to implement the orders of precedence? Until that is implemented, the kind of change we are discussing amounts to checking that `pad` and `str` have the same collation (and having Spark throw an exception otherwise), since there is no precedence order (explicit, implicit, session) yet.
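To make the `someOperation(str.dataType, pad.dataType)` idea concrete, here is a rough sketch of the kind of helper I have in mind. The object `PadTypeResolution`, the method name `resolveStringType`, and the exception are purely illustrative (they are not existing Spark APIs), and a real patch would presumably report the mismatch through `checkInputDataTypes` as an analysis error rather than throwing from `dataType`:

```scala
import org.apache.spark.sql.types.{DataType, StringType}

object PadTypeResolution {
  // Pick the result type for lpad/rpad from both string arguments.
  // Assumes StringType equality distinguishes collations.
  def resolveStringType(strType: DataType, padType: DataType): DataType =
    (strType, padType) match {
      // Both arguments are strings with the same collation: keep that collation.
      case (s: StringType, p: StringType) if s == p => s
      // Collations differ; with no explicit/implicit/session precedence yet,
      // the only option is to reject the combination.
      case (s: StringType, p: StringType) =>
        throw new IllegalArgumentException(
          s"lpad/rpad require str and pad to share a collation, got $s and $p")
      // Non-string inputs are left to the usual implicit casts upstream.
      case _ => strType
    }
}
```

StringLPad / StringRPad would then define `override def dataType: DataType = PadTypeResolution.resolveStringType(str.dataType, pad.dataType)`, and once the precedence rules land, only the mismatch case would need to change.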