Github user janplus commented on a diff in the pull request:

    https://github.com/apache/spark/pull/14008#discussion_r69385193
  
    --- Diff: 
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
 ---
    @@ -725,4 +725,51 @@ class StringExpressionsSuite extends SparkFunSuite 
with ExpressionEvalHelper {
         checkEvaluation(FindInSet(Literal("abf"), Literal("abc,b,ab,c,def")), 
0)
         checkEvaluation(FindInSet(Literal("ab,"), Literal("abc,b,ab,c,def")), 
0)
       }
    +
    +  test("ParseUrl") {
    +    def checkParseUrl(expected: String, urlStr: String, partToExtract: 
String): Unit = {
    +      checkEvaluation(
    +        ParseUrl(Seq(Literal.create(urlStr, StringType),
    +          Literal.create(partToExtract, StringType))), expected)
    +    }
    +    def checkParseUrlWithKey(
    +        expected: String, urlStr: String,
    +        partToExtract: String, key: String): Unit = {
    +      checkEvaluation(
    +        ParseUrl(Seq(Literal.create(urlStr, StringType), 
Literal.create(partToExtract, StringType),
    +          Literal.create(key, StringType))), expected)
    +    }
    +
    +    checkParseUrl("spark.apache.org", 
"http://spark.apache.org/path?query=1";, "HOST")
    +    checkParseUrl("/path", "http://spark.apache.org/path?query=1";, "PATH")
    +    checkParseUrl("query=1", "http://spark.apache.org/path?query=1";, 
"QUERY")
    +    checkParseUrl("Ref", "http://spark.apache.org/path?query=1#Ref";, "REF")
    +    checkParseUrl("http", "http://spark.apache.org/path?query=1";, 
"PROTOCOL")
    +    checkParseUrl("/path?query=1", "http://spark.apache.org/path?query=1";, 
"FILE")
    +    checkParseUrl("spark.apache.org:8080", 
"http://spark.apache.org:8080/path?query=1";, "AUTHORITY")
    +    checkParseUrl("userinfo", 
"http://useri...@spark.apache.org/path?query=1";, "USERINFO")
    +    checkParseUrlWithKey("1", "http://spark.apache.org/path?query=1";, 
"QUERY", "query")
    +
    +    // Null checking
    +    checkParseUrl(null, null, "HOST")
    +    checkParseUrl(null, "http://spark.apache.org/path?query=1";, null)
    +    checkParseUrl(null, null, null)
    +    checkParseUrl(null, "test", "HOST")
    +    checkParseUrl(null, "http://spark.apache.org/path?query=1";, "NO")
    +    checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1";, 
"HOST", "query")
    +    checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1";, 
"QUERY", "quer")
    +    checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1";, 
"QUERY", null)
    +    checkParseUrlWithKey(null, "http://spark.apache.org/path?query=1";, 
"QUERY", "")
    +
    +    // exceptional cases
    +    intercept[java.util.regex.PatternSyntaxException] {
    --- End diff --
    
    Hi, @dongjoon-hyun 
    It seems that only when `url`, `partToExtract` and `key` are all `Literal`s does Hive give a `SemanticException`.
    
    > hive> select * from url_parse_data;
    OK
    http://spark/path?  QUERY   ???
    Time taken: 0.054 seconds, Fetched: 1 row(s)
    
    > hive> select parse_url("http://spark/path?";, "QUERY", "???") from 
url_parse_data;
    FAILED: SemanticException [Error 10014]: Line 1:7 Wrong arguments '"???"': 
org.apache.hadoop.hive.ql.metadata.HiveException: Unable to execute method 
public java.lang.String 
org.apache.hadoop.hive.ql.udf.UDFParseUrl.evaluate(java.lang.String,java.lang.String,java.lang.String)
  on object org.apache.hadoop.hive.ql.udf.UDFParseUrl@59e082f8 of class 
org.apache.hadoop.hive.ql.udf.UDFParseUrl with arguments 
{http://spark/path?:java.lang.String, QUERY:java.lang.String, 
???:java.lang.String} of size 3
    
    > hive> select parse_url(url, "QUERY", "???") from url_parse_data;
    OK
    Failed with exception 
java.io.IOException:org.apache.hadoop.hive.ql.metadata.HiveException: Unable to 
execute method public java.lang.String 
org.apache.hadoop.hive.ql.udf.UDFParseUrl.evaluate(java.lang.String,java.lang.String,java.lang.String)
  on object org.apache.hadoop.hive.ql.udf.UDFParseUrl@7d1f3fe9 of class 
org.apache.hadoop.hive.ql.udf.UDFParseUrl with arguments 
{http://spark/path?:java.lang.String, QUERY:java.lang.String, 
???:java.lang.String} of size 3
    
    > hive> select parse_url("http://spark/path?";, part, "???") from 
url_parse_data;
    OK
    Failed with exception 
java.io.IOException:org.apache.hadoop.hive.ql.metadata.HiveException: Unable to 
execute method public java.lang.String 
org.apache.hadoop.hive.ql.udf.UDFParseUrl.evaluate(java.lang.String,java.lang.String,java.lang.String)
  on object org.apache.hadoop.hive.ql.udf.UDFParseUrl@37fef327 of class 
org.apache.hadoop.hive.ql.udf.UDFParseUrl with arguments 
{http://spark/path?:java.lang.String, QUERY:java.lang.String, 
???:java.lang.String} of size 3
    
    > hive> select parse_url("http://spark/path?";, "QUERY", key) from 
url_parse_data;
    OK
    Failed with exception 
java.io.IOException:org.apache.hadoop.hive.ql.metadata.HiveException: Unable to 
execute method public java.lang.String 
org.apache.hadoop.hive.ql.udf.UDFParseUrl.evaluate(java.lang.String,java.lang.String,java.lang.String)
  on object org.apache.hadoop.hive.ql.udf.UDFParseUrl@1d944fc0 of class 
org.apache.hadoop.hive.ql.udf.UDFParseUrl with arguments 
{http://spark/path?:java.lang.String, QUERY:java.lang.String, 
???:java.lang.String} of size 3
    
    Given that, this optimization does not seem worthwhile.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to