This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new fd695be19d3f [SPARK-47903][PYTHON][FOLLOW-UP] Removed changes relating to try_parse_json
fd695be19d3f is described below

commit fd695be19d3fcdca5503e8f1f4222732ef3ac6ce
Author: Harsh Motwani <harsh.motw...@databricks.com>
AuthorDate: Wed Apr 24 15:41:51 2024 +0900

    [SPARK-47903][PYTHON][FOLLOW-UP] Removed changes relating to try_parse_json
    
    ### What changes were proposed in this pull request?
    
    Removed changes relating to `try_parse_json` that were accidentally pushed during the late stages of this PR.
    
    ### Why are the changes needed?
    
    There is already another PR in progress adding support for `try_parse_json`, and the implementation that was accidentally pushed is outdated.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, it removes the `try_parse_json` function that was just added. The feature will be added back soon.
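    
    For illustration, a minimal sketch of the behavior being removed (and expected to return with the in-progress PR), based on the deleted tests below; the exact outputs are assumptions rather than verified results:
    
    ```scala
    // try_parse_json returned NULL on malformed JSON instead of raising an error:
    spark.sql("""SELECT try_parse_json('{"a":1,"b":0.8}')""").show() // {"a":1,"b":0.8}
    spark.sql("""SELECT try_parse_json('{"a":1')""").show()          // NULL
    // parse_json, by contrast, fails on the same malformed input.
    ```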
    
    ### How was this patch tested?
    
    NA
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #46170 from harshmotw-db/python_scalar_variant.
    
    Authored-by: Harsh Motwani <harsh.motw...@databricks.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../sql/catalyst/analysis/FunctionRegistry.scala   |  1 -
 .../catalyst/expressions/ExpectsInputTypes.scala   |  1 +
 .../expressions/variant/variantExpressions.scala   | 30 ----------------
 .../sql-functions/sql-expression-schema.md         |  1 -
 .../apache/spark/sql/VariantEndToEndSuite.scala    | 40 ----------------------
 5 files changed, 1 insertion(+), 72 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 5f43cc106e67..e4e663d15167 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -822,7 +822,6 @@ object FunctionRegistry {
 
     // Variant
     expression[ParseJson]("parse_json"),
-    expression[TryParseJson]("try_parse_json"),
     expression[IsVariantNull]("is_variant_null"),
     expressionBuilder("variant_get", VariantGetExpressionBuilder),
     expressionBuilder("try_variant_get", TryVariantGetExpressionBuilder),
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
index 66c2f736f235..1a4a0271c54b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
@@ -48,6 +48,7 @@ trait ExpectsInputTypes extends Expression {
 }
 
 object ExpectsInputTypes extends QueryErrorsBase {
+
   def checkInputDataTypes(
       inputs: Seq[Expression],
       inputTypes: Seq[AbstractDataType]): TypeCheckResult = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/variant/variantExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/variant/variantExpressions.scala
index 07f08aa7e70e..6c4a8f90e3b5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/variant/variantExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/variant/variantExpressions.scala
@@ -75,36 +75,6 @@ case class ParseJson(child: Expression)
     copy(child = newChild)
 }
 
-// scalastyle:off line.size.limit
-@ExpressionDescription(
-  usage = "_FUNC_(jsonStr) - Parse a JSON string as an Variant value. Returns null when the string is not valid JSON value.",
-  examples = """
-    Examples:
-      > SELECT _FUNC_('{"a":1,"b":0.8}');
-       {"a":1,"b":0.8}
-  """,
-  since = "4.0.0",
-  group = "variant_funcs"
-)
-// scalastyle:on line.size.limit
-case class TryParseJson(expr: Expression, replacement: Expression)
-  extends RuntimeReplaceable with InheritAnalysisRules {
-  def this(child: Expression) = this(child, TryEval(ParseJson(child)))
-
-  override def parameters: Seq[Expression] = Seq(expr)
-
-  override def dataType: DataType = VariantType
-
-  override def prettyName: String = "try_parse_json"
-
-  override protected def withNewChildInternal(newChild: Expression): Expression =
-    copy(replacement = newChild)
-
-  override def checkInputDataTypes(): TypeCheckResult = {
-    ExpectsInputTypes.checkInputDataTypes(Seq(expr), Seq(StringType))
-  }
-}
-
 // scalastyle:off line.size.limit
 @ExpressionDescription(
   usage = "_FUNC_(expr) - Check if a variant value is a variant null. Returns true if and only if the input is a variant null and false otherwise (including in the case of SQL NULL).",
diff --git a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
index a2fa30b7f364..ae9e68c4cbb1 100644
--- a/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
+++ b/sql/core/src/test/resources/sql-functions/sql-expression-schema.md
@@ -440,7 +440,6 @@
 | org.apache.spark.sql.catalyst.expressions.variant.ParseJson | parse_json | SELECT parse_json('{"a":1,"b":0.8}') | struct<parse_json({"a":1,"b":0.8}):variant> |
 | org.apache.spark.sql.catalyst.expressions.variant.SchemaOfVariant | schema_of_variant | SELECT schema_of_variant(parse_json('null')) | struct<schema_of_variant(parse_json(null)):string> |
 | org.apache.spark.sql.catalyst.expressions.variant.SchemaOfVariantAgg | schema_of_variant_agg | SELECT schema_of_variant_agg(parse_json(j)) FROM VALUES ('1'), ('2'), ('3') AS tab(j) | struct<schema_of_variant_agg(parse_json(j)):string> |
-| org.apache.spark.sql.catalyst.expressions.variant.TryParseJson | try_parse_json | SELECT try_parse_json('{"a":1,"b":0.8}') | struct<try_parse_json({"a":1,"b":0.8}):variant> |
 | org.apache.spark.sql.catalyst.expressions.variant.TryVariantGetExpressionBuilder | try_variant_get | SELECT try_variant_get(parse_json('{"a": 1}'), '$.a', 'int') | struct<try_variant_get(parse_json({"a": 1}), $.a):int> |
 | org.apache.spark.sql.catalyst.expressions.variant.VariantGetExpressionBuilder | variant_get | SELECT variant_get(parse_json('{"a": 1}'), '$.a', 'int') | struct<variant_get(parse_json({"a": 1}), $.a):int> |
 | org.apache.spark.sql.catalyst.expressions.xml.XPathBoolean | xpath_boolean | SELECT xpath_boolean('<a><b>1</b></a>','a/b') | struct<xpath_boolean(<a><b>1</b></a>, a/b):boolean> |
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/VariantEndToEndSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/VariantEndToEndSuite.scala
index 96e85dc58b40..d53b49f7ab5a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/VariantEndToEndSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/VariantEndToEndSuite.scala
@@ -88,46 +88,6 @@ class VariantEndToEndSuite extends QueryTest with SharedSparkSession {
     check("[0.0, 1.00, 1.10, 1.23]", "[0,1,1.1,1.23]")
   }
 
-  test("try_parse_json/to_json round-trip") {
-    def check(input: String, output: String = "INPUT IS OUTPUT"): Unit = {
-      val df = Seq(input).toDF("v")
-      val variantDF = df.selectExpr("to_json(try_parse_json(v)) as v").select(Column("v"))
-      val expected = if (output != "INPUT IS OUTPUT") output else input
-      checkAnswer(variantDF, Seq(Row(expected)))
-    }
-
-    check("null")
-    check("true")
-    check("false")
-    check("-1")
-    check("1.0E10")
-    check("\"\"")
-    check("\"" + ("a" * 63) + "\"")
-    check("\"" + ("b" * 64) + "\"")
-    // scalastyle:off nonascii
-    check("\"" + ("你好,世界" * 20) + "\"")
-    // scalastyle:on nonascii
-    check("[]")
-    check("{}")
-    // scalastyle:off nonascii
-    check(
-      "[null, true,   false,-1, 1e10, \"\\uD83D\\uDE05\", [ ], { } ]",
-      "[null,true,false,-1,1.0E10,\"😅\",[],{}]"
-    )
-    // scalastyle:on nonascii
-    check("[0.0, 1.00, 1.10, 1.23]", "[0,1,1.1,1.23]")
-    // Places where parse_json should fail and therefore, try_parse_json should return null
-    check("{1:2}", null)
-    check("{\"a\":1", null)
-    check("{\"a\":[a,b,c]}", null)
-  }
-
-  test("try_parse_json with invalid input type") {
-    // This test is required because the type checking logic in try_parse_json is custom.
-    val exception = intercept[Exception](spark.sql("select try_parse_json(1)"))
-    assert(exception != null)
-  }
-
   test("to_json with nested variant") {
     val df = Seq(1).toDF("v")
     val variantDF1 = df.select(

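For background on the one-line FunctionRegistry change above: `expression[T](name)` is what exposes a Catalyst expression under a SQL function name, and the removed `TryParseJson` simply rewrote itself to `TryEval(ParseJson(child))` so that parse errors surface as NULL. The sketch below is a simplified, self-contained illustration of that registry-plus-rewrite idea under assumed stand-in names (`MiniRegistry`, `Expr`), not Spark's actual implementation:

```scala
// Simplified sketch of the registry/rewrite pattern; MiniRegistry, Expr, and
// friends are hypothetical stand-ins, not Spark's real Catalyst classes.
object MiniRegistry {
  sealed trait Expr
  case class Literal(value: Any) extends Expr
  case class ParseJson(child: Expr) extends Expr
  // TryEval-style wrapper: evaluation errors in `child` become NULL.
  case class TryEval(child: Expr) extends Expr

  // Maps a SQL function name to a builder from argument expressions.
  private val builders = scala.collection.mutable.Map[String, Seq[Expr] => Expr]()

  def register(name: String)(builder: Seq[Expr] => Expr): Unit =
    builders(name) = builder

  def lookup(name: String, args: Seq[Expr]): Expr =
    builders.getOrElse(name, sys.error(s"undefined function: $name"))(args)

  def main(args: Array[String]): Unit = {
    // Mirrors the removed `def this(child) = this(child, TryEval(ParseJson(child)))`
    // and the deleted `expression[TryParseJson]("try_parse_json")` registration.
    register("try_parse_json") { case Seq(child) => TryEval(ParseJson(child)) }
    println(lookup("try_parse_json", Seq(Literal("""{"a":1}"""))))
    // prints: TryEval(ParseJson(Literal({"a":1})))
  }
}
```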

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
