This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 61a6dee3ed4e [SPARK-51905][SQL] Disallow NOT ENFORCED CHECK constraint
61a6dee3ed4e is described below

commit 61a6dee3ed4e0773454198c80a639a97de0df2af
Author: Gengliang Wang <[email protected]>
AuthorDate: Mon May 5 20:32:27 2025 -0700

    [SPARK-51905][SQL] Disallow NOT ENFORCED CHECK constraint
    
    ### What changes were proposed in this pull request?
    
    As we plan to enforce check constraints by default, it remains unclear what 
the behavior should be when it is "NOT ENFORCED".
    
    - Option 1: Never support NOVALIDATE/NOT VALID in ALTER TABLE. Use 
ENFORCED/NOT ENFORCED to check if validation must be done.
    - Option 2: Add NOVALIDATE/NOT VALID to control this.
    - Option 3: Don't support "NOT ENFORCED" for now. Let's think about 
Option 1 or Option 2 later
    
    I would prefer option 3. We can make a decision later based on user 
expectations.
    
    ### Why are the changes needed?
    
    Clarify the behavior of a "NOT ENFORCED" CHECK constraint.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this is the DSv2 framework and it is not released yet.
    
    ### How was this patch tested?
    
    New UT
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #50772 from gengliangwang/disallowNotEnforcedCheck.
    
    Authored-by: Gengliang Wang <[email protected]>
    Signed-off-by: Gengliang Wang <[email protected]>
---
 .../sql/catalyst/expressions/constraints.scala     | 14 +++-
 .../command/CheckConstraintParseSuite.scala        | 83 ++++++++++++++++++++++
 .../command/ConstraintParseSuiteBase.scala         | 20 ++++--
 .../command/ForeignKeyConstraintParseSuite.scala   |  4 ++
 .../command/PrimaryKeyConstraintParseSuite.scala   |  2 +
 .../command/UniqueConstraintParseSuite.scala       |  3 +
 6 files changed, 119 insertions(+), 7 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
index 15b8b7e2e731..8dad8285d50a 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
@@ -150,8 +150,20 @@ case class CheckConstraint(
 
   override def withTableName(tableName: String): TableConstraint = 
copy(tableName = tableName)
 
-  override def withUserProvidedCharacteristic(c: ConstraintCharacteristic): 
TableConstraint =
+  override def withUserProvidedCharacteristic(c: ConstraintCharacteristic): 
TableConstraint = {
+    if (c.enforced.contains(false)) {
+      val origin = CurrentOrigin.get
+      throw new ParseException(
+        command = origin.sqlText,
+        start = origin,
+        errorClass = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
+        messageParameters = Map(
+          "characteristic" -> "NOT ENFORCED",
+          "constraintType" -> "CHECK")
+      )
+    }
     copy(userProvidedCharacteristic = c)
+  }
 }
 
 // scalastyle:off line.size.limit
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
index 2df42b1b429e..cfae1ea31e0d 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
@@ -316,4 +316,87 @@ class CheckConstraintParseSuite extends 
ConstraintParseSuiteBase {
     }
   }
 
+  test("NOT ENFORCED is not supported for CHECK -- table level") {
+    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
+      val characteristic = if (c2.isEmpty) {
+        c1
+      } else {
+        s"$c1 $c2"
+      }
+      val sql =
+        s"""
+           |CREATE TABLE a.b.t (a INT, b STRING, CONSTRAINT C1 CHECK (a > 0) 
$characteristic)
+           |""".stripMargin
+
+      val expectedContext = ExpectedContext(
+        fragment = s"CONSTRAINT C1 CHECK (a > 0) $characteristic"
+      )
+
+      checkError(
+        exception = intercept[ParseException] {
+          parsePlan(sql)
+        },
+        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
+        parameters = Map(
+          "characteristic" -> "NOT ENFORCED",
+          "constraintType" -> "CHECK"),
+        queryContext = Array(expectedContext))
+    }
+  }
+
+  test("NOT ENFORCED is not supported for CHECK -- column level") {
+    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
+      val characteristic = if (c2.isEmpty) {
+        c1
+      } else {
+        s"$c1 $c2"
+      }
+      val sql =
+        s"""
+           |CREATE TABLE a.b.t (a INT CHECK (a > 0) $characteristic, b STRING)
+           |""".stripMargin
+
+      val expectedContext = ExpectedContext(
+        fragment = s"CHECK (a > 0) $characteristic"
+      )
+
+      checkError(
+        exception = intercept[ParseException] {
+          parsePlan(sql)
+        },
+        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
+        parameters = Map(
+          "characteristic" -> "NOT ENFORCED",
+          "constraintType" -> "CHECK"),
+        queryContext = Array(expectedContext))
+    }
+  }
+
+  test("NOT ENFORCED is not supported for CHECK -- ALTER TABLE") {
+    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
+      val characteristic = if (c2.isEmpty) {
+        c1
+      } else {
+        s"$c1 $c2"
+      }
+      val sql =
+        s"""
+           |ALTER TABLE a.b.t ADD CONSTRAINT C1 CHECK (a > 0) $characteristic
+           |""".stripMargin
+
+      val expectedContext = ExpectedContext(
+        fragment = s"CONSTRAINT C1 CHECK (a > 0) $characteristic"
+      )
+
+      checkError(
+        exception = intercept[ParseException] {
+          parsePlan(sql)
+        },
+        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
+        parameters = Map(
+          "characteristic" -> "NOT ENFORCED",
+          "constraintType" -> "CHECK"),
+        queryContext = Array(expectedContext))
+    }
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
index 8b5ddb506f78..dadc791138ac 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
@@ -26,13 +26,8 @@ import org.apache.spark.sql.types.{IntegerType, StringType}
 abstract class ConstraintParseSuiteBase extends AnalysisTest with 
SharedSparkSession {
   protected def validConstraintCharacteristics = Seq(
     ("", "", ConstraintCharacteristic(enforced = None, rely = None)),
-    ("NOT ENFORCED", "", ConstraintCharacteristic(enforced = Some(false), rely 
= None)),
     ("", "RELY", ConstraintCharacteristic(enforced = None, rely = Some(true))),
-    ("", "NORELY", ConstraintCharacteristic(enforced = None, rely = 
Some(false))),
-    ("NOT ENFORCED", "RELY",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
-    ("NOT ENFORCED", "NORELY",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(false)))
+    ("", "NORELY", ConstraintCharacteristic(enforced = None, rely = 
Some(false)))
   )
 
   protected def enforcedConstraintCharacteristics = Seq(
@@ -43,6 +38,19 @@ abstract class ConstraintParseSuiteBase extends AnalysisTest 
with SharedSparkSes
     ("NORELY", "ENFORCED", ConstraintCharacteristic(enforced = Some(true), 
rely = Some(false)))
   )
 
+  protected def notEnforcedConstraintCharacteristics = Seq(
+    ("NOT ENFORCED", "RELY",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
+    ("NOT ENFORCED", "NORELY",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(false))),
+    ("RELY", "NOT ENFORCED",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
+    ("NORELY", "NOT ENFORCED",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(false))),
+    ("NOT ENFORCED", "",
+      ConstraintCharacteristic(enforced = Some(false), rely = None))
+  )
+
   protected val invalidConstraintCharacteristics = Seq(
     ("ENFORCED", "ENFORCED"),
     ("ENFORCED", "NOT ENFORCED"),
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
index a4736555a24e..4b89611a8b87 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
@@ -23,6 +23,10 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class ForeignKeyConstraintParseSuite extends ConstraintParseSuiteBase {
+
+  override val validConstraintCharacteristics =
+    super.validConstraintCharacteristics ++ 
notEnforcedConstraintCharacteristics
+
   test("Create table with foreign key - table level") {
     val sql = "CREATE TABLE t (a INT, b STRING," +
       " FOREIGN KEY (a) REFERENCES parent(id)) USING parquet"
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
index fae7ed14de7b..711db63ad424 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
@@ -24,6 +24,8 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class PrimaryKeyConstraintParseSuite extends ConstraintParseSuiteBase {
+  override val validConstraintCharacteristics =
+    super.validConstraintCharacteristics ++ 
notEnforcedConstraintCharacteristics
 
   test("Create table with primary key - table level") {
     val sql = "CREATE TABLE t (a INT, b STRING, PRIMARY KEY (a)) USING parquet"
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
index 704b8417cd55..4f07ffa08736 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
@@ -23,6 +23,9 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class UniqueConstraintParseSuite extends ConstraintParseSuiteBase {
+  override val validConstraintCharacteristics =
+    super.validConstraintCharacteristics ++ 
notEnforcedConstraintCharacteristics
+
   test("Create table with unnamed unique constraint") {
     Seq(
       "CREATE TABLE t (a INT, b STRING, UNIQUE (a)) USING parquet",


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to