[spark] branch branch-3.0 updated: [SPARK-33197][SQL] Make changes to spark.sql.analyzer.maxIterations take effect at runtime
This is an automated email from the ASF dual-hosted git repository.

yamamuro pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
 new 590ccb3  [SPARK-33197][SQL] Make changes to spark.sql.analyzer.maxIterations take effect at runtime
590ccb3 is described below

commit 590ccb3ac4c32dd67226d2765406d23b949574f6
Author: Yuning Zhang 
AuthorDate: Mon Oct 26 16:19:06 2020 +0900

[SPARK-33197][SQL] Make changes to spark.sql.analyzer.maxIterations take effect at runtime

### What changes were proposed in this pull request?

Make changes to `spark.sql.analyzer.maxIterations` take effect at runtime.
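For illustration only (not part of the commit): with this change, the limit can be adjusted in a live session through the usual conf APIs. A minimal sketch, assuming a local `SparkSession`; the conf key is the real one, the session setup is scaffolding:

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical session setup; any existing SparkSession works the same way.
val spark = SparkSession.builder()
  .master("local[*]")
  .appName("analyzer-max-iterations-demo")
  .getOrCreate()

// Either form updates the session conf; after this patch the analyzer
// re-reads the value on its next run instead of using a cached copy.
spark.conf.set("spark.sql.analyzer.maxIterations", 200L)
spark.sql("SET spark.sql.analyzer.maxIterations=200")
```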

### Why are the changes needed?

`spark.sql.analyzer.maxIterations` is not a static conf, so users should be able to change it in a running session. However, before this patch, changing `spark.sql.analyzer.maxIterations` at runtime did not take effect: the analyzer kept using the value captured when it was constructed.
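The root cause is visible in the diff below: the limit was read once and baked into a constructor parameter and `val`s, so the `Analyzer` never saw later updates. A minimal standalone sketch of that pitfall (illustrative `Conf`/`CachedLimit`/`LiveLimit` names, not Spark code):

```scala
// A mutable config object standing in for a session conf.
class Conf { var maxIterations: Int = 100 }

// Captures the value once, when the instance is constructed.
class CachedLimit(conf: Conf) {
  val limit: Int = conf.maxIterations
}

// Re-reads the value on every access, so runtime changes are visible.
class LiveLimit(conf: Conf) {
  def limit: Int = conf.maxIterations
}

object Demo extends App {
  val conf = new Conf
  val cached = new CachedLimit(conf)
  val live = new LiveLimit(conf)
  conf.maxIterations = 5
  println(cached.limit) // 100 -- the runtime change is lost
  println(live.limit)   // 5   -- the runtime change takes effect
}
```

The commit applies exactly this `val`-to-`def` switch to `fixedPoint` and `batches`, so each analysis run re-reads `conf.analyzerMaxIterations`.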

### Does this PR introduce _any_ user-facing change?

Yes. Before this patch, changing `spark.sql.analyzer.maxIterations` at runtime did not take effect; with this patch, the analyzer picks up the new value on its next run.

### How was this patch tested?

Modified unit tests: a new `AnalysisSuite` test case (included in the diff below) verifies that a runtime change to the limit takes effect.

Closes #30108 from yuningzh-db/dynamic-analyzer-max-iterations.

Authored-by: Yuning Zhang 
Signed-off-by: Takeshi Yamamuro 
(cherry picked from commit a21945ce6c725896d19647891d1f9fa9ef74bd87)
Signed-off-by: Takeshi Yamamuro 
---
 .../spark/sql/catalyst/analysis/Analyzer.scala | 16 +++--
 .../sql/catalyst/analysis/AnalysisSuite.scala  | 41 ++
 2 files changed, 46 insertions(+), 11 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index bf3d4f0..2c9681b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -129,8 +129,7 @@ object AnalysisContext {
  */
 class Analyzer(
     override val catalogManager: CatalogManager,
-    conf: SQLConf,
-    maxIterations: Int)
+    conf: SQLConf)
   extends RuleExecutor[LogicalPlan] with CheckAnalysis with LookupCatalog {
 
   private val v1SessionCatalog: SessionCatalog = catalogManager.v1SessionCatalog
@@ -141,12 +140,7 @@ class Analyzer(
   def this(catalog: SessionCatalog, conf: SQLConf) = {
     this(
       new CatalogManager(conf, FakeV2SessionCatalog, catalog),
-      conf,
-      conf.analyzerMaxIterations)
-  }
-
-  def this(catalogManager: CatalogManager, conf: SQLConf) = {
-    this(catalogManager, conf, conf.analyzerMaxIterations)
+      conf)
   }
 
   def executeAndCheck(plan: LogicalPlan, tracker: QueryPlanningTracker): LogicalPlan = {
@@ -181,9 +175,9 @@ class Analyzer(
    * If the plan cannot be resolved within maxIterations, analyzer will throw exception to inform
    * user to increase the value of SQLConf.ANALYZER_MAX_ITERATIONS.
    */
-  protected val fixedPoint =
+  protected def fixedPoint =
     FixedPoint(
-      maxIterations,
+      conf.analyzerMaxIterations,
       errorOnExceed = true,
       maxIterationsSetting = SQLConf.ANALYZER_MAX_ITERATIONS.key)
 
@@ -199,7 +193,7 @@ class Analyzer(
    */
   val postHocResolutionRules: Seq[Rule[LogicalPlan]] = Nil
 
-  lazy val batches: Seq[Batch] = Seq(
+  override def batches: Seq[Batch] = Seq(
     Batch("Substitution", fixedPoint,
       CTESubstitution,
       WindowsSubstitution,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 6ec341d..b432747 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -910,4 +910,45 @@ class AnalysisSuite extends AnalysisTest with Matchers {
     )
     assertAnalysisSuccess(plan)
   }
+
+  test("SPARK-33197: Make sure changes to ANALYZER_MAX_ITERATIONS take effect at runtime") {
+    // RuleExecutor only throw exception or log warning when the rule is supposed to run
+    // more than once.
+    val maxIterations = 2
+    val maxIterationsEnough = 5
+    withSQLConf(SQLConf.ANALYZER_MAX_ITERATIONS.key -> maxIterations.toString) {
+      val conf = SQLConf.get
+      val testAnalyzer = new Analyzer(
+        new SessionCatalog(new InMemoryCatalog, FunctionRegistry.builtin, conf), conf)
+
+      val plan = testRelation2.select(
+        $"a" / Literal(2) as "div1",
+        $"a" / $"b" as "div2",
+        $"a" / $"c" as "div3",
+        $"a" / $"d" as "div4",
+        $"e" / $"e" as "div5")
+
+      val message1 = intercept[TreeNodeException[LogicalPlan]] {
+        testAnalyzer.execute(plan)
+      }.getMessage
+      assert(message1.startsWith(s"Max
