Repository: spark
Updated Branches:
  refs/heads/master 33b85620f -> cdf032868


[SQL] Rename some apply functions.

I was looking at the codegen code and got confused by a few of the uses of 
apply, in particular apply on objects. So I went ahead and renamed a few of 
them. Hopefully the call sites are slightly clearer with a proper verb.
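
For illustration only (not part of the patch), a rough sketch of how the
renamed entry points read at a call site; `boundExpr` is an assumed,
already-bound predicate Expression, and the imports reflect the packages
touched below:

    import org.apache.spark.sql.catalyst.SqlParser
    import org.apache.spark.sql.catalyst.optimizer.DefaultOptimizer
    import org.apache.spark.sql.catalyst.expressions.codegen.GeneratePredicate
    import org.apache.spark.sql.catalyst.expressions.InterpretedPredicate

    val parser = new SqlParser
    // Previously each of these was an apply(), e.g. parser("SELECT 1").
    val plan      = parser.parse("SELECT 1")               // parse SQL into a LogicalPlan
    val optimized = DefaultOptimizer.execute(plan)         // run the optimizer's rule batches
    val compiled  = GeneratePredicate.generate(boundExpr)  // codegen a Row => Boolean
    val interp    = InterpretedPredicate.create(boundExpr) // interpreted Row => Boolean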

Author: Reynold Xin <r...@databricks.com>

Closes #5624 from rxin/apply-rename and squashes the following commits:

ee45034 [Reynold Xin] [SQL] Rename some apply functions.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cdf03286
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cdf03286
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cdf03286

Branch: refs/heads/master
Commit: cdf0328684f70ddcd49b23c23c1532aeb9caa44e
Parents: 33b8562
Author: Reynold Xin <r...@databricks.com>
Authored: Wed Apr 22 11:18:01 2015 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Wed Apr 22 11:18:01 2015 -0700

----------------------------------------------------------------------
 .../sql/catalyst/AbstractSparkSQLParser.scala   |  2 +-
 .../apache/spark/sql/catalyst/dsl/package.scala |  2 +-
 .../expressions/codegen/CodeGenerator.scala     |  6 +--
 .../codegen/GenerateMutableProjection.scala     |  2 +-
 .../expressions/codegen/GenerateOrdering.scala  |  2 +-
 .../expressions/codegen/GeneratePredicate.scala |  2 +-
 .../codegen/GenerateProjection.scala            |  2 +-
 .../sql/catalyst/expressions/predicates.scala   |  6 +--
 .../spark/sql/catalyst/rules/RuleExecutor.scala |  2 +-
 .../spark/sql/catalyst/SqlParserSuite.scala     |  9 ++--
 .../sql/catalyst/analysis/AnalysisSuite.scala   | 22 ++++-----
 .../analysis/DecimalPrecisionSuite.scala        |  6 +--
 .../expressions/GeneratedEvaluationSuite.scala  | 10 ++--
 .../GeneratedMutableEvaluationSuite.scala       |  8 +--
 .../optimizer/BooleanSimplificationSuite.scala  |  2 +-
 .../optimizer/CombiningLimitsSuite.scala        |  4 +-
 .../optimizer/ConstantFoldingSuite.scala        | 14 +++---
 .../optimizer/ConvertToLocalRelationSuite.scala |  2 +-
 .../optimizer/ExpressionOptimizationSuite.scala |  2 +-
 .../optimizer/FilterPushdownSuite.scala         | 52 ++++++++++----------
 .../optimizer/LikeSimplificationSuite.scala     |  8 +--
 .../catalyst/optimizer/OptimizeInSuite.scala    |  4 +-
 ...SimplifyCaseConversionExpressionsSuite.scala |  8 +--
 .../catalyst/optimizer/UnionPushdownSuite.scala |  7 ++-
 .../sql/catalyst/trees/RuleExecutorSuite.scala  |  6 +--
 .../scala/org/apache/spark/sql/SQLContext.scala | 12 ++---
 .../apache/spark/sql/execution/SparkPlan.scala  | 10 ++--
 .../joins/BroadcastNestedLoopJoin.scala         |  2 +-
 .../sql/execution/joins/LeftSemiJoinBNL.scala   |  2 +-
 .../apache/spark/sql/parquet/newParquet.scala   |  2 +-
 .../org/apache/spark/sql/sources/ddl.scala      |  4 +-
 .../org/apache/spark/sql/hive/HiveContext.scala |  2 +-
 .../org/apache/spark/sql/hive/HiveQl.scala      |  4 +-
 .../apache/spark/sql/hive/HiveStrategies.scala  |  4 +-
 .../apache/spark/sql/hive/test/TestHive.scala   |  2 +-
 35 files changed, 117 insertions(+), 117 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
index 3823584..1f3c024 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala
@@ -32,7 +32,7 @@ private[sql] object KeywordNormalizer {
 private[sql] abstract class AbstractSparkSQLParser
   extends StandardTokenParsers with PackratParsers {
 
-  def apply(input: String): LogicalPlan = {
+  def parse(input: String): LogicalPlan = {
     // Initialize the Keywords.
     lexical.initialize(reservedWords)
     phrase(start)(new lexical.Scanner(input)) match {

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
index 4e5c64b..5d5aba9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala
@@ -296,7 +296,7 @@ package object dsl {
       InsertIntoTable(
        analysis.UnresolvedRelation(Seq(tableName)), Map.empty, logicalPlan, overwrite, false)
 
-    def analyze: LogicalPlan = EliminateSubQueries(analysis.SimpleAnalyzer(logicalPlan))
+    def analyze: LogicalPlan = EliminateSubQueries(analysis.SimpleAnalyzer.execute(logicalPlan))
   }
 
   object plans {  // scalastyle:ignore

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index be2c101..eeffedb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -98,11 +98,11 @@ abstract class CodeGenerator[InType <: AnyRef, OutType <: AnyRef] extends Loggin
       })
 
   /** Generates the requested evaluator binding the given expression(s) to the inputSchema. */
-  def apply(expressions: InType, inputSchema: Seq[Attribute]): OutType =
-    apply(bind(expressions, inputSchema))
+  def generate(expressions: InType, inputSchema: Seq[Attribute]): OutType =
+    generate(bind(expressions, inputSchema))
 
   /** Generates the requested evaluator given already bound expression(s). */
-  def apply(expressions: InType): OutType = cache.get(canonicalize(expressions))
+  def generate(expressions: InType): OutType = cache.get(canonicalize(expressions))
 
   /**
   * Returns a term name that is unique within this instance of a `CodeGenerator`.

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
index a419fd7..8402607 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateMutableProjection.scala
@@ -30,7 +30,7 @@ object GenerateMutableProjection extends CodeGenerator[Seq[Expression], () => Mu
   val mutableRowName = newTermName("mutableRow")
 
   protected def canonicalize(in: Seq[Expression]): Seq[Expression] =
-    in.map(ExpressionCanonicalizer(_))
+    in.map(ExpressionCanonicalizer.execute)
 
   protected def bind(in: Seq[Expression], inputSchema: Seq[Attribute]): Seq[Expression] =
     in.map(BindReferences.bindReference(_, inputSchema))

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
index fc2a2b6..b129c0d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateOrdering.scala
@@ -30,7 +30,7 @@ object GenerateOrdering extends CodeGenerator[Seq[SortOrder], Ordering[Row]] wit
   import scala.reflect.runtime.universe._
 
  protected def canonicalize(in: Seq[SortOrder]): Seq[SortOrder] =
-    in.map(ExpressionCanonicalizer(_).asInstanceOf[SortOrder])
+    in.map(ExpressionCanonicalizer.execute(_).asInstanceOf[SortOrder])
 
   protected def bind(in: Seq[SortOrder], inputSchema: Seq[Attribute]): Seq[SortOrder] =
     in.map(BindReferences.bindReference(_, inputSchema))

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
index 2a0935c..40e1630 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GeneratePredicate.scala
@@ -26,7 +26,7 @@ object GeneratePredicate extends CodeGenerator[Expression, (Row) => Boolean] {
   import scala.reflect.runtime.{universe => ru}
   import scala.reflect.runtime.universe._
 
-  protected def canonicalize(in: Expression): Expression = ExpressionCanonicalizer(in)
+  protected def canonicalize(in: Expression): Expression = ExpressionCanonicalizer.execute(in)
 
   protected def bind(in: Expression, inputSchema: Seq[Attribute]): Expression =
     BindReferences.bindReference(in, inputSchema)

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
index 6f572ff..d491bab 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/GenerateProjection.scala
@@ -31,7 +31,7 @@ object GenerateProjection extends CodeGenerator[Seq[Expression], Projection] {
   import scala.reflect.runtime.universe._
 
   protected def canonicalize(in: Seq[Expression]): Seq[Expression] =
-    in.map(ExpressionCanonicalizer(_))
+    in.map(ExpressionCanonicalizer.execute)
 
   protected def bind(in: Seq[Expression], inputSchema: Seq[Attribute]): Seq[Expression] =
     in.map(BindReferences.bindReference(_, inputSchema))

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index fcd6352..46522eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -23,10 +23,10 @@ import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.types.{DataType, BinaryType, BooleanType, NativeType}
 
 object InterpretedPredicate {
-  def apply(expression: Expression, inputSchema: Seq[Attribute]): (Row => Boolean) =
-    apply(BindReferences.bindReference(expression, inputSchema))
+  def create(expression: Expression, inputSchema: Seq[Attribute]): (Row => Boolean) =
+    create(BindReferences.bindReference(expression, inputSchema))
 
-  def apply(expression: Expression): (Row => Boolean) = {
+  def create(expression: Expression): (Row => Boolean) = {
     (r: Row) => expression.eval(r).asInstanceOf[Boolean]
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
index c441f0b..3f9858b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/rules/RuleExecutor.scala
@@ -45,7 +45,7 @@ abstract class RuleExecutor[TreeType <: TreeNode[_]] extends Logging {
   * Executes the batches of rules defined by the subclass. The batches are executed serially
   * using the defined execution strategy. Within each batch, rules are also executed serially.
    */
-  def apply(plan: TreeType): TreeType = {
+  def execute(plan: TreeType): TreeType = {
     var curPlan = plan
 
     batches.foreach { batch =>

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
index 1a0a0e6..a652c70 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/SqlParserSuite.scala
@@ -49,13 +49,14 @@ class SqlParserSuite extends FunSuite {
 
   test("test long keyword") {
     val parser = new SuperLongKeywordTestParser
-    assert(TestCommand("NotRealCommand") === parser("ThisIsASuperLongKeyWordTest NotRealCommand"))
+    assert(TestCommand("NotRealCommand") ===
+      parser.parse("ThisIsASuperLongKeyWordTest NotRealCommand"))
   }
 
   test("test case insensitive") {
     val parser = new CaseInsensitiveTestParser
-    assert(TestCommand("NotRealCommand") === parser("EXECUTE NotRealCommand"))
-    assert(TestCommand("NotRealCommand") === parser("execute NotRealCommand"))
-    assert(TestCommand("NotRealCommand") === parser("exEcute NotRealCommand"))
+    assert(TestCommand("NotRealCommand") === parser.parse("EXECUTE NotRealCommand"))
+    assert(TestCommand("NotRealCommand") === parser.parse("execute NotRealCommand"))
+    assert(TestCommand("NotRealCommand") === parser.parse("exEcute NotRealCommand"))
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 7c24921..971e1ff 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -42,10 +42,10 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
 
 
   def caseSensitiveAnalyze(plan: LogicalPlan): Unit =
-    caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer(plan))
+    caseSensitiveAnalyzer.checkAnalysis(caseSensitiveAnalyzer.execute(plan))
 
   def caseInsensitiveAnalyze(plan: LogicalPlan): Unit =
-    caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer(plan))
+    caseInsensitiveAnalyzer.checkAnalysis(caseInsensitiveAnalyzer.execute(plan))
 
   val testRelation = LocalRelation(AttributeReference("a", IntegerType, nullable = true)())
   val testRelation2 = LocalRelation(
@@ -82,7 +82,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
        a.select(UnresolvedStar(None)).select('a).unionAll(b.select(UnresolvedStar(None)))
       }
 
-    assert(caseInsensitiveAnalyzer(plan).resolved)
+    assert(caseInsensitiveAnalyzer.execute(plan).resolved)
   }
 
   test("check project's resolved") {
@@ -98,11 +98,11 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
 
   test("analyze project") {
     assert(
-      caseSensitiveAnalyzer(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
+      caseSensitiveAnalyzer.execute(Project(Seq(UnresolvedAttribute("a")), testRelation)) ===
         Project(testRelation.output, testRelation))
 
     assert(
-      caseSensitiveAnalyzer(
+      caseSensitiveAnalyzer.execute(
         Project(Seq(UnresolvedAttribute("TbL.a")),
           UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
         Project(testRelation.output, testRelation))
@@ -115,13 +115,13 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
     assert(e.getMessage().toLowerCase.contains("cannot resolve"))
 
     assert(
-      caseInsensitiveAnalyzer(
+      caseInsensitiveAnalyzer.execute(
         Project(Seq(UnresolvedAttribute("TbL.a")),
           UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
         Project(testRelation.output, testRelation))
 
     assert(
-      caseInsensitiveAnalyzer(
+      caseInsensitiveAnalyzer.execute(
         Project(Seq(UnresolvedAttribute("tBl.a")),
           UnresolvedRelation(Seq("TaBlE"), Some("TbL")))) ===
         Project(testRelation.output, testRelation))
@@ -134,13 +134,13 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
     assert(e.getMessage == "Table Not Found: tAbLe")
 
     assert(
-      caseSensitiveAnalyzer(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
+      caseSensitiveAnalyzer.execute(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
 
     assert(
-      caseInsensitiveAnalyzer(UnresolvedRelation(Seq("tAbLe"), None)) === testRelation)
+      caseInsensitiveAnalyzer.execute(UnresolvedRelation(Seq("tAbLe"), None)) === testRelation)
 
     assert(
-      caseInsensitiveAnalyzer(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
+      caseInsensitiveAnalyzer.execute(UnresolvedRelation(Seq("TaBlE"), None)) === testRelation)
   }
 
   def errorTest(
@@ -219,7 +219,7 @@ class AnalysisSuite extends FunSuite with BeforeAndAfter {
       AttributeReference("d", DecimalType.Unlimited)(),
       AttributeReference("e", ShortType)())
 
-    val plan = caseInsensitiveAnalyzer(
+    val plan = caseInsensitiveAnalyzer.execute(
       testRelation2.select(
         'a / Literal(2) as 'div1,
         'a / 'b as 'div2,

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
index 67bec99..36b03d1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
@@ -48,12 +48,12 @@ class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
 
   private def checkType(expression: Expression, expectedType: DataType): Unit = {
     val plan = Project(Seq(Alias(expression, "c")()), relation)
-    assert(analyzer(plan).schema.fields(0).dataType === expectedType)
+    assert(analyzer.execute(plan).schema.fields(0).dataType === expectedType)
   }
 
   private def checkComparison(expression: Expression, expectedType: DataType): Unit = {
     val plan = Project(Alias(expression, "c")() :: Nil, relation)
-    val comparison = analyzer(plan).collect {
+    val comparison = analyzer.execute(plan).collect {
       case Project(Alias(e: BinaryComparison, _) :: Nil, _) => e
     }.head
     assert(comparison.left.dataType === expectedType)
@@ -64,7 +64,7 @@ class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
     val plan =
       Union(Project(Seq(Alias(left, "l")()), relation),
         Project(Seq(Alias(right, "r")()), relation))
-    val (l, r) = analyzer(plan).collect {
+    val (l, r) = analyzer.execute(plan).collect {
       case Union(left, right) => (left.output.head, right.output.head)
     }.head
     assert(l.dataType === expectedType)

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
index ef3114f..b5ebe4b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedEvaluationSuite.scala
@@ -29,7 +29,7 @@ class GeneratedEvaluationSuite extends ExpressionEvaluationSuite {
       expected: Any,
       inputRow: Row = EmptyRow): Unit = {
     val plan = try {
-      GenerateMutableProjection(Alias(expression, s"Optimized($expression)")() :: Nil)()
+      GenerateMutableProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil)()
     } catch {
       case e: Throwable =>
         val evaluated = GenerateProjection.expressionEvaluator(expression)
@@ -56,10 +56,10 @@ class GeneratedEvaluationSuite extends ExpressionEvaluationSuite {
 
     val futures = (1 to 20).map { _ =>
       future {
-        GeneratePredicate(EqualTo(Literal(1), Literal(1)))
-        GenerateProjection(EqualTo(Literal(1), Literal(1)) :: Nil)
-        GenerateMutableProjection(EqualTo(Literal(1), Literal(1)) :: Nil)
-        GenerateOrdering(Add(Literal(1), Literal(1)).asc :: Nil)
+        GeneratePredicate.generate(EqualTo(Literal(1), Literal(1)))
+        GenerateProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
+        GenerateMutableProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
+        GenerateOrdering.generate(Add(Literal(1), Literal(1)).asc :: Nil)
       }
     }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
index bcc0c40..97af2e0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/GeneratedMutableEvaluationSuite.scala
@@ -25,13 +25,13 @@ import org.apache.spark.sql.catalyst.expressions.codegen._
  */
 class GeneratedMutableEvaluationSuite extends ExpressionEvaluationSuite {
   override def checkEvaluation(
-                                expression: Expression,
-                                expected: Any,
-                                inputRow: Row = EmptyRow): Unit = {
+      expression: Expression,
+      expected: Any,
+      inputRow: Row = EmptyRow): Unit = {
     lazy val evaluated = GenerateProjection.expressionEvaluator(expression)
 
     val plan = try {
-      GenerateProjection(Alias(expression, s"Optimized($expression)")() :: Nil)
+      GenerateProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil)
     } catch {
       case e: Throwable =>
         fail(

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
index 72f06e2..6255578 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/BooleanSimplificationSuite.scala
@@ -61,7 +61,7 @@ class BooleanSimplificationSuite extends PlanTest with PredicateHelper {
 
   def checkCondition(input: Expression, expected: Expression): Unit = {
     val plan = testRelation.where(input).analyze
-    val actual = Optimize(plan).expressions.head
+    val actual = Optimize.execute(plan).expressions.head
     compareConditions(actual, expected)
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
index e2ae0d2..2d16d66 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/CombiningLimitsSuite.scala
@@ -44,7 +44,7 @@ class CombiningLimitsSuite extends PlanTest {
         .limit(10)
         .limit(5)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a)
@@ -61,7 +61,7 @@ class CombiningLimitsSuite extends PlanTest {
         .limit(7)
         .limit(5)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a)

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
index 4396bd0..14b28e8 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
@@ -47,7 +47,7 @@ class ConstantFoldingSuite extends PlanTest {
         .subquery('y)
         .select('a)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a.attr)
@@ -74,7 +74,7 @@ class ConstantFoldingSuite extends PlanTest {
           Literal(2) * Literal(3) - Literal(6) / (Literal(4) - Literal(2))
         )(Literal(9) / Literal(3) as Symbol("9/3"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation
@@ -99,7 +99,7 @@ class ConstantFoldingSuite extends PlanTest {
           Literal(2) * 'a + Literal(4) as Symbol("c3"),
           'a * (Literal(3) + Literal(4)) as Symbol("c4"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation
@@ -127,7 +127,7 @@ class ConstantFoldingSuite extends PlanTest {
             (Literal(1) === Literal(1) || 'b > 1) &&
             (Literal(1) === Literal(2) || 'b < 10)))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation
@@ -144,7 +144,7 @@ class ConstantFoldingSuite extends PlanTest {
           Cast(Literal("2"), IntegerType) + Literal(3) + 'a as Symbol("c1"),
          Coalesce(Seq(Cast(Literal("abc"), IntegerType), Literal(3))) as Symbol("c2"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation
@@ -163,7 +163,7 @@ class ConstantFoldingSuite extends PlanTest {
           Rand + Literal(1) as Symbol("c1"),
           Sum('a) as Symbol("c2"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation
@@ -210,7 +210,7 @@ class ConstantFoldingSuite extends PlanTest {
       Contains("abc", Literal.create(null, StringType)) as 'c20
     )
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
     val correctAnswer =
       testRelation

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
index cf42d43..6841bd9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConvertToLocalRelationSuite.scala
@@ -49,7 +49,7 @@ class ConvertToLocalRelationSuite extends PlanTest {
       UnresolvedAttribute("a").as("a1"),
       (UnresolvedAttribute("b") + 1).as("b1"))
 
-    val optimized = Optimize(projectOnLocal.analyze)
+    val optimized = Optimize.execute(projectOnLocal.analyze)
 
     comparePlans(optimized, correctAnswer)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
index 2f3704b..a4a3a66 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExpressionOptimizationSuite.scala
@@ -30,7 +30,7 @@ class ExpressionOptimizationSuite extends ExpressionEvaluationSuite {
       expected: Any,
       inputRow: Row = EmptyRow): Unit = {
    val plan = Project(Alias(expression, s"Optimized($expression)")() :: Nil, OneRowRelation)
-    val optimizedPlan = DefaultOptimizer(plan)
+    val optimizedPlan = DefaultOptimizer.execute(plan)
     super.checkEvaluation(optimizedPlan.expressions.head, expected, inputRow)
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
index 45cf695..aa9708b 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/FilterPushdownSuite.scala
@@ -50,7 +50,7 @@ class FilterPushdownSuite extends PlanTest {
         .subquery('y)
         .select('a)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a.attr)
@@ -65,7 +65,7 @@ class FilterPushdownSuite extends PlanTest {
         .groupBy('a)('a, Count('b))
         .select('a)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a)
@@ -81,7 +81,7 @@ class FilterPushdownSuite extends PlanTest {
         .groupBy('a)('a as 'c, Count('b))
         .select('c)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select('a)
@@ -98,7 +98,7 @@ class FilterPushdownSuite extends PlanTest {
         .select('a)
         .where('a === 1)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .where('a === 1)
@@ -115,7 +115,7 @@ class FilterPushdownSuite extends PlanTest {
         .where('e === 1)
         .analyze
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .where('a + 'b === 1)
@@ -131,7 +131,7 @@ class FilterPushdownSuite extends PlanTest {
       .where('a === 1)
       .where('a === 2)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .where('a === 1 && 'a === 2)
@@ -152,7 +152,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 1)
     val right = testRelation.where('b === 2)
     val correctAnswer =
@@ -170,7 +170,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 1)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 1)
     val right = testRelation
     val correctAnswer =
@@ -188,7 +188,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 1 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 1)
     val right = testRelation.where('b === 2)
     val correctAnswer =
@@ -206,7 +206,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 1 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 1)
     val correctAnswer =
       left.join(y, LeftOuter).where("y.b".attr === 2).analyze
@@ -223,7 +223,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 1 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val right = testRelation.where('b === 2).subquery('d)
     val correctAnswer =
       x.join(right, RightOuter).where("x.b".attr === 1).analyze
@@ -240,7 +240,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 2).subquery('d)
     val correctAnswer =
      left.join(y, LeftOuter, Some("d.b".attr === 1)).where("y.b".attr === 2).analyze
@@ -257,7 +257,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val right = testRelation.where('b === 2).subquery('d)
     val correctAnswer =
       x.join(right, RightOuter, Some("d.b".attr === 1)).where("x.b".attr === 
2).analyze
@@ -274,7 +274,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 2).subquery('l)
     val right = testRelation.where('b === 1).subquery('r)
     val correctAnswer =
@@ -292,7 +292,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val right = testRelation.where('b === 2).subquery('r)
     val correctAnswer =
       x.join(right, RightOuter, Some("r.b".attr === 1)).where("x.b".attr === 
2).analyze
@@ -309,7 +309,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === 
"y.c".attr)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 2).subquery('l)
     val right = testRelation.where('b === 1).subquery('r)
     val correctAnswer =
@@ -327,7 +327,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === 
"y.c".attr)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.subquery('l)
     val right = testRelation.where('b === 2).subquery('r)
     val correctAnswer =
@@ -346,7 +346,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === 
"y.c".attr)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('b === 2).subquery('l)
     val right = testRelation.where('b === 1).subquery('r)
     val correctAnswer =
@@ -365,7 +365,7 @@ class FilterPushdownSuite extends PlanTest {
         .where("x.b".attr === 2 && "y.b".attr === 2 && "x.c".attr === 
"y.c".attr)
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('a === 3).subquery('l)
     val right = testRelation.where('b === 2).subquery('r)
     val correctAnswer =
@@ -382,7 +382,7 @@ class FilterPushdownSuite extends PlanTest {
     val originalQuery = {
       x.join(y, condition = Some("x.b".attr === "y.b".attr))
     }
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
 
    comparePlans(analysis.EliminateSubQueries(originalQuery.analyze), optimized)
   }
@@ -396,7 +396,7 @@ class FilterPushdownSuite extends PlanTest {
         .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1) && 
("y.a".attr === 1))
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('a === 1).subquery('x)
     val right = testRelation.where('a === 1).subquery('y)
     val correctAnswer =
@@ -415,7 +415,7 @@ class FilterPushdownSuite extends PlanTest {
         .where(("x.b".attr === "y.b".attr) && ("x.a".attr === 1))
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val left = testRelation.where('a === 1).subquery('x)
     val right = testRelation.subquery('y)
     val correctAnswer =
@@ -436,7 +436,7 @@ class FilterPushdownSuite extends PlanTest {
           ("z.a".attr >= 3) && ("z.a".attr === "x.b".attr))
     }
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val lleft = testRelation.where('a >= 3).subquery('z)
     val left = testRelation.where('a === 1).subquery('x)
     val right = testRelation.subquery('y)
@@ -457,7 +457,7 @@ class FilterPushdownSuite extends PlanTest {
         .generate(Explode('c_arr), true, false, Some("arr"))
         .where(('b >= 5) && ('a > 6))
     }
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = {
       testRelationWithArrayType
         .where(('b >= 5) && ('a > 6))
@@ -474,7 +474,7 @@ class FilterPushdownSuite extends PlanTest {
         .generate(generator, true, false, Some("arr"))
         .where(('b >= 5) && ('c > 6))
     }
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val referenceResult = {
       testRelationWithArrayType
         .where('b >= 5)
@@ -502,7 +502,7 @@ class FilterPushdownSuite extends PlanTest {
         .generate(Explode('c_arr), true, false, Some("arr"))
         .where(('c > 6) || ('b > 5)).analyze
     }
-    val optimized = Optimize(originalQuery)
+    val optimized = Optimize.execute(originalQuery)
 
     comparePlans(optimized, originalQuery)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
index b10577c..b3df487 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LikeSimplificationSuite.scala
@@ -41,7 +41,7 @@ class LikeSimplificationSuite extends PlanTest {
       testRelation
         .where(('a like "abc%") || ('a like "abc\\%"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .where(StartsWith('a, "abc") || ('a like "abc\\%"))
       .analyze
@@ -54,7 +54,7 @@ class LikeSimplificationSuite extends PlanTest {
       testRelation
         .where('a like "%xyz")
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .where(EndsWith('a, "xyz"))
       .analyze
@@ -67,7 +67,7 @@ class LikeSimplificationSuite extends PlanTest {
       testRelation
         .where(('a like "%mn%") || ('a like "%mn\\%"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .where(Contains('a, "mn") || ('a like "%mn\\%"))
       .analyze
@@ -80,7 +80,7 @@ class LikeSimplificationSuite extends PlanTest {
       testRelation
         .where(('a like "") || ('a like "abc"))
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .where(('a === "") || ('a === "abc"))
       .analyze

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
index 966bc9a..3eb399e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeInSuite.scala
@@ -49,7 +49,7 @@ class OptimizeInSuite extends PlanTest {
         .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2))))
         .analyze
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .where(InSet(UnresolvedAttribute("a"), HashSet[Any]() + 1 + 2))
@@ -64,7 +64,7 @@ class OptimizeInSuite extends PlanTest {
        .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))
         .analyze
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
        .where(In(UnresolvedAttribute("a"), Seq(Literal(1),Literal(2), UnresolvedAttribute("b"))))

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
index 22992fb..6b1e53c 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/SimplifyCaseConversionExpressionsSuite.scala
@@ -41,7 +41,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
       testRelation
         .select(Upper(Upper('a)) as 'u)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select(Upper('a) as 'u)
@@ -55,7 +55,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
       testRelation
         .select(Upper(Lower('a)) as 'u)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer =
       testRelation
         .select(Upper('a) as 'u)
@@ -69,7 +69,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
       testRelation
         .select(Lower(Upper('a)) as 'l)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .select(Lower('a) as 'l)
       .analyze
@@ -82,7 +82,7 @@ class SimplifyCaseConversionExpressionsSuite extends PlanTest {
       testRelation
         .select(Lower(Lower('a)) as 'l)
 
-    val optimized = Optimize(originalQuery.analyze)
+    val optimized = Optimize.execute(originalQuery.analyze)
     val correctAnswer = testRelation
       .select(Lower('a) as 'l)
       .analyze

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
index a54751d..a3ad200 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnionPushdownSuite.scala
@@ -17,10 +17,9 @@
 
 package org.apache.spark.sql.catalyst.optimizer
 
-import org.apache.spark.sql.catalyst.analysis
 import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
+import org.apache.spark.sql.catalyst.plans.PlanTest
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.plans.{PlanTest, LeftOuter, RightOuter}
 import org.apache.spark.sql.catalyst.rules._
 import org.apache.spark.sql.catalyst.dsl.plans._
 import org.apache.spark.sql.catalyst.dsl.expressions._
@@ -41,7 +40,7 @@ class UnionPushdownSuite extends PlanTest {
   test("union: filter to each side") {
     val query = testUnion.where('a === 1)
 
-    val optimized = Optimize(query.analyze)
+    val optimized = Optimize.execute(query.analyze)
 
     val correctAnswer =
      Union(testRelation.where('a === 1), testRelation2.where('d === 1)).analyze
@@ -52,7 +51,7 @@ class UnionPushdownSuite extends PlanTest {
   test("union: project to each side") {
     val query = testUnion.select('b)
 
-    val optimized = Optimize(query.analyze)
+    val optimized = Optimize.execute(query.analyze)
 
     val correctAnswer =
       Union(testRelation.select('b), testRelation2.select('e)).analyze

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
index 4b2d455..2a641c6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/RuleExecutorSuite.scala
@@ -34,7 +34,7 @@ class RuleExecutorSuite extends FunSuite {
       val batches = Batch("once", Once, DecrementLiterals) :: Nil
     }
 
-    assert(ApplyOnce(Literal(10)) === Literal(9))
+    assert(ApplyOnce.execute(Literal(10)) === Literal(9))
   }
 
   test("to fixed point") {
@@ -42,7 +42,7 @@ class RuleExecutorSuite extends FunSuite {
       val batches = Batch("fixedPoint", FixedPoint(100), DecrementLiterals) :: 
Nil
     }
 
-    assert(ToFixedPoint(Literal(10)) === Literal(0))
+    assert(ToFixedPoint.execute(Literal(10)) === Literal(0))
   }
 
   test("to maxIterations") {
@@ -50,6 +50,6 @@ class RuleExecutorSuite extends FunSuite {
       val batches = Batch("fixedPoint", FixedPoint(10), DecrementLiterals) :: 
Nil
     }
 
-    assert(ToFixedPoint(Literal(100)) === Literal(90))
+    assert(ToFixedPoint.execute(Literal(100)) === Literal(90))
   }
 }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index bcd20c0..a279b0f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -132,16 +132,16 @@ class SQLContext(@transient val sparkContext: SparkContext)
   protected[sql] lazy val optimizer: Optimizer = DefaultOptimizer
 
   @transient
-  protected[sql] val ddlParser = new DDLParser(sqlParser.apply(_))
+  protected[sql] val ddlParser = new DDLParser(sqlParser.parse(_))
 
   @transient
   protected[sql] val sqlParser = {
     val fallback = new catalyst.SqlParser
-    new SparkSQLParser(fallback(_))
+    new SparkSQLParser(fallback.parse(_))
   }
 
   protected[sql] def parseSql(sql: String): LogicalPlan = {
-    ddlParser(sql, false).getOrElse(sqlParser(sql))
+    ddlParser.parse(sql, false).getOrElse(sqlParser.parse(sql))
   }
 
   protected[sql] def executeSql(sql: String): this.QueryExecution = executePlan(parseSql(sql))
@@ -1120,12 +1120,12 @@ class SQLContext(@transient val sparkContext: SparkContext)
   protected[sql] class QueryExecution(val logical: LogicalPlan) {
     def assertAnalyzed(): Unit = analyzer.checkAnalysis(analyzed)
 
-    lazy val analyzed: LogicalPlan = analyzer(logical)
+    lazy val analyzed: LogicalPlan = analyzer.execute(logical)
     lazy val withCachedData: LogicalPlan = {
       assertAnalyzed()
       cacheManager.useCachedData(analyzed)
     }
-    lazy val optimizedPlan: LogicalPlan = optimizer(withCachedData)
+    lazy val optimizedPlan: LogicalPlan = optimizer.execute(withCachedData)
 
     // TODO: Don't just pick the first one...
     lazy val sparkPlan: SparkPlan = {
@@ -1134,7 +1134,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
     }
    // executedPlan should not be used to initialize any SparkPlan. It should be
     // only used for execution.
-    lazy val executedPlan: SparkPlan = prepareForExecution(sparkPlan)
+    lazy val executedPlan: SparkPlan = prepareForExecution.execute(sparkPlan)
 
     /** Internal version of the RDD. Avoids copies and has no schema */
     lazy val toRdd: RDD[Row] = executedPlan.execute()

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index e159ffe..59c8980 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -144,7 +144,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
     log.debug(
       s"Creating Projection: $expressions, inputSchema: $inputSchema, 
codegen:$codegenEnabled")
     if (codegenEnabled) {
-      GenerateProjection(expressions, inputSchema)
+      GenerateProjection.generate(expressions, inputSchema)
     } else {
       new InterpretedProjection(expressions, inputSchema)
     }
@@ -156,7 +156,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
     log.debug(
       s"Creating MutableProj: $expressions, inputSchema: $inputSchema, 
codegen:$codegenEnabled")
     if(codegenEnabled) {
-      GenerateMutableProjection(expressions, inputSchema)
+      GenerateMutableProjection.generate(expressions, inputSchema)
     } else {
       () => new InterpretedMutableProjection(expressions, inputSchema)
     }
@@ -166,15 +166,15 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
   protected def newPredicate(
      expression: Expression, inputSchema: Seq[Attribute]): (Row) => Boolean = {
     if (codegenEnabled) {
-      GeneratePredicate(expression, inputSchema)
+      GeneratePredicate.generate(expression, inputSchema)
     } else {
-      InterpretedPredicate(expression, inputSchema)
+      InterpretedPredicate.create(expression, inputSchema)
     }
   }
 
  protected def newOrdering(order: Seq[SortOrder], inputSchema: Seq[Attribute]): Ordering[Row] = {
     if (codegenEnabled) {
-      GenerateOrdering(order, inputSchema)
+      GenerateOrdering.generate(order, inputSchema)
     } else {
       new RowOrdering(order, inputSchema)
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
index 83b1a83..56200f6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/BroadcastNestedLoopJoin.scala
@@ -59,7 +59,7 @@ case class BroadcastNestedLoopJoin(
   }
 
   @transient private lazy val boundCondition =
-    InterpretedPredicate(
+    InterpretedPredicate.create(
       condition
         .map(c => BindReferences.bindReference(c, left.output ++ right.output))
         .getOrElse(Literal(true)))

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
index 1fa7e7b..e06f63f 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/LeftSemiJoinBNL.scala
@@ -45,7 +45,7 @@ case class LeftSemiJoinBNL(
   override def right: SparkPlan = broadcast
 
   @transient private lazy val boundCondition =
-    InterpretedPredicate(
+    InterpretedPredicate.create(
       condition
         .map(c => BindReferences.bindReference(c, left.output ++ right.output))
         .getOrElse(Literal(true)))
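
Both broadcast joins bind their condition identically, so the rename reads the
same in each file. A sketch of the create factory idea, with a stub row type
standing in for the Catalyst Row (REPL-style, paste into a Scala REPL to try):

    // Stub types; InterpretedPredicateDemo is illustrative, not the Catalyst object.
    type StubRow = Map[String, Any]
    object InterpretedPredicateDemo {
      // `create` says "build a predicate"; a bare apply hid that the result
      // is a StubRow => Boolean closure, not an instance of the object.
      def create(column: String): StubRow => Boolean =
        row => row.get(column).contains(true)
    }
    val pred = InterpretedPredicateDemo.create("matched")
    pred(Map("matched" -> true))   // true
    pred(Map("matched" -> false))  // false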

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
index af7b3c8..88466f5 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/newParquet.scala
@@ -611,7 +611,7 @@ private[sql] case class ParquetRelation2(
 
     val rawPredicate =
       partitionPruningPredicates.reduceOption(expressions.And).getOrElse(Literal(true))
-    val boundPredicate = InterpretedPredicate(rawPredicate transform {
+    val boundPredicate = InterpretedPredicate.create(rawPredicate transform {
       case a: AttributeReference =>
         val index = partitionColumns.indexWhere(a.name == _.name)
         BoundReference(index, partitionColumns(index).dataType, nullable = true)

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
index 78d4941..e7a0685 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
@@ -38,9 +38,9 @@ private[sql] class DDLParser(
     parseQuery: String => LogicalPlan)
   extends AbstractSparkSQLParser with DataTypeParser with Logging {
 
-  def apply(input: String, exceptionOnError: Boolean): Option[LogicalPlan] = {
+  def parse(input: String, exceptionOnError: Boolean): Option[LogicalPlan] = {
     try {
-      Some(apply(input))
+      Some(parse(input))
     } catch {
       case ddlException: DDLException => throw ddlException
       case _ if !exceptionOnError => None
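
The renamed overload keeps its strict/lenient split: the one-argument parse
(inherited from AbstractSparkSQLParser) throws on bad input, while this
two-argument version turns failures into None unless exceptionOnError is set
(and, as the hunk shows, always rethrows DDLException). A simplified sketch of
the shape, using a hypothetical trait and a generic result type instead of
LogicalPlan:

    // Sketch only: a hypothetical trait, not the Spark parser hierarchy, and
    // the unconditional DDLException rethrow is omitted for brevity.
    trait LenientParser[T] {
      def parse(input: String): T  // strict: throws on malformed input
      def parse(input: String, exceptionOnError: Boolean): Option[T] =
        try Some(parse(input))
        catch { case _: Exception if !exceptionOnError => None }
    }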

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index c4a73b3..dd06b26 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -93,7 +93,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
     if (conf.dialect == "sql") {
       super.sql(substituted)
     } else if (conf.dialect == "hiveql") {
-      val ddlPlan = ddlParserWithHiveQL(sqlText, exceptionOnError = false)
+      val ddlPlan = ddlParserWithHiveQL.parse(sqlText, exceptionOnError = false)
       DataFrame(this, ddlPlan.getOrElse(HiveQl.parseSql(substituted)))
     }  else {
       sys.error(s"Unsupported SQL dialect: ${conf.dialect}. Try 'sql' or 'hiveql'")

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 85061f2..0ea6d57 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -144,7 +144,7 @@ private[hive] object HiveQl {
 
   protected val hqlParser = {
     val fallback = new ExtendedHiveQlParser
-    new SparkSQLParser(fallback(_))
+    new SparkSQLParser(fallback.parse(_))
   }
 
   /**
@@ -240,7 +240,7 @@ private[hive] object HiveQl {
 
 
   /** Returns a LogicalPlan for a given HiveQL string. */
-  def parseSql(sql: String): LogicalPlan = hqlParser(sql)
+  def parseSql(sql: String): LogicalPlan = hqlParser.parse(sql)
 
   val errorRegEx = "line (\\d+):(\\d+) (.*)".r
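
In the first hunk above, `fallback.parse(_)` hands the named method to
SparkSQLParser as the String => LogicalPlan function it expects; the old
`fallback(_)` leaned on apply and hid which method was being passed. A sketch
of the idiom with stand-in types:

    // Stand-in types; only the `method(_)` function-literal idiom is the point.
    class MiniFallbackParser { def parse(sql: String): String = sql.trim }
    class MiniSparkSQLParser(fallbackParse: String => String) {
      def parse(sql: String): String = fallbackParse(sql)
    }
    val fallback = new MiniFallbackParser
    val parser = new MiniSparkSQLParser(fallback.parse(_))
    parser.parse("  SELECT 1  ")  // "SELECT 1"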
 

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index a6f4fbe..be9249a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -119,9 +119,9 @@ private[hive] trait HiveStrategies {
             val inputData = new GenericMutableRow(relation.partitionKeys.size)
             val pruningCondition =
               if (codegenEnabled) {
-                GeneratePredicate(castedPredicate)
+                GeneratePredicate.generate(castedPredicate)
               } else {
-                InterpretedPredicate(castedPredicate)
+                InterpretedPredicate.create(castedPredicate)
               }
 
             val partitions = relation.hiveQlPartitions.filter { part =>
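
The pruning branch above is the usual codegen dispatch: compile a predicate
when codegen is on, otherwise fall back to the interpreted one. A toy sketch
of that shape, with stub factories in place of the Catalyst ones:

    // Toy stand-ins for the two factories; only the dispatch shape is real.
    object GenStub    { def generate(threshold: Int): Int => Boolean = _ > threshold }
    object InterpStub { def create(threshold: Int): Int => Boolean = _ > threshold }
    def pruningPredicate(codegenEnabled: Boolean, threshold: Int): Int => Boolean =
      if (codegenEnabled) GenStub.generate(threshold) else InterpStub.create(threshold)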

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf03286/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 6570fa1..9f17bca 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -185,7 +185,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
       logDebug(s"Query references test tables: ${referencedTestTables.mkString(", ")}")
       referencedTestTables.foreach(loadTestTable)
       // Proceed with analysis.
-      analyzer(logical)
+      analyzer.execute(logical)
     }
   }
 

