http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/materialize/MutableNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/materialize/MutableNode.java b/core/src/main/java/org/apache/calcite/materialize/MutableNode.java
new file mode 100644
index 0000000..5e5871f
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/materialize/MutableNode.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.materialize;
+
+import org.apache.calcite.util.mapping.IntPair;
+
+import com.google.common.collect.Ordering;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+import java.util.Set;
+
+/** Mutable version of {@link LatticeNode}, used while a graph is being
+ * built. */
+class MutableNode {
+  final LatticeTable table;
+  final MutableNode parent;
+  final Step step;
+  int startCol;
+  int endCol;
+  String alias;
+  final List<MutableNode> children = new ArrayList<>();
+
+  /** Comparator for sorting children within a parent. */
+  static final Ordering<MutableNode> ORDERING =
+      Ordering.from(
+          new Comparator<MutableNode>() {
+            public int compare(MutableNode o1, MutableNode o2) {
+              int c = Ordering.<String>natural().lexicographical().compare(
+                  o1.table.t.getQualifiedName(), o2.table.t.getQualifiedName());
+              if (c == 0) {
+                // The nodes have the same table. Now compare them based on the
+                // columns they use as foreign key.
+                c = Ordering.<Integer>natural().lexicographical().compare(
+                    IntPair.left(o1.step.keys), IntPair.left(o2.step.keys));
+              }
+              return c;
+            }
+          });
+
+  /** Creates a root node. */
+  MutableNode(LatticeTable table) {
+    this(table, null, null);
+  }
+
+  /** Creates a non-root node. */
+  MutableNode(LatticeTable table, MutableNode parent, Step step) {
+    this.table = Objects.requireNonNull(table);
+    this.parent = parent;
+    this.step = step;
+    if (parent != null) {
+      parent.children.add(this);
+      Collections.sort(parent.children, ORDERING);
+    }
+  }
+
+  /** Populates a flattened list of mutable nodes. */
+  void flatten(List<MutableNode> flatNodes) {
+    flatNodes.add(this);
+    for (MutableNode child : children) {
+      child.flatten(flatNodes);
+    }
+  }
+
+  /** Returns whether this node is cyclic, in an undirected sense; that is,
+   * whether the same descendant can be reached by more than one route. */
+  boolean isCyclic() {
+    final Set<MutableNode> descendants = new HashSet<>();
+    return isCyclicRecurse(descendants);
+  }
+
+  private boolean isCyclicRecurse(Set<MutableNode> descendants) {
+    if (!descendants.add(this)) {
+      return true;
+    }
+    for (MutableNode child : children) {
+      if (child.isCyclicRecurse(descendants)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  void addPath(Path path, String alias) {
+    MutableNode n = this;
+    for (Step step1 : path.steps) {
+      MutableNode n2 = n.findChild(step1);
+      if (n2 == null) {
+        n2 = new MutableNode(step1.target(), n, step1);
+        if (alias != null) {
+          n2.alias = alias;
+        }
+      }
+      n = n2;
+    }
+  }
+
+  private MutableNode findChild(Step step) {
+    for (MutableNode child : children) {
+      if (child.table.equals(step.target())
+          && child.step.equals(step)) {
+        return child;
+      }
+    }
+    return null;
+  }
+}
+
+// End MutableNode.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/materialize/Path.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/materialize/Path.java b/core/src/main/java/org/apache/calcite/materialize/Path.java
new file mode 100644
index 0000000..39e6191
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/materialize/Path.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.materialize;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.List;
+
+/** A sequence of {@link Step}s from a root node (fact table) to another node
+ * (dimension table), possibly via intermediate dimension tables. */
+class Path {
+  final List<Step> steps;
+  private final int id;
+
+  Path(List<Step> steps, int id) {
+    this.steps = ImmutableList.copyOf(steps);
+    this.id = id;
+  }
+
+  @Override public int hashCode() {
+    return id;
+  }
+
+  @Override public boolean equals(Object obj) {
+    return this == obj
+        || obj instanceof Path
+        && id == ((Path) obj).id;
+  }
+}
+
+// End Path.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/materialize/SqlStatisticProvider.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/materialize/SqlStatisticProvider.java b/core/src/main/java/org/apache/calcite/materialize/SqlStatisticProvider.java
new file mode 100644
index 0000000..8d045e1
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/materialize/SqlStatisticProvider.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.materialize;
+
+import java.util.List;
+
+/**
+ * Estimates row counts for tables and columns.
+ *
+ * <p>Unlike {@link LatticeStatisticProvider}, works on raw tables and columns
+ * and does not need a {@link Lattice}.
+ */
+public interface SqlStatisticProvider {
+  double tableCardinality(List<String> qualifiedTableName);
+}
+
+// End SqlStatisticProvider.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/materialize/Step.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/materialize/Step.java b/core/src/main/java/org/apache/calcite/materialize/Step.java
new file mode 100644
index 0000000..ea4cf79
--- /dev/null
+++ b/core/src/main/java/org/apache/calcite/materialize/Step.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.materialize;
+
+import org.apache.calcite.util.graph.AttributedDirectedGraph;
+import org.apache.calcite.util.graph.DefaultEdge;
+import org.apache.calcite.util.mapping.IntPair;
+
+import com.google.common.collect.ImmutableList;
+
+import java.util.List;
+import java.util.Objects;
+
+/** Edge in the join graph.
+ *
+ * <p>It is directed: the "parent" must be the "many" side containing the
+ * foreign key, and the "target" is the "one" side containing the primary
+ * key. For example, EMP &rarr; DEPT.
+ *
+ * <p>When created via
+ * {@link LatticeSpace#addEdge(LatticeTable, LatticeTable, List)}
+ * it is unique within the {@link LatticeSpace}. */
+class Step extends DefaultEdge {
+  final List<IntPair> keys;
+
+  Step(LatticeTable source, LatticeTable target, List<IntPair> keys) {
+    super(source, target);
+    this.keys = ImmutableList.copyOf(keys);
+    assert IntPair.ORDERING.isStrictlyOrdered(keys); // ordered and unique
+  }
+
+  @Override public int hashCode() {
+    return Objects.hash(source, target, keys);
+  }
+
+  @Override public boolean equals(Object obj) {
+    return this == obj
+        || obj instanceof Step
+        && ((Step) obj).source.equals(source)
+        && ((Step) obj).target.equals(target)
+        && ((Step) obj).keys.equals(keys);
+  }
+
+  @Override public String toString() {
+    final StringBuilder b = new StringBuilder()
+        .append("Step(")
+        .append(source)
+        .append(", ")
+        .append(target)
+        .append(",");
+    for (IntPair key : keys) {
+      b.append(' ')
+          .append(source().field(key.source).getName())
+          .append(':')
+          .append(target().field(key.target).getName());
+    }
+    return b.append(")")
+        .toString();
+  }
+
+  LatticeTable source() {
+    return (LatticeTable) source;
+  }
+
+  LatticeTable target() {
+    return (LatticeTable) target;
+  }
+
+  boolean isBackwards(SqlStatisticProvider statisticProvider) {
+    return source == target
+        ? keys.get(0).source < keys.get(0).target // want PK on the right
+        : cardinality(statisticProvider, source())
+            < cardinality(statisticProvider, target());
+  }
+
+  /** Temporary method. We should use (inferred) primary keys to figure out
+   * the direction of steps. */
+  private double cardinality(SqlStatisticProvider statisticProvider,
+      LatticeTable table) {
+    return statisticProvider.tableCardinality(table.t.getQualifiedName());
+  }
+
+  /** Creates {@link Step} instances. */
+  static class Factory implements AttributedDirectedGraph.AttributedEdgeFactory<
+      LatticeTable, Step> {
+    public Step createEdge(LatticeTable source, LatticeTable target) {
+      throw new UnsupportedOperationException();
+    }
+
+    public Step createEdge(LatticeTable source, LatticeTable target,
+        Object... attributes) {
+      @SuppressWarnings("unchecked") final List<IntPair> keys =
+          (List) attributes[0];
+      return new Step(source, target, keys);
+    }
+  }
+}
+
+// End Step.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/model/JsonSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/model/JsonSchema.java b/core/src/main/java/org/apache/calcite/model/JsonSchema.java
index eb2ab3d..ce9c801 100644
--- a/core/src/main/java/org/apache/calcite/model/JsonSchema.java
+++ b/core/src/main/java/org/apache/calcite/model/JsonSchema.java
@@ -88,6 +88,10 @@ public abstract class JsonSchema {
    */
   public Boolean cache;
 
+  /** Whether to create lattices in this schema based on queries occurring in
+   * other schemas. Default value is {@code false}. */
+  public Boolean autoLattice;
+
   public abstract void accept(ModelHandler handler);
 
   public void visitChildren(ModelHandler modelHandler) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/model/ModelHandler.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/model/ModelHandler.java b/core/src/main/java/org/apache/calcite/model/ModelHandler.java
index ee6e5f2..afd11f4 100644
--- a/core/src/main/java/org/apache/calcite/model/ModelHandler.java
+++ b/core/src/main/java/org/apache/calcite/model/ModelHandler.java
@@ -527,8 +527,10 @@ public class ModelHandler {
   public void visit(JsonMeasure jsonMeasure) {
     checkRequiredAttributes(jsonMeasure, "agg");
     assert latticeBuilder != null;
+    final boolean distinct = false; // no distinct field in JsonMeasure, yet
     final Lattice.Measure measure =
-        latticeBuilder.resolveMeasure(jsonMeasure.agg, jsonMeasure.args);
+        latticeBuilder.resolveMeasure(jsonMeasure.agg, distinct,
+            jsonMeasure.args);
     if (tileBuilder != null) {
       tileBuilder.addMeasure(measure);
     } else if (latticeBuilder != null) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/plan/RelOptLattice.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/plan/RelOptLattice.java b/core/src/main/java/org/apache/calcite/plan/RelOptLattice.java
index a32f55d..2463fde 100644
--- a/core/src/main/java/org/apache/calcite/plan/RelOptLattice.java
+++ b/core/src/main/java/org/apache/calcite/plan/RelOptLattice.java
@@ -40,7 +40,7 @@ public class RelOptLattice {
   }
 
   public RelOptTable rootTable() {
-    return lattice.nodes.get(0).scan.getTable();
+    return lattice.rootNode.relOptTable();
   }
 
   /** Rewrites a relational expression to use a lattice.

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java b/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
index 1388b7a..c062814 100644
--- a/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
+++ b/core/src/main/java/org/apache/calcite/plan/hep/HepPlanner.java
@@ -68,7 +68,7 @@ import java.util.Set;
 public class HepPlanner extends AbstractRelOptPlanner {
   //~ Instance fields --------------------------------------------------------
 
-  private HepProgram mainProgram;
+  private final HepProgram mainProgram;
 
   private HepProgram currentProgram;
 
@@ -76,9 +76,11 @@ public class HepPlanner extends AbstractRelOptPlanner {
 
   private RelTraitSet requestedRootTraits;
 
-  private Map<String, HepRelVertex> mapDigestToVertex;
+  private final Map<String, HepRelVertex> mapDigestToVertex = new HashMap<>();
 
-  private final Set<RelOptRule> allRules;
+  // NOTE jvs 24-Apr-2006:  We use LinkedHashSet
+  // in order to provide deterministic behavior.
+  private final Set<RelOptRule> allRules = new LinkedHashSet<>();
 
   private int nTransformations;
 
@@ -86,14 +88,15 @@ public class HepPlanner extends AbstractRelOptPlanner {
 
   private int nTransformationsLastGC;
 
-  private boolean noDAG;
+  private final boolean noDag;
 
   /**
    * Query graph, with edges directed from parent to child. This is a
    * single-rooted DAG, possibly with additional roots corresponding to
    * discarded plan fragments which remain to be garbage-collected.
    */
-  private DirectedGraph<HepRelVertex, DefaultEdge> graph;
+  private final DirectedGraph<HepRelVertex, DefaultEdge> graph =
+      DefaultDirectedGraph.create();
 
   private final Function2<RelNode, RelNode, Void> onCopyHook;
 
@@ -123,28 +126,23 @@ public class HepPlanner extends AbstractRelOptPlanner {
 
   /**
    * Creates a new HepPlanner with the option to keep the graph a
-   * tree(noDAG=true) or allow DAG(noDAG=false).
+   * tree (noDag = true) or allow DAG (noDag = false).
    *
-   * @param program    program controlling rule application
+   * @param noDag      If false, create shared nodes if expressions are
+   *                   identical
+   * @param program    Program controlling rule application
    * @param onCopyHook Function to call when a node is copied
    */
   public HepPlanner(
       HepProgram program,
       Context context,
-      boolean noDAG,
+      boolean noDag,
       Function2<RelNode, RelNode, Void> onCopyHook,
       RelOptCostFactory costFactory) {
     super(costFactory, context);
     this.mainProgram = program;
-    this.onCopyHook =
-        Util.first(onCopyHook, Functions.ignore2());
-    mapDigestToVertex = new HashMap<>();
-    graph = DefaultDirectedGraph.create();
-
-    // NOTE jvs 24-Apr-2006:  We use LinkedHashSet here and below
-    // in order to provide deterministic behavior.
-    allRules = new LinkedHashSet<>();
-    this.noDAG = noDAG;
+    this.onCopyHook = Util.first(onCopyHook, Functions.ignore2());
+    this.noDag = noDag;
   }
 
   //~ Methods ----------------------------------------------------------------
@@ -822,7 +820,7 @@ public class HepPlanner extends AbstractRelOptPlanner {
     rel.recomputeDigest();
 
     // try to find equivalent rel only if DAG is allowed
-    if (!noDAG) {
+    if (!noDag) {
       // Now, check if an equivalent vertex already exists in graph.
       String digest = rel.getDigest();
       HepRelVertex equivVertex = mapDigestToVertex.get(digest);

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/rel/rel2sql/SqlImplementor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/rel/rel2sql/SqlImplementor.java b/core/src/main/java/org/apache/calcite/rel/rel2sql/SqlImplementor.java
index c480ec4..8660951 100644
--- a/core/src/main/java/org/apache/calcite/rel/rel2sql/SqlImplementor.java
+++ b/core/src/main/java/org/apache/calcite/rel/rel2sql/SqlImplementor.java
@@ -93,6 +93,8 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.function.IntFunction;
+import javax.annotation.Nonnull;
 
 /**
  * State for generating a SQL statement.
@@ -410,15 +412,17 @@ public abstract class SqlImplementor {
   /** Context for translating a {@link RexNode} expression (within a
    * {@link RelNode}) into a {@link SqlNode} expression (within a SQL parse
    * tree). */
-  public abstract class Context {
+  public abstract static class Context {
+    final SqlDialect dialect;
     final int fieldCount;
     private final boolean ignoreCast;
 
-    protected Context(int fieldCount) {
-      this(fieldCount, false);
+    protected Context(SqlDialect dialect, int fieldCount) {
+      this(dialect, fieldCount, false);
     }
 
-    protected Context(int fieldCount, boolean ignoreCast) {
+    protected Context(SqlDialect dialect, int fieldCount, boolean ignoreCast) {
+      this.dialect = dialect;
       this.fieldCount = fieldCount;
       this.ignoreCast = ignoreCast;
     }
@@ -453,7 +457,7 @@ public abstract class SqlImplementor {
         switch (referencedExpr.getKind()) {
         case CORREL_VARIABLE:
           final RexCorrelVariable variable = (RexCorrelVariable) referencedExpr;
-          final Context correlAliasContext = correlTableMap.get(variable.id);
+          final Context correlAliasContext = getAliasContext(variable);
           final RexFieldAccess lastAccess = accesses.pollLast();
           assert lastAccess != null;
           sqlIdentifier = (SqlIdentifier) correlAliasContext
@@ -563,7 +567,7 @@ public abstract class SqlImplementor {
         if (rex instanceof RexSubQuery) {
           subQuery = (RexSubQuery) rex;
           sqlSubQuery =
-              visitChild(0, subQuery.rel).asQueryOrValues();
+              implementor().visitChild(0, subQuery.rel).asQueryOrValues();
           final List<RexNode> operands = subQuery.operands;
           SqlNode op0;
           if (operands.size() == 1) {
@@ -583,7 +587,8 @@ public abstract class SqlImplementor {
       case EXISTS:
       case SCALAR_QUERY:
         subQuery = (RexSubQuery) rex;
-        sqlSubQuery = visitChild(0, subQuery.rel).asQueryOrValues();
+        sqlSubQuery =
+            implementor().visitChild(0, subQuery.rel).asQueryOrValues();
         return subQuery.getOperator().createCall(POS, sqlSubQuery);
 
       case NOT:
@@ -633,6 +638,10 @@ public abstract class SqlImplementor {
       }
     }
 
+    protected Context getAliasContext(RexCorrelVariable variable) {
+      throw new UnsupportedOperationException();
+    }
+
     private SqlCall toSql(RexProgram program, RexOver rexOver) {
       final RexWindow rexWindow = rexOver.getWindow();
       final SqlNodeList partitionList = new SqlNodeList(
@@ -786,6 +795,39 @@ public abstract class SqlImplementor {
     }
 
     public SqlImplementor implementor() {
+      throw new UnsupportedOperationException();
+    }
+  }
+
+  /** Simple implementation of {@link Context} that cannot handle sub-queries
+   * or correlations. Because it is so simple, you do not need to create a
+   * {@link SqlImplementor} or {@link org.apache.calcite.tools.RelBuilder}
+   * to use it. It is a good way to convert a {@link RexNode} to SQL text. */
+  public static class SimpleContext extends Context {
+    @Nonnull private final IntFunction<SqlNode> field;
+
+    public SimpleContext(SqlDialect dialect, IntFunction<SqlNode> field) {
+      super(dialect, 0, false);
+      this.field = field;
+    }
+
+    public SqlNode field(int ordinal) {
+      return field.apply(ordinal);
+    }
+  }
+
+  /** Implementation of {@link Context} that has an enclosing
+   * {@link SqlImplementor} and can therefore do non-trivial expressions. */
+  protected abstract class BaseContext extends Context {
+    BaseContext(SqlDialect dialect, int fieldCount) {
+      super(dialect, fieldCount);
+    }
+
+    @Override protected Context getAliasContext(RexCorrelVariable variable) {
+      return correlTableMap.get(variable.id);
+    }
+
+    @Override public SqlImplementor implementor() {
       return SqlImplementor.this;
     }
   }
@@ -801,23 +843,24 @@ public abstract class SqlImplementor {
 
   public Context aliasContext(Map<String, RelDataType> aliases,
       boolean qualified) {
-    return new AliasContext(aliases, qualified);
+    return new AliasContext(dialect, aliases, qualified);
   }
 
   public Context joinContext(Context leftContext, Context rightContext) {
-    return new JoinContext(leftContext, rightContext);
+    return new JoinContext(dialect, leftContext, rightContext);
   }
 
   public Context matchRecognizeContext(Context context) {
-    return new MatchRecognizeContext(((AliasContext) context).aliases);
+    return new MatchRecognizeContext(dialect, ((AliasContext) context).aliases);
   }
 
   /**
    * Context for translating MATCH_RECOGNIZE clause
    */
   public class MatchRecognizeContext extends AliasContext {
-    protected MatchRecognizeContext(Map<String, RelDataType> aliases) {
-      super(aliases, false);
+    protected MatchRecognizeContext(SqlDialect dialect,
+        Map<String, RelDataType> aliases) {
+      super(dialect, aliases, false);
     }
 
     @Override public SqlNode toSql(RexProgram program, RexNode rex) {
@@ -833,14 +876,14 @@ public abstract class SqlImplementor {
 
   /** Implementation of Context that precedes field references with their
    * "table alias" based on the current sub-query's FROM clause. */
-  public class AliasContext extends Context {
+  public class AliasContext extends BaseContext {
     private final boolean qualified;
     private final Map<String, RelDataType> aliases;
 
     /** Creates an AliasContext; use {@link #aliasContext(Map, boolean)}. */
-    protected AliasContext(Map<String, RelDataType> aliases,
-        boolean qualified) {
-      super(computeFieldCount(aliases));
+    protected AliasContext(SqlDialect dialect,
+        Map<String, RelDataType> aliases, boolean qualified) {
+      super(dialect, computeFieldCount(aliases));
       this.aliases = aliases;
       this.qualified = qualified;
     }
@@ -869,13 +912,14 @@ public abstract class SqlImplementor {
 
   /** Context for translating ON clause of a JOIN from {@link RexNode} to
    * {@link SqlNode}. */
-  class JoinContext extends Context {
+  class JoinContext extends BaseContext {
     private final SqlImplementor.Context leftContext;
     private final SqlImplementor.Context rightContext;
 
     /** Creates a JoinContext; use {@link #joinContext(Context, Context)}. */
-    private JoinContext(Context leftContext, Context rightContext) {
-      super(leftContext.fieldCount + rightContext.fieldCount);
+    private JoinContext(SqlDialect dialect, Context leftContext,
+        Context rightContext) {
+      super(dialect, leftContext.fieldCount + rightContext.fieldCount);
       this.leftContext = leftContext;
       this.rightContext = rightContext;
     }
@@ -956,7 +1000,7 @@ public abstract class SqlImplementor {
       Context newContext;
       final SqlNodeList selectList = select.getSelectList();
       if (selectList != null) {
-        newContext = new Context(selectList.size()) {
+        newContext = new Context(dialect, selectList.size()) {
           public SqlNode field(int ordinal) {
             final SqlNode selectItem = selectList.get(ordinal);
             switch (selectItem.getKind()) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/schema/impl/StarTable.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/schema/impl/StarTable.java b/core/src/main/java/org/apache/calcite/schema/impl/StarTable.java
index b1d89da..05444f6 100644
--- a/core/src/main/java/org/apache/calcite/schema/impl/StarTable.java
+++ b/core/src/main/java/org/apache/calcite/schema/impl/StarTable.java
@@ -89,7 +89,7 @@ public class StarTable extends AbstractTable implements TranslatableTable {
     if (this.fieldCounts == null) {
       this.fieldCounts = ImmutableIntList.copyOf(fieldCounts);
     }
-    return typeFactory.createStructType(typeList, lattice.uniqueColumnNames);
+    return typeFactory.createStructType(typeList, lattice.uniqueColumnNames());
   }
 
   public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable table) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java b/core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java
index 87b8f15..99aa23e 100644
--- a/core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java
+++ b/core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java
@@ -1167,6 +1167,13 @@ public class SqlValidatorUtil {
       (original, attempt, size) -> Util.first(original, "$f")
           + Math.max(size, attempt);
 
+  public static final Suggester ATTEMPT_SUGGESTER =
+      new Suggester() {
+        public String apply(String original, int attempt, int size) {
+          return Util.first(original, "$") + attempt;
+        }
+      };
+
   /** Builds a list of GROUP BY expressions. */
   static class GroupAnalyzer {
     /** Extra expressions, computed from the input as extra GROUP BY

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/tools/FrameworkConfig.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/tools/FrameworkConfig.java b/core/src/main/java/org/apache/calcite/tools/FrameworkConfig.java
index 94bb273..ace0226 100644
--- a/core/src/main/java/org/apache/calcite/tools/FrameworkConfig.java
+++ b/core/src/main/java/org/apache/calcite/tools/FrameworkConfig.java
@@ -16,6 +16,7 @@
  */
 package org.apache.calcite.tools;
 
+import org.apache.calcite.materialize.SqlStatisticProvider;
 import org.apache.calcite.plan.Context;
 import org.apache.calcite.plan.RelOptCostFactory;
 import org.apache.calcite.plan.RelTraitDef;
@@ -118,6 +119,20 @@ public interface FrameworkConfig {
    * Returns the type system.
    */
   RelDataTypeSystem getTypeSystem();
+
+  /**
+   * Returns whether the lattice suggester should try to widen a lattice when a
+   * new query arrives that doesn't quite fit, as opposed to creating a new
+   * lattice.
+   */
+  boolean isEvolveLattice();
+
+  /**
+   * Returns the source of statistics about tables and columns to be used
+   * by the lattice suggester to deduce primary keys, foreign keys, and the
+   * direction of relationships.
+   */
+  SqlStatisticProvider getStatisticProvider();
 }
 
 // End FrameworkConfig.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/tools/Frameworks.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/tools/Frameworks.java b/core/src/main/java/org/apache/calcite/tools/Frameworks.java
index ee0549b..e5d83fb 100644
--- a/core/src/main/java/org/apache/calcite/tools/Frameworks.java
+++ b/core/src/main/java/org/apache/calcite/tools/Frameworks.java
@@ -18,6 +18,8 @@ package org.apache.calcite.tools;
 
 import org.apache.calcite.config.CalciteConnectionProperty;
 import org.apache.calcite.jdbc.CalciteSchema;
+import org.apache.calcite.materialize.MapSqlStatisticProvider;
+import org.apache.calcite.materialize.SqlStatisticProvider;
 import org.apache.calcite.plan.Context;
 import org.apache.calcite.plan.RelOptCluster;
 import org.apache.calcite.plan.RelOptCostFactory;
@@ -167,36 +169,74 @@ public class Frameworks {
     return CalciteSchema.createRootSchema(addMetadataSchema).plus();
   }
 
+  /** Creates a config builder with each setting initialized to its default
+   * value. */
   public static ConfigBuilder newConfigBuilder() {
     return new ConfigBuilder();
   }
 
+  /** Creates a config builder initializing each setting from an existing
+   * config.
+   *
+   * <p>So, {@code newConfigBuilder(config).build()} will return a
+   * value equal to {@code config}. */
+  public static ConfigBuilder newConfigBuilder(FrameworkConfig config) {
+    return new ConfigBuilder(config);
+  }
+
   /**
    * A builder to help you build a {@link FrameworkConfig} using defaults
    * where values aren't required.
    */
   public static class ConfigBuilder {
-    private SqlRexConvertletTable convertletTable =
-        StandardConvertletTable.INSTANCE;
-    private SqlOperatorTable operatorTable = SqlStdOperatorTable.instance();
-    private ImmutableList<Program> programs = ImmutableList.of();
+    private SqlRexConvertletTable convertletTable;
+    private SqlOperatorTable operatorTable;
+    private ImmutableList<Program> programs;
     private Context context;
     private ImmutableList<RelTraitDef> traitDefs;
-    private SqlParser.Config parserConfig =
-        SqlParser.Config.DEFAULT;
-    private SqlToRelConverter.Config sqlToRelConverterConfig =
-        SqlToRelConverter.Config.DEFAULT;
+    private SqlParser.Config parserConfig;
+    private SqlToRelConverter.Config sqlToRelConverterConfig;
     private SchemaPlus defaultSchema;
     private RexExecutor executor;
     private RelOptCostFactory costFactory;
-    private RelDataTypeSystem typeSystem = RelDataTypeSystem.DEFAULT;
-
-    private ConfigBuilder() {}
+    private RelDataTypeSystem typeSystem;
+    private boolean evolveLattice;
+    private SqlStatisticProvider statisticProvider;
+
    /** Creates a ConfigBuilder, initializing each setting to its default
     * value.
     *
     * <p>Fields with no natural default ({@code context}, {@code traitDefs},
     * {@code defaultSchema}, {@code executor}, {@code costFactory}) are left
     * null. */
    private ConfigBuilder() {
      convertletTable = StandardConvertletTable.INSTANCE;
      operatorTable = SqlStdOperatorTable.instance();
      programs = ImmutableList.of();
      parserConfig = SqlParser.Config.DEFAULT;
      sqlToRelConverterConfig = SqlToRelConverter.Config.DEFAULT;
      typeSystem = RelDataTypeSystem.DEFAULT;
      // Lattice suggester defaults: do not evolve lattices, and use the
      // in-memory statistic provider.
      evolveLattice = false;
      statisticProvider = MapSqlStatisticProvider.INSTANCE;
    }
+
    /** Creates a ConfigBuilder, copying every setting from an existing
     * config, so that {@code newConfigBuilder(config).build()} yields a
     * config equal to {@code config}. */
    private ConfigBuilder(FrameworkConfig config) {
      convertletTable = config.getConvertletTable();
      operatorTable = config.getOperatorTable();
      programs = config.getPrograms();
      context = config.getContext();
      traitDefs = config.getTraitDefs();
      parserConfig = config.getParserConfig();
      sqlToRelConverterConfig = config.getSqlToRelConverterConfig();
      defaultSchema = config.getDefaultSchema();
      executor = config.getExecutor();
      costFactory = config.getCostFactory();
      typeSystem = config.getTypeSystem();
      evolveLattice = config.isEvolveLattice();
      statisticProvider = config.getStatisticProvider();
    }
 
     public FrameworkConfig build() {
       return new StdFrameworkConfig(context, convertletTable, operatorTable,
           programs, traitDefs, parserConfig, sqlToRelConverterConfig,
-          defaultSchema, costFactory, typeSystem, executor);
+          defaultSchema, costFactory, typeSystem, executor, evolveLattice,
+          statisticProvider);
     }
 
     public ConfigBuilder context(Context c) {
@@ -278,6 +318,17 @@ public class Frameworks {
       this.typeSystem = Objects.requireNonNull(typeSystem);
       return this;
     }
+
+    public ConfigBuilder evolveLattice(boolean evolveLattice) {
+      this.evolveLattice = evolveLattice;
+      return this;
+    }
+
+    public ConfigBuilder statisticProvider(
+        SqlStatisticProvider statisticProvider) {
+      this.statisticProvider = Objects.requireNonNull(statisticProvider);
+      return this;
+    }
   }
 
   /**
@@ -296,6 +347,8 @@ public class Frameworks {
     private final RelOptCostFactory costFactory;
     private final RelDataTypeSystem typeSystem;
     private final RexExecutor executor;
+    private final boolean evolveLattice;
+    private final SqlStatisticProvider statisticProvider;
 
     StdFrameworkConfig(Context context,
         SqlRexConvertletTable convertletTable,
@@ -307,7 +360,9 @@ public class Frameworks {
         SchemaPlus defaultSchema,
         RelOptCostFactory costFactory,
         RelDataTypeSystem typeSystem,
-        RexExecutor executor) {
+        RexExecutor executor,
+        boolean evolveLattice,
+        SqlStatisticProvider statisticProvider) {
       this.context = context;
       this.convertletTable = convertletTable;
       this.operatorTable = operatorTable;
@@ -319,6 +374,8 @@ public class Frameworks {
       this.costFactory = costFactory;
       this.typeSystem = typeSystem;
       this.executor = executor;
+      this.evolveLattice = evolveLattice;
+      this.statisticProvider = statisticProvider;
     }
 
     public SqlParser.Config getParserConfig() {
@@ -364,6 +421,14 @@ public class Frameworks {
     public RelDataTypeSystem getTypeSystem() {
       return typeSystem;
     }
+
+    public boolean isEvolveLattice() {
+      return evolveLattice;
+    }
+
+    public SqlStatisticProvider getStatisticProvider() {
+      return statisticProvider;
+    }
   }
 }
 

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/util/graph/AttributedDirectedGraph.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/calcite/util/graph/AttributedDirectedGraph.java 
b/core/src/main/java/org/apache/calcite/util/graph/AttributedDirectedGraph.java
new file mode 100644
index 0000000..6f325e1
--- /dev/null
+++ 
b/core/src/main/java/org/apache/calcite/util/graph/AttributedDirectedGraph.java
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.util.graph;
+
+import org.apache.calcite.util.Util;
+
+import java.util.List;
+
+/**
+ * Directed graph where edges have attributes and allows multiple edges between
+ * any two vertices provided that their attributes are different.
+ *
+ * @param <V> Vertex type
+ * @param <E> Edge type
+ */
+public class AttributedDirectedGraph<V, E extends DefaultEdge>
+    extends DefaultDirectedGraph<V, E> {
+  /** Creates an attributed graph. */
+  public AttributedDirectedGraph(AttributedEdgeFactory<V, E> edgeFactory) {
+    super(edgeFactory);
+  }
+
+  public static <V, E extends DefaultEdge> AttributedDirectedGraph<V, E> 
create(
+      AttributedEdgeFactory<V, E> edgeFactory) {
+    return new AttributedDirectedGraph<>(edgeFactory);
+  }
+
+  /** Returns the first edge between one vertex to another. */
+  @Override public E getEdge(V source, V target) {
+    final VertexInfo<V, E> info = vertexMap.get(source);
+    for (E outEdge : info.outEdges) {
+      if (outEdge.target.equals(target)) {
+        return outEdge;
+      }
+    }
+    return null;
+  }
+
+  /** @deprecated Use {@link #addEdge(Object, Object, Object...)}. */
+  @Deprecated
+  public E addEdge(V vertex, V targetVertex) {
+    return super.addEdge(vertex, targetVertex);
+  }
+
+  public E addEdge(V vertex, V targetVertex, Object... attributes) {
+    final VertexInfo<V, E> info = vertexMap.get(vertex);
+    if (info == null) {
+      throw new IllegalArgumentException("no vertex " + vertex);
+    }
+    final VertexInfo<V, E> info2 = vertexMap.get(targetVertex);
+    if (info2 == null) {
+      throw new IllegalArgumentException("no vertex " + targetVertex);
+    }
+    @SuppressWarnings("unchecked")
+    final AttributedEdgeFactory<V, E> f =
+        (AttributedEdgeFactory) this.edgeFactory;
+    final E edge = f.createEdge(vertex, targetVertex, attributes);
+    if (edges.add(edge)) {
+      info.outEdges.add(edge);
+      return edge;
+    } else {
+      return null;
+    }
+  }
+
+  /** Returns all edges between one vertex to another. */
+  public Iterable<E> getEdges(V source, final V target) {
+    final VertexInfo<V, E> info = vertexMap.get(source);
+    return Util.filter(info.outEdges, outEdge -> 
outEdge.target.equals(target));
+  }
+
+  /** Removes all edges from a given vertex to another.
+   * Returns whether any were removed. */
+  public boolean removeEdge(V source, V target) {
+    final VertexInfo<V, E> info = vertexMap.get(source);
+    List<E> outEdges = info.outEdges;
+    int removeCount = 0;
+    for (int i = 0, size = outEdges.size(); i < size; i++) {
+      E edge = outEdges.get(i);
+      if (edge.target.equals(target)) {
+        outEdges.remove(i);
+        edges.remove(edge);
+        ++removeCount;
+      }
+    }
+    return removeCount > 0;
+  }
+
+  /** Factory for edges that have attributes.
+   *
+   * @param <V> Vertex type
+   * @param <E> Edge type
+   */
+  public interface AttributedEdgeFactory<V, E> extends EdgeFactory<V, E> {
+    E createEdge(V v0, V v1, Object... attributes);
+  }
+}
+
+// End AttributedDirectedGraph.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/util/graph/DefaultDirectedGraph.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/calcite/util/graph/DefaultDirectedGraph.java 
b/core/src/main/java/org/apache/calcite/util/graph/DefaultDirectedGraph.java
index 1da04d9..a93060c 100644
--- a/core/src/main/java/org/apache/calcite/util/graph/DefaultDirectedGraph.java
+++ b/core/src/main/java/org/apache/calcite/util/graph/DefaultDirectedGraph.java
@@ -16,6 +16,8 @@
  */
 package org.apache.calcite.util.graph;
 
+import com.google.common.collect.Ordering;
+
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -34,8 +36,7 @@ import java.util.Set;
 public class DefaultDirectedGraph<V, E extends DefaultEdge>
     implements DirectedGraph<V, E> {
   final Set<E> edges = new LinkedHashSet<>();
-  final Map<V, VertexInfo<V, E>> vertexMap =
-      new LinkedHashMap<>();
+  final Map<V, VertexInfo<V, E>> vertexMap = new LinkedHashMap<>();
   final EdgeFactory<V, E> edgeFactory;
 
   /** Creates a graph. */
@@ -52,15 +53,28 @@ public class DefaultDirectedGraph<V, E extends DefaultEdge>
     return new DefaultDirectedGraph<>(edgeFactory);
   }
 
+  public String toStringUnordered() {
+    return "graph("
+        + "vertices: " + vertexMap.keySet()
+        + ", edges: " + edges + ")";
+  }
+
   @Override public String toString() {
-    StringBuilder buf = new StringBuilder();
-    buf.append("graph(")
-        .append("vertices: ")
-        .append(vertexMap.keySet())
-        .append(", edges: ")
-        .append(edges)
-        .append(")");
-    return buf.toString();
+    @SuppressWarnings("unchecked")
+    final Ordering<V> vertexOrdering = (Ordering) Ordering.usingToString();
+    @SuppressWarnings("unchecked")
+    final Ordering<E> edgeOrdering = (Ordering) Ordering.usingToString();
+    return toString(vertexOrdering, edgeOrdering);
+  }
+
+  /** Returns the string representation of this graph, using the given
+   * orderings to ensure that the output order of vertices and edges is
+   * deterministic. */
+  private String toString(Ordering<V> vertexOrdering,
+      Ordering<E> edgeOrdering) {
+    return "graph("
+        + "vertices: " + vertexOrdering.sortedCopy(vertexMap.keySet())
+        + ", edges: " + edgeOrdering.sortedCopy(edges) + ")";
   }
 
   public boolean addVertex(V vertex) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/util/graph/DefaultEdge.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/util/graph/DefaultEdge.java 
b/core/src/main/java/org/apache/calcite/util/graph/DefaultEdge.java
index d288d02..3570b4d 100644
--- a/core/src/main/java/org/apache/calcite/util/graph/DefaultEdge.java
+++ b/core/src/main/java/org/apache/calcite/util/graph/DefaultEdge.java
@@ -16,6 +16,8 @@
  */
 package org.apache.calcite.util.graph;
 
+import java.util.Objects;
+
 /**
  * Default implementation of Edge.
  */
@@ -24,8 +26,8 @@ public class DefaultEdge {
   public final Object target;
 
   public DefaultEdge(Object source, Object target) {
-    this.source = source;
-    this.target = target;
+    this.source = Objects.requireNonNull(source);
+    this.target = Objects.requireNonNull(target);
   }
 
   @Override public int hashCode() {

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/main/java/org/apache/calcite/util/mapping/IntPair.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/calcite/util/mapping/IntPair.java 
b/core/src/main/java/org/apache/calcite/util/mapping/IntPair.java
index 87865b7..f4685f4 100644
--- a/core/src/main/java/org/apache/calcite/util/mapping/IntPair.java
+++ b/core/src/main/java/org/apache/calcite/util/mapping/IntPair.java
@@ -18,7 +18,12 @@ package org.apache.calcite.util.mapping;
 
 import org.apache.calcite.runtime.Utilities;
 
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Ordering;
+
 import java.util.AbstractList;
+import java.util.Comparator;
 import java.util.List;
 
 /**
@@ -27,6 +32,44 @@ import java.util.List;
  * @see Mapping#iterator()
  */
 public class IntPair {
+  /** Function that swaps source and target fields of an {@link IntPair}. */
+  public static final Function<IntPair, IntPair> SWAP =
+      new Function<IntPair, IntPair>() {
+        public IntPair apply(IntPair pair) {
+          return of(pair.target, pair.source);
+        }
+      };
+
+  /** Ordering that compares pairs lexicographically: first by their source,
+   * then by their target. */
+  public static final Ordering<IntPair> ORDERING =
+      Ordering.from(
+          new Comparator<IntPair>() {
+            public int compare(IntPair o1, IntPair o2) {
+              int c = Integer.compare(o1.source, o2.source);
+              if (c == 0) {
+                c = Integer.compare(o1.target, o2.target);
+              }
+              return c;
+            }
+          });
+
+  /** Function that returns the left (source) side of a pair. */
+  public static final Function<IntPair, Integer> LEFT =
+      new Function<IntPair, Integer>() {
+        public Integer apply(IntPair pair) {
+          return pair.source;
+        }
+      };
+
+  /** Function that returns the right (target) side of a pair. */
+  public static final Function<IntPair, Integer> RIGHT =
+      new Function<IntPair, Integer>() {
+        public Integer apply(IntPair pair) {
+          return pair.target;
+        }
+      };
+
   //~ Instance fields --------------------------------------------------------
 
   public final int source;
@@ -110,6 +153,16 @@ public class IntPair {
       }
     };
   }
+
+  /** Returns the left side of a list of pairs. */
+  public static List<Integer> left(final List<IntPair> pairs) {
+    return Lists.transform(pairs, LEFT);
+  }
+
+  /** Returns the right side of a list of pairs. */
+  public static List<Integer> right(final List<IntPair> pairs) {
+    return Lists.transform(pairs, RIGHT);
+  }
 }
 
 // End IntPair.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
----------------------------------------------------------------------
diff --git 
a/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java 
b/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
new file mode 100644
index 0000000..43f7562
--- /dev/null
+++ 
b/core/src/test/java/org/apache/calcite/materialize/LatticeSuggesterTest.java
@@ -0,0 +1,626 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.materialize;
+
+import org.apache.calcite.prepare.PlannerImpl;
+import org.apache.calcite.rel.RelRoot;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.parser.SqlParseException;
+import org.apache.calcite.test.CalciteAssert;
+import org.apache.calcite.test.FoodMartQuerySet;
+import org.apache.calcite.tools.FrameworkConfig;
+import org.apache.calcite.tools.Frameworks;
+import org.apache.calcite.tools.Planner;
+import org.apache.calcite.tools.RelConversionException;
+import org.apache.calcite.tools.ValidationException;
+import org.apache.calcite.util.Util;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+
+import org.hamcrest.BaseMatcher;
+import org.hamcrest.Description;
+import org.hamcrest.Matcher;
+import org.hamcrest.TypeSafeMatcher;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.hamcrest.CoreMatchers.allOf;
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+
+/**
+ * Unit tests for {@link LatticeSuggester}.
+ */
+public class LatticeSuggesterTest {
+
  /** Some basic query patterns on the Scott schema with "EMP" and "DEPT"
   * tables. */
  @Test public void testEmpDept() throws Exception {
    final Tester t = new Tester();
    // Inner join with aggregates; expect one lattice rooted at EMP
    final String q0 = "select dept.dname, count(*), sum(sal)\n"
        + "from emp\n"
        + "join dept using (deptno)\n"
        + "group by dept.dname";
    assertThat(t.addQuery(q0),
        isGraphs("EMP (DEPT:DEPTNO)", "[COUNT(), SUM(EMP.SAL)]"));

    // Same as above, but using WHERE rather than JOIN
    final String q1 = "select dept.dname, count(*), sum(sal)\n"
        + "from emp, dept\n"
        + "where emp.deptno = dept.deptno\n"
        + "group by dept.dname";
    assertThat(t.addQuery(q1),
        isGraphs("EMP (DEPT:DEPTNO)", "[COUNT(), SUM(EMP.SAL)]"));

    // With HAVING
    final String q2 = "select dept.dname\n"
        + "from emp, dept\n"
        + "where emp.deptno = dept.deptno\n"
        + "group by dept.dname\n"
        + "having count(*) > 10";
    assertThat(t.addQuery(q2),
        isGraphs("EMP (DEPT:DEPTNO)", "[COUNT()]"));

    // No joins, therefore graph has a single node and no edges
    final String q3 = "select distinct dname\n"
        + "from dept";
    assertThat(t.addQuery(q3),
        isGraphs("DEPT", "[]"));

    // Graph is empty because there are no tables
    final String q4 = "select distinct t.c\n"
        + "from (values 1, 2) as t(c)"
        + "join (values 2, 3) as u(c) using (c)\n";
    assertThat(t.addQuery(q4),
        isGraphs());

    // Self-join
    final String q5 = "select *\n"
        + "from emp as e\n"
        + "join emp as m on e.mgr = m.empno";
    assertThat(t.addQuery(q5),
        isGraphs("EMP (EMP:MGR)", "[]"));

    // Self-join, twice
    final String q6 = "select *\n"
        + "from emp as e join emp as m on e.mgr = m.empno\n"
        + "join emp as m2 on m.mgr = m2.empno";
    assertThat(t.addQuery(q6),
        isGraphs("EMP (EMP:MGR (EMP:MGR))", "[]"));

    // No graphs, because cyclic: e -> m, m -> m2, m2 -> e
    final String q7 = "select *\n"
        + "from emp as e\n"
        + "join emp as m on e.mgr = m.empno\n"
        + "join emp as m2 on m.mgr = m2.empno\n"
        + "where m2.mgr = e.empno";
    assertThat(t.addQuery(q7),
        isGraphs());

    // The graph of all tables and hops accumulated over the queries above
    final String expected = "graph("
        + "vertices: [[scott, DEPT],"
        + " [scott, EMP]], "
        + "edges: [Step([scott, EMP], [scott, DEPT], DEPTNO:DEPTNO),"
        + " Step([scott, EMP], [scott, EMP], MGR:EMPNO)])";
    assertThat(t.s.space.g.toString(), is(expected));
  }
+
  /** Star query on the FoodMart schema; expects a snowflake lattice rooted
   * at "sales_fact_1997". */
  @Test public void testFoodmart() throws Exception {
    final Tester t = new Tester().foodmart();
    final String q = "select \"t\".\"the_year\" as \"c0\",\n"
        + " \"t\".\"quarter\" as \"c1\",\n"
        + " \"pc\".\"product_family\" as \"c2\",\n"
        + " sum(\"s\".\"unit_sales\") as \"m0\"\n"
        + "from \"time_by_day\" as \"t\",\n"
        + " \"sales_fact_1997\" as \"s\",\n"
        + " \"product_class\" as \"pc\",\n"
        + " \"product\" as \"p\"\n"
        + "where \"s\".\"time_id\" = \"t\".\"time_id\"\n"
        + "and \"t\".\"the_year\" = 1997\n"
        + "and \"s\".\"product_id\" = \"p\".\"product_id\"\n"
        + "and \"p\".\"product_class_id\" = \"pc\".\"product_class_id\"\n"
        + "group by \"t\".\"the_year\",\n"
        + " \"t\".\"quarter\",\n"
        + " \"pc\".\"product_family\"";
    final String g = "sales_fact_1997"
        + " (product:product_id (product_class:product_class_id)"
        + " time_by_day:time_id)";
    assertThat(t.addQuery(q),
        isGraphs(g, "[SUM(sales_fact_1997.unit_sales)]"));

    // The graph of all tables and hops
    final String expected = "graph("
        + "vertices: ["
        + "[foodmart, product], "
        + "[foodmart, product_class], "
        + "[foodmart, sales_fact_1997], "
        + "[foodmart, time_by_day]], "
        + "edges: ["
        + "Step([foodmart, product], [foodmart, product_class],"
        + " product_class_id:product_class_id), "
        + "Step([foodmart, sales_fact_1997], [foodmart, product],"
        + " product_id:product_id), "
        + "Step([foodmart, sales_fact_1997], [foodmart, time_by_day],"
        + " time_id:time_id)])";
    assertThat(t.s.space.g.toString(), is(expected));
  }
+
  /** Aggregate applied to an expression (a CASE) rather than a plain column;
   * the suggester registers a derived column "m0" and a measure named
   * "sum_m0" over it. */
  @Test public void testAggregateExpression() throws Exception {
    final Tester t = new Tester().foodmart();
    final String q = "select \"t\".\"the_year\" as \"c0\",\n"
        + " \"pc\".\"product_family\" as \"c1\",\n"
        + " sum((case when \"s\".\"promotion_id\" = 0\n"
        + "     then 0 else \"s\".\"store_sales\"\n"
        + "     end)) as \"sum_m0\"\n"
        + "from \"time_by_day\" as \"t\",\n"
        + " \"sales_fact_1997\" as \"s\",\n"
        + " \"product_class\" as \"pc\",\n"
        + " \"product\" as \"p\"\n"
        + "where \"s\".\"time_id\" = \"t\".\"time_id\"\n"
        + " and \"t\".\"the_year\" = 1997\n"
        + " and \"s\".\"product_id\" = \"p\".\"product_id\"\n"
        + " and \"p\".\"product_class_id\" = \"pc\".\"product_class_id\"\n"
        + "group by \"t\".\"the_year\",\n"
        + " \"pc\".\"product_family\"\n";
    final String g = "sales_fact_1997"
        + " (product:product_id (product_class:product_class_id)"
        + " time_by_day:time_id)";
    final String expected = "[SUM(m0)]";
    assertThat(t.addQuery(q),
        allOf(isGraphs(g, expected),
            hasMeasureNames(0, "sum_m0"),
            hasDerivedColumnNames(0, "m0")));
  }
+
  /** Creates a matcher that checks that the lattice at position
   * {@code ordinal} has exactly the given default-measure names, in order. */
  private Matcher<List<Lattice>> hasMeasureNames(int ordinal,
      String... names) {
    final List<String> nameList = ImmutableList.copyOf(names);
    return new TypeSafeMatcher<List<Lattice>>() {
      public void describeTo(Description description) {
        description.appendValue(names);
      }

      protected boolean matchesSafely(List<Lattice> lattices) {
        final Lattice lattice = lattices.get(ordinal);
        final List<String> actualNameList =
            Util.transform(lattice.defaultMeasures, measure -> measure.name);
        return actualNameList.equals(nameList);
      }
    };
  }
+
  /** Creates a matcher that checks that the lattice at position
   * {@code ordinal} has exactly the given derived-column aliases, in order.
   * Non-derived columns are ignored. */
  private Matcher<List<Lattice>> hasDerivedColumnNames(int ordinal,
      String... names) {
    final List<String> nameList = ImmutableList.copyOf(names);
    return new TypeSafeMatcher<List<Lattice>>() {
      public void describeTo(Description description) {
        description.appendValue(names);
      }

      protected boolean matchesSafely(List<Lattice> lattices) {
        final Lattice lattice = lattices.get(ordinal);
        final List<String> actualNameList =
            lattice.columns.stream()
                .filter(c -> c instanceof Lattice.DerivedColumn)
                .map(c -> ((Lattice.DerivedColumn) c).alias)
                .collect(Collectors.toList());
        return actualNameList.equals(nameList);
      }
    };
  }
+
  /** Tests a query in which the same dimension table ("region") is reached
   * along two different join paths (via "store" and via "customer"); both
   * occurrences should appear in the lattice graph. */
  @Test public void testSharedSnowflake() throws Exception {
    final Tester t = new Tester().foodmart();
    // foodmart query 5827 (also 5828, 5830, 5832) uses the "region" table
    // twice: once via "store" and once via "customer";
    // TODO: test what happens if FK from "store" to "region" is reversed
    final String q = "select \"s\".\"store_country\" as \"c0\",\n"
        + " \"r\".\"sales_region\" as \"c1\",\n"
        + " \"r1\".\"sales_region\" as \"c2\",\n"
        + " sum(\"f\".\"unit_sales\") as \"m0\"\n"
        + "from \"store\" as \"s\",\n"
        + " \"sales_fact_1997\" as \"f\",\n"
        + " \"region\" as \"r\",\n"
        + " \"region\" as \"r1\",\n"
        + " \"customer\" as \"c\"\n"
        + "where \"f\".\"store_id\" = \"s\".\"store_id\"\n"
        + " and \"s\".\"store_country\" = 'USA'\n"
        + " and \"s\".\"region_id\" = \"r\".\"region_id\"\n"
        + " and \"r\".\"sales_region\" = 'South West'\n"
        + " and \"f\".\"customer_id\" = \"c\".\"customer_id\"\n"
        + " and \"c\".\"customer_region_id\" = \"r1\".\"region_id\"\n"
        + " and \"r1\".\"sales_region\" = 'South West'\n"
        + "group by \"s\".\"store_country\",\n"
        + " \"r\".\"sales_region\",\n"
        + " \"r1\".\"sales_region\"\n";
    final String g = "sales_fact_1997"
        + " (customer:customer_id (region:customer_region_id)"
        + " store:store_id (region:region_id))";
    assertThat(t.addQuery(q),
        isGraphs(g, "[SUM(sales_fact_1997.unit_sales)]"));
  }
+
  /** Runs FoodMart queries 392 and 393 (which contain expressions inside
   * aggregates) with lattice evolution enabled; the test passes if
   * {@code addQuery} does not throw — there are no value assertions. */
  @Test public void testExpressionInAggregate() throws Exception {
    final Tester t = new Tester().withEvolve(true).foodmart();
    final FoodMartQuerySet set = FoodMartQuerySet.instance();
    for (int id : new int[]{392, 393}) {
      t.addQuery(set.queries.get(id).sql);
    }
  }
+
+  private void checkFoodMartAll(boolean evolve) throws Exception {
+    final Tester t = new Tester().foodmart().withEvolve(evolve);
+    final FoodMartQuerySet set = FoodMartQuerySet.instance();
+    for (FoodMartQuerySet.FoodmartQuery query : set.queries.values()) {
+      if (query.sql.contains("\"agg_10_foo_fact\"")
+          || query.sql.contains("\"agg_line_class\"")
+          || query.sql.contains("\"agg_tenant\"")
+          || query.sql.contains("\"line\"")
+          || query.sql.contains("\"line_class\"")
+          || query.sql.contains("\"tenant\"")
+          || query.sql.contains("\"test_lp_xxx_fact\"")
+          || query.sql.contains("\"product_csv\"")
+          || query.sql.contains("\"product_cat\"")
+          || query.sql.contains("\"cat\"")
+          || query.sql.contains("\"fact\"")) {
+        continue;
+      }
+      switch (query.id) {
+      case 2455: // missing RTRIM function
+      case 2456: // missing RTRIM function
+      case 2457: // missing RTRIM function
+      case 5682: // case sensitivity
+      case 5700: // || applied to smallint
+        continue;
+      default:
+        t.addQuery(query.sql);
+      }
+    }
+
+    // The graph of all tables and hops
+    final String expected = "graph("
+        + "vertices: ["
+        + "[foodmart, agg_c_10_sales_fact_1997], "
+        + "[foodmart, agg_c_14_sales_fact_1997], "
+        + "[foodmart, agg_c_special_sales_fact_1997], "
+        + "[foodmart, agg_g_ms_pcat_sales_fact_1997], "
+        + "[foodmart, agg_l_03_sales_fact_1997], "
+        + "[foodmart, agg_l_04_sales_fact_1997], "
+        + "[foodmart, agg_l_05_sales_fact_1997], "
+        + "[foodmart, agg_lc_06_sales_fact_1997], "
+        + "[foodmart, agg_lc_100_sales_fact_1997], "
+        + "[foodmart, agg_ll_01_sales_fact_1997], "
+        + "[foodmart, agg_pl_01_sales_fact_1997], "
+        + "[foodmart, customer], "
+        + "[foodmart, department], "
+        + "[foodmart, employee], "
+        + "[foodmart, employee_closure], "
+        + "[foodmart, inventory_fact_1997], "
+        + "[foodmart, position], "
+        + "[foodmart, product], "
+        + "[foodmart, product_class], "
+        + "[foodmart, promotion], "
+        + "[foodmart, region], "
+        + "[foodmart, salary], "
+        + "[foodmart, sales_fact_1997], "
+        + "[foodmart, store], "
+        + "[foodmart, store_ragged], "
+        + "[foodmart, time_by_day], "
+        + "[foodmart, warehouse], "
+        + "[foodmart, warehouse_class]], "
+        + "edges: ["
+        + "Step([foodmart, agg_c_14_sales_fact_1997], [foodmart, store], 
store_id:store_id), "
+        + "Step([foodmart, agg_c_14_sales_fact_1997], [foodmart, time_by_day],"
+        + " month_of_year:month_of_year), "
+        + "Step([foodmart, customer], [foodmart, region], 
customer_region_id:region_id), "
+        + "Step([foodmart, customer], [foodmart, store], 
state_province:store_state), "
+        + "Step([foodmart, employee], [foodmart, employee], 
supervisor_id:employee_id), "
+        + "Step([foodmart, employee], [foodmart, position], 
position_id:position_id), "
+        + "Step([foodmart, employee], [foodmart, store], store_id:store_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, employee], 
product_id:employee_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, employee], 
time_id:employee_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, product], 
product_id:product_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, store], 
store_id:store_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, store], 
warehouse_id:store_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, time_by_day], 
time_id:time_id), "
+        + "Step([foodmart, inventory_fact_1997], [foodmart, warehouse],"
+        + " warehouse_id:warehouse_id), "
+        + "Step([foodmart, product], [foodmart, product_class],"
+        + " product_class_id:product_class_id), "
+        + "Step([foodmart, product], [foodmart, store], 
product_class_id:store_id), "
+        + "Step([foodmart, product_class], [foodmart, store], 
product_class_id:region_id), "
+        + "Step([foodmart, salary], [foodmart, department], 
department_id:department_id), "
+        + "Step([foodmart, salary], [foodmart, employee], 
employee_id:employee_id), "
+        + "Step([foodmart, salary], [foodmart, employee_closure], 
employee_id:employee_id), "
+        + "Step([foodmart, salary], [foodmart, time_by_day], 
pay_date:the_date), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, customer], 
customer_id:customer_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, customer], 
product_id:customer_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, customer], 
store_id:customer_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, product], 
product_id:product_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, promotion], 
promotion_id:promotion_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, store], 
product_id:store_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, store], 
store_id:store_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, store_ragged], 
store_id:store_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, time_by_day], 
product_id:time_id), "
+        + "Step([foodmart, sales_fact_1997], [foodmart, time_by_day], 
time_id:time_id), "
+        + "Step([foodmart, store], [foodmart, region], region_id:region_id), "
+        + "Step([foodmart, store], [foodmart, warehouse], store_id:stores_id), 
"
+        + "Step([foodmart, warehouse], [foodmart, warehouse_class],"
+        + " warehouse_class_id:warehouse_class_id)])";
+    assertThat(t.s.space.g.toString(), is(expected));
+    if (evolve) {
+      // compared to evolve=false, there are a few more nodes (133 vs 117),
+      // the same number of paths, and a lot fewer lattices (27 vs 386)
+      assertThat(t.s.space.nodeMap.size(), is(133));
+      assertThat(t.s.latticeMap.size(), is(27));
+      assertThat(t.s.space.pathMap.size(), is(42));
+    } else {
+      assertThat(t.s.space.nodeMap.size(), is(117));
+      assertThat(t.s.latticeMap.size(), is(386));
+      assertThat(t.s.space.pathMap.size(), is(42));
+    }
+  }
+
+  /** Runs the FoodMart reference queries through the suggester without
+   * lattice evolution; see {@code checkFoodMartAll(false)} for the expected
+   * node/lattice/path counts. */
+  @Test public void testFoodMartAll() throws Exception {
+    checkFoodMartAll(false);
+  }
+
+  /** As {@link #testFoodMartAll()} but with lattice evolution enabled, so
+   * queries are merged into far fewer lattices. */
+  @Test public void testFoodMartAllEvolve() throws Exception {
+    checkFoodMartAll(true);
+  }
+
+  /** Tests {@code LatticeRootNode.contains}: every node contains itself,
+   * and a node whose join graph has more tables contains one with fewer
+   * (fNode &lt; fcNode &lt; fcpNode), but never the reverse. */
+  @Test public void testContains() throws Exception {
+    final Tester t = new Tester().foodmart();
+    // Fact table alone.
+    final LatticeRootNode fNode = t.node("select *\n"
+        + "from \"sales_fact_1997\"");
+    // Fact table joined to customer.
+    final LatticeRootNode fcNode = t.node("select *\n"
+        + "from \"sales_fact_1997\"\n"
+        + "join \"customer\" using (\"customer_id\")");
+    // Fact table joined to customer and product.
+    final LatticeRootNode fcpNode = t.node("select *\n"
+        + "from \"sales_fact_1997\"\n"
+        + "join \"customer\" using (\"customer_id\")\n"
+        + "join \"product\" using (\"product_id\")");
+    assertThat(fNode.contains(fNode), is(true));
+    assertThat(fNode.contains(fcNode), is(false));
+    assertThat(fNode.contains(fcpNode), is(false));
+    assertThat(fcNode.contains(fNode), is(true));
+    assertThat(fcNode.contains(fcNode), is(true));
+    assertThat(fcNode.contains(fcpNode), is(false));
+    assertThat(fcpNode.contains(fNode), is(true));
+    assertThat(fcpNode.contains(fcNode), is(true));
+    assertThat(fcpNode.contains(fcpNode), is(true));
+  }
+
+  /** Tests lattice evolution: with {@code evolve(true)}, each successive
+   * query is merged into a single growing lattice (the map always has size
+   * 1) whose digest accumulates tables and measures. */
+  @Test public void testEvolve() throws Exception {
+    final Tester t = new Tester().foodmart().withEvolve(true);
+
+    // Query 0: bare fact table; seeds a lattice with just COUNT().
+    final String q0 = "select count(*)\n"
+        + "from \"sales_fact_1997\"";
+    final String l0 = "sales_fact_1997:[COUNT()]";
+    t.addQuery(q0);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l0));
+
+    // Query 1: joins customer; the same lattice absorbs the new table
+    // and the SUM measure.
+    final String q1 = "select sum(\"unit_sales\")\n"
+        + "from \"sales_fact_1997\"\n"
+        + "join \"customer\" using (\"customer_id\")\n"
+        + "group by \"customer\".\"city\"";
+    final String l1 = "sales_fact_1997 (customer:customer_id)"
+        + ":[COUNT(), SUM(sales_fact_1997.unit_sales)]";
+    t.addQuery(q1);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l1));
+
+    // Query 2: adds time_by_day and product joins plus COUNT(DISTINCT);
+    // still one lattice.
+    final String q2 = "select count(distinct \"the_day\")\n"
+        + "from \"sales_fact_1997\"\n"
+        + "join \"time_by_day\" using (\"time_id\")\n"
+        + "join \"product\" using (\"product_id\")";
+    final String l2 = "sales_fact_1997"
+        + " (customer:customer_id product:product_id time_by_day:time_id)"
+        + ":[COUNT(), SUM(sales_fact_1997.unit_sales),"
+        + " COUNT(DISTINCT time_by_day.the_day)]";
+    t.addQuery(q2);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l2));
+
+    // After the first three queries the single lattice covers four tables.
+    final Lattice lattice = Iterables.getOnlyElement(t.s.latticeMap.values());
+    final List<List<String>> tableNames =
+        lattice.tables().stream().map(table ->
+            table.t.getQualifiedName())
+            .sorted(Comparator.comparing(Object::toString))
+            .collect(Util.toImmutableList());
+    assertThat(tableNames.toString(),
+        is("[[foodmart, customer],"
+            + " [foodmart, product],"
+            + " [foodmart, sales_fact_1997],"
+            + " [foodmart, time_by_day]]"));
+
+    // Query 3: extends the product branch with product_class (shown nested
+    // in parentheses in the digest) and adds MIN.
+    final String q3 = "select min(\"product\".\"product_id\")\n"
+        + "from \"sales_fact_1997\"\n"
+        + "join \"product\" using (\"product_id\")\n"
+        + "join \"product_class\" as pc using (\"product_class_id\")\n"
+        + "group by pc.\"product_department\"";
+    final String l3 = "sales_fact_1997"
+        + " (customer:customer_id product:product_id"
+        + " (product_class:product_class_id) time_by_day:time_id)"
+        + ":[COUNT(), SUM(sales_fact_1997.unit_sales),"
+        + " MIN(product.product_id), COUNT(DISTINCT time_by_day.the_day)]";
+    t.addQuery(q3);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l3));
+  }
+
+  /** Tests that expressions in GROUP BY keys and aggregate arguments give
+   * rise to {@code Lattice.DerivedColumn}s, each attributed to the table(s)
+   * its source columns come from (here just "customer"). */
+  @Test public void testExpression() throws Exception {
+    final Tester t = new Tester().foodmart().withEvolve(true);
+
+    // Two expressions: a concatenation in the SELECT/GROUP BY and a
+    // subtraction inside AVG; the latter shows up as "$f2" in the digest.
+    final String q0 = "select\n"
+        + "  \"fname\" || ' ' || \"lname\" as \"full_name\",\n"
+        + "  count(*) as c,\n"
+        + "  avg(\"total_children\" - \"num_children_at_home\")\n"
+        + "from \"customer\"\n"
+        + "group by \"fname\", \"lname\"";
+    final String l0 = "customer:[COUNT(), AVG($f2)]";
+    t.addQuery(q0);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l0));
+    // Both derived columns should be traced back to the "customer" table.
+    final Lattice lattice = Iterables.getOnlyElement(t.s.latticeMap.values());
+    final List<Lattice.DerivedColumn> derivedColumns = lattice.columns.stream()
+        .filter(c -> c instanceof Lattice.DerivedColumn)
+        .map(c -> (Lattice.DerivedColumn) c)
+        .collect(Collectors.toList());
+    assertThat(derivedColumns.size(), is(2));
+    final List<String> tables = ImmutableList.of("customer");
+    assertThat(derivedColumns.get(0).tables, is(tables));
+    assertThat(derivedColumns.get(1).tables, is(tables));
+  }
+
+  /** As {@link #testExpression()} but with the expressions coming from the
+   * dimension side of a join; the derived columns are still attributed to
+   * the "customer" table even though the lattice root is the fact table. */
+  @Test public void testExpressionInJoin() throws Exception {
+    final Tester t = new Tester().foodmart().withEvolve(true);
+
+    final String q0 = "select\n"
+        + "  \"fname\" || ' ' || \"lname\" as \"full_name\",\n"
+        + "  count(*) as c,\n"
+        + "  avg(\"total_children\" - \"num_children_at_home\")\n"
+        + "from \"customer\" join \"sales_fact_1997\" using 
(\"customer_id\")\n"
+        + "group by \"fname\", \"lname\"";
+    final String l0 = "sales_fact_1997 (customer:customer_id)"
+        + ":[COUNT(), AVG($f2)]";
+    t.addQuery(q0);
+    assertThat(t.s.latticeMap.size(), is(1));
+    assertThat(Iterables.getOnlyElement(t.s.latticeMap.keySet()),
+        is(l0));
+    // Both derived columns draw only on customer columns.
+    final Lattice lattice = Iterables.getOnlyElement(t.s.latticeMap.values());
+    final List<Lattice.DerivedColumn> derivedColumns = lattice.columns.stream()
+        .filter(c -> c instanceof Lattice.DerivedColumn)
+        .map(c -> (Lattice.DerivedColumn) c)
+        .collect(Collectors.toList());
+    assertThat(derivedColumns.size(), is(2));
+    final List<String> tables = ImmutableList.of("customer");
+    assertThat(derivedColumns.get(0).tables, is(tables));
+    assertThat(derivedColumns.get(1).tables, is(tables));
+  }
+
+  /** Creates a matcher that matches query graphs to strings.
+   *
+   * <p>The expected strings come in pairs: for lattice {@code i}, element
+   * {@code 2 * i} is the expected root-node digest and element
+   * {@code 2 * i + 1} is the expected default measures; hence the
+   * {@code size() * 2} check in {@code matches}. */
+  private BaseMatcher<List<Lattice>> isGraphs(
+      String... strings) {
+    final List<String> expectedList = Arrays.asList(strings);
+    return new BaseMatcher<List<Lattice>>() {
+      public boolean matches(Object item) {
+        //noinspection unchecked
+        return item instanceof List
+            && ((List) item).size() * 2 == expectedList.size()
+            && allEqual((List) item, expectedList);
+      }
+
+      // Compares each lattice's digest and default-measure string against
+      // the corresponding pair of expected strings.
+      private boolean allEqual(List<Lattice> items,
+          List<String> expects) {
+        for (int i = 0; i < items.size(); i++) {
+          final Lattice lattice = items.get(i);
+          final String expectedNode = expects.get(2 * i);
+          if (!lattice.rootNode.digest.equals(expectedNode)) {
+            return false;
+          }
+          final String expectedMeasures = expects.get(2 * i + 1);
+          if (!lattice.defaultMeasures.toString().equals(expectedMeasures)) {
+            return false;
+          }
+        }
+        return true;
+      }
+
+      public void describeTo(Description description) {
+        description.appendValue(expectedList);
+      }
+    };
+  }
+
+  /** Test helper: wraps a {@link LatticeSuggester} plus the
+   * {@link FrameworkConfig} used to build it. Instances are immutable;
+   * each {@code withXxx} method returns a new Tester (and therefore a
+   * fresh suggester). */
+  private static class Tester {
+    // Suggester under test; exposed to assertions in the tests.
+    final LatticeSuggester s;
+    private final FrameworkConfig config;
+
+    /** Creates a Tester on the SCOTT schema by default. */
+    Tester() {
+      this(
+          Frameworks.newConfigBuilder()
+              .defaultSchema(schemaFrom(CalciteAssert.SchemaSpec.SCOTT))
+              .build());
+    }
+
+    private Tester(FrameworkConfig config) {
+      this.config = config;
+      s = new LatticeSuggester(config);
+    }
+
+    /** Returns a new Tester (with a fresh suggester) for the given config. */
+    Tester withConfig(FrameworkConfig config) {
+      return new Tester(config);
+    }
+
+    /** Returns a new Tester whose default schema is FoodMart. */
+    Tester foodmart() {
+      return schema(CalciteAssert.SchemaSpec.JDBC_FOODMART);
+    }
+
+    private Tester schema(CalciteAssert.SchemaSpec schemaSpec) {
+      return withConfig(builder()
+          .defaultSchema(schemaFrom(schemaSpec))
+          .build());
+    }
+
+    // Builder seeded with the current config, so withXxx methods layer on it.
+    private Frameworks.ConfigBuilder builder() {
+      return Frameworks.newConfigBuilder(config);
+    }
+
+    /** Parses, validates and converts a query, then feeds its plan to the
+     * suggester; returns the lattices the suggester proposes. */
+    List<Lattice> addQuery(String q) throws SqlParseException,
+        ValidationException, RelConversionException {
+      final Planner planner = new PlannerImpl(config);
+      final SqlNode node = planner.parse(q);
+      final SqlNode node2 = planner.validate(node);
+      final RelRoot root = planner.rel(node2);
+      return s.addQuery(root.project());
+    }
+
+    /** Parses a query returns its graph. */
+    LatticeRootNode node(String q) throws SqlParseException,
+        ValidationException, RelConversionException {
+      // Expects the query to produce exactly one lattice.
+      final List<Lattice> list = addQuery(q);
+      assertThat(list.size(), is(1));
+      return list.get(0).rootNode;
+    }
+
+    private static SchemaPlus schemaFrom(CalciteAssert.SchemaSpec spec) {
+      final SchemaPlus rootSchema = Frameworks.createRootSchema(true);
+      return CalciteAssert.addSchema(rootSchema, spec);
+    }
+
+    /** Returns a new Tester with lattice evolution switched on or off. */
+    Tester withEvolve(boolean evolve) {
+      return withConfig(builder().evolveLattice(evolve).build());
+    }
+  }
+}
+
+// End LatticeSuggesterTest.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/CalciteSuite.java 
b/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
index 4d3e67d..058d813 100644
--- a/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
+++ b/core/src/test/java/org/apache/calcite/test/CalciteSuite.java
@@ -19,6 +19,7 @@ package org.apache.calcite.test;
 import org.apache.calcite.TestKtTest;
 import org.apache.calcite.adapter.clone.ArrayTableTest;
 import org.apache.calcite.jdbc.CalciteRemoteDriverTest;
+import org.apache.calcite.materialize.LatticeSuggesterTest;
 import org.apache.calcite.plan.RelOptPlanReaderTest;
 import org.apache.calcite.plan.RelOptUtilTest;
 import org.apache.calcite.plan.RelTraitTest;
@@ -170,6 +171,7 @@ import org.junit.runners.Suite;
     CalciteSqlOperatorTest.class,
     RexProgramFuzzyTest.class,
     ProfilerTest.class,
+    LatticeSuggesterTest.class,
     LatticeTest.class,
     ReflectiveSchemaTest.class,
     SqlAdvisorJdbcTest.class,

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/test/java/org/apache/calcite/test/FoodMartQuerySet.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/FoodMartQuerySet.java 
b/core/src/test/java/org/apache/calcite/test/FoodMartQuerySet.java
new file mode 100644
index 0000000..0d54490
--- /dev/null
+++ b/core/src/test/java/org/apache/calcite/test/FoodMartQuerySet.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.test;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.ref.SoftReference;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Set of queries against the FoodMart database.
+ *
+ * <p>Loads the reference queries from the
+ * {@code net.hydromatic.foodmart.queries} artifact and indexes them by id.
+ * Obtain via {@link #instance()}. */
+public class FoodMartQuerySet {
+  // Cache of the singleton; soft so the GC may reclaim it under pressure.
+  private static SoftReference<FoodMartQuerySet> ref;
+
+  // Queries keyed by id, in the order they appear in the source file.
+  public final Map<Integer, FoodmartQuery> queries = new LinkedHashMap<>();
+
+  private FoodMartQuerySet() throws IOException {
+    final ObjectMapper mapper = new ObjectMapper();
+    // The queries file uses unquoted field names and single-quoted strings,
+    // so relax the JSON parser accordingly.
+    mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
+    mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
+
+    final InputStream inputStream =
+        new net.hydromatic.foodmart.queries.FoodmartQuerySet().getQueries();
+    FoodmartRoot root = mapper.readValue(inputStream, FoodmartRoot.class);
+    for (FoodmartQuery query : root.queries) {
+      queries.put(query.id, query);
+    }
+  }
+
+  /** Returns the singleton instance of the query set. It is backed by a
+   * soft reference, so it may be freed if memory is short and no one is
+   * using it. */
+  public static synchronized FoodMartQuerySet instance() throws IOException {
+    final SoftReference<FoodMartQuerySet> refLocal = ref;
+    if (refLocal != null) {
+      final FoodMartQuerySet set = refLocal.get();
+      if (set != null) {
+        return set;
+      }
+    }
+    // Cache empty or cleared; (re)load and re-cache.
+    final FoodMartQuerySet set = new FoodMartQuerySet();
+    ref = new SoftReference<>(set);
+    return set;
+  }
+
+  /** JSON root element. Fields are populated by Jackson. */
+  public static class FoodmartRoot {
+    public final List<FoodmartQuery> queries = new ArrayList<>();
+  }
+
+  /** JSON query element. Fields are populated by Jackson. */
+  public static class FoodmartQuery {
+    public int id;
+    public String sql;
+    public final List<FoodmartColumn> columns = new ArrayList<>();
+    public final List<List> rows = new ArrayList<>();
+  }
+
+  /** JSON column element. Fields are populated by Jackson. */
+  public static class FoodmartColumn {
+    public String name;
+    public String type;
+  }
+}
+
+// End FoodMartQuerySet.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/test/java/org/apache/calcite/test/FoodmartTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/FoodmartTest.java 
b/core/src/test/java/org/apache/calcite/test/FoodmartTest.java
index 1968817..0416c79 100644
--- a/core/src/test/java/org/apache/calcite/test/FoodmartTest.java
+++ b/core/src/test/java/org/apache/calcite/test/FoodmartTest.java
@@ -19,8 +19,6 @@ package org.apache.calcite.test;
 import org.apache.calcite.linq4j.tree.Primitive;
 import org.apache.calcite.util.IntegerIntervalSet;
 
-import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 
 import org.junit.Ignore;
@@ -29,12 +27,8 @@ import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.lang.ref.SoftReference;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Map;
 
 /**
  * Test case that runs the FoodMart reference queries.
@@ -101,7 +95,7 @@ public class FoodmartTest {
 
   // 202 and others: strip away "CAST(the_year AS UNSIGNED) = 1997"
 
-  private final FoodmartQuery query;
+  private final FoodMartQuerySet.FoodmartQuery query;
 
   @Parameterized.Parameters(name = "{index}: foodmart({0})={1}")
   public static List<Object[]> getSqls() throws IOException {
@@ -123,13 +117,13 @@ public class FoodmartTest {
         idList = buf.toString();
       }
       for (Integer id : IntegerIntervalSet.of(idList)) {
-        final FoodmartQuery query1 = set.queries.get(id);
+        final FoodMartQuerySet.FoodmartQuery query1 = set.queries.get(id);
         if (query1 != null) {
           list.add(new Object[] {id /*, query1.sql */});
         }
       }
     } else {
-      for (FoodmartQuery query1 : set.queries.values()) {
+      for (FoodMartQuerySet.FoodmartQuery query1 : set.queries.values()) {
         if (!CalciteAssert.ENABLE_SLOW && query1.id != 2) {
           // If slow queries are not enabled, only run query #2.
           continue;
@@ -145,7 +139,7 @@ public class FoodmartTest {
 
   public FoodmartTest(int id) throws IOException {
     if (id < 0) {
-      this.query = new FoodmartQuery();
+      this.query = new FoodMartQuerySet.FoodmartQuery();
       query.id = id;
       query.sql = "select * from (values 1) as t(c)";
     } else {
@@ -185,61 +179,6 @@ public class FoodmartTest {
     }
   }
 
-  /** Set of queries against the FoodMart database. */
-  public static class FoodMartQuerySet {
-    private static SoftReference<FoodMartQuerySet> ref;
-
-    final Map<Integer, FoodmartQuery> queries =
-        new LinkedHashMap<Integer, FoodmartQuery>();
-
-    private FoodMartQuerySet() throws IOException {
-      final ObjectMapper mapper = new ObjectMapper();
-      mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
-      mapper.configure(JsonParser.Feature.ALLOW_SINGLE_QUOTES, true);
-
-      final InputStream inputStream =
-          new net.hydromatic.foodmart.queries.FoodmartQuerySet().getQueries();
-      FoodmartRoot root = mapper.readValue(inputStream, FoodmartRoot.class);
-      for (FoodmartQuery query : root.queries) {
-        queries.put(query.id, query);
-      }
-    }
-
-    /** Returns the singleton instance of the query set. It is backed by a
-     * soft reference, so it may be freed if memory is short and no one is
-     * using it. */
-    public static synchronized FoodMartQuerySet instance() throws IOException {
-      final SoftReference<FoodMartQuerySet> refLocal = ref;
-      if (refLocal != null) {
-        final FoodMartQuerySet set = refLocal.get();
-        if (set != null) {
-          return set;
-        }
-      }
-      final FoodMartQuerySet set = new FoodMartQuerySet();
-      ref = new SoftReference<FoodMartQuerySet>(set);
-      return set;
-    }
-  }
-
-  /** JSON root element. */
-  public static class FoodmartRoot {
-    public final List<FoodmartQuery> queries = new ArrayList<FoodmartQuery>();
-  }
-
-  /** JSON query element. */
-  public static class FoodmartQuery {
-    public int id;
-    public String sql;
-    public final List<FoodmartColumn> columns = new 
ArrayList<FoodmartColumn>();
-    public final List<List> rows = new ArrayList<List>();
-  }
-
-  /** JSON column element. */
-  public static class FoodmartColumn {
-    public String name;
-    public String type;
-  }
 }
 
 // End FoodmartTest.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/b47413a1/core/src/test/java/org/apache/calcite/test/JdbcTest.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/JdbcTest.java 
b/core/src/test/java/org/apache/calcite/test/JdbcTest.java
index 08414be..cd87e5f 100644
--- a/core/src/test/java/org/apache/calcite/test/JdbcTest.java
+++ b/core/src/test/java/org/apache/calcite/test/JdbcTest.java
@@ -2163,8 +2163,7 @@ public class JdbcTest {
 
   private CalciteAssert.AssertQuery withFoodMartQuery(int id)
       throws IOException {
-    final FoodmartTest.FoodMartQuerySet set =
-        FoodmartTest.FoodMartQuerySet.instance();
+    final FoodMartQuerySet set = FoodMartQuerySet.instance();
     return CalciteAssert.that()
         .with(CalciteAssert.Config.FOODMART_CLONE)
         .query(set.queries.get(id).sql);
@@ -2182,8 +2181,7 @@ public class JdbcTest {
    */
   @Ignore
   @Test public void testNoCalcBetweenJoins() throws IOException {
-    final FoodmartTest.FoodMartQuerySet set =
-        FoodmartTest.FoodMartQuerySet.instance();
+    final FoodMartQuerySet set = FoodMartQuerySet.instance();
     CalciteAssert.that()
         .with(CalciteAssert.Config.FOODMART_CLONE)
         .query(set.queries.get(16).sql)
@@ -2272,8 +2270,8 @@ public class JdbcTest {
   @Ignore // DO NOT CHECK IN
   @Test public void testFoodmartLattice() throws IOException {
     // 8: select ... from customer, sales, time ... group by ...
-    final FoodmartTest.FoodmartQuery query =
-        FoodmartTest.FoodMartQuerySet.instance().queries.get(8);
+    final FoodMartQuerySet set = FoodMartQuerySet.instance();
+    final FoodMartQuerySet.FoodmartQuery query = set.queries.get(8);
     CalciteAssert.that()
         .with(CalciteAssert.Config.JDBC_FOODMART_WITH_LATTICE)
         .withDefaultSchema("foodmart")

Reply via email to