fjy closed pull request #6404: SQL: Update to Calcite 1.17.0.
URL: https://github.com/apache/incubator-druid/pull/6404
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/pom.xml b/pom.xml
index 3185ec50b65..f81c21cb7b6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -65,7 +65,7 @@
<apache.curator.version>4.0.0</apache.curator.version>
<apache.curator.test.version>2.12.0</apache.curator.test.version>
<avatica.version>1.10.0</avatica.version>
- <calcite.version>1.15.0</calcite.version>
+ <calcite.version>1.17.0</calcite.version>
<dropwizard.metrics.version>4.0.0</dropwizard.metrics.version>
<fastutil.version>8.1.0</fastutil.version>
<guava.version>16.0.1</guava.version>
diff --git
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
index 2f6b86801d8..867995a8c73 100644
---
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
+++
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/DruidTypeSystem.java
@@ -135,4 +135,10 @@ public boolean isSchemaCaseSensitive()
{
return RelDataTypeSystem.DEFAULT.isSchemaCaseSensitive();
}
+
+ @Override
+ public boolean shouldConvertRaggedUnionTypesToVarying()
+ {
+ return true;
+ }
}
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidRel.java
b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidRel.java
index bbf332ae92a..41cbe971252 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidRel.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/rel/DruidRel.java
@@ -29,7 +29,6 @@
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.AbstractRelNode;
-import org.apache.druid.java.util.common.guava.Accumulator;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.sql.calcite.planner.PlannerContext;
@@ -133,31 +132,19 @@ public PlannerContext getPlannerContext()
@Override
public Node implement(InterpreterImplementor implementor)
{
- final Sink sink = implementor.interpreter.sink(this);
- return new Node()
- {
- @Override
- public void run()
- {
- runQuery().accumulate(
- sink,
- new Accumulator<Sink, Object[]>()
- {
- @Override
- public Sink accumulate(final Sink theSink, final Object[] in)
- {
- try {
- theSink.send(Row.of(in));
- }
- catch (InterruptedException e) {
- throw Throwables.propagate(e);
- }
- return theSink;
- }
- }
- );
- }
- };
+ final Sink sink = implementor.compiler.sink(this);
+ return () -> runQuery().accumulate(
+ sink,
+ (Sink theSink, Object[] in) -> {
+ try {
+ theSink.send(Row.of(in));
+ }
+ catch (InterruptedException e) {
+ throw Throwables.propagate(e);
+ }
+ return theSink;
+ }
+ );
}
@Override
diff --git
a/sql/src/main/java/org/apache/druid/sql/calcite/schema/InformationSchema.java
b/sql/src/main/java/org/apache/druid/sql/calcite/schema/InformationSchema.java
index a0f336fa55b..e8300dfbb44 100644
---
a/sql/src/main/java/org/apache/druid/sql/calcite/schema/InformationSchema.java
+++
b/sql/src/main/java/org/apache/druid/sql/calcite/schema/InformationSchema.java
@@ -413,7 +413,7 @@ public TableType getJdbcTableType()
isDateTime ? String.valueOf(type.getPrecision()) : null,
// DATETIME_PRECISION
isCharacter ? type.getCharset().name() : null, //
CHARACTER_SET_NAME
isCharacter ? type.getCollation().getCollationName() :
null, // COLLATION_NAME
- type.getSqlTypeName().getJdbcOrdinal() // JDBC_TYPE
(Druid extension)
+ Long.valueOf(type.getSqlTypeName().getJdbcOrdinal()) //
JDBC_TYPE (Druid extension)
};
}
}
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index e0f48dcefb1..dd04e43790e 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -142,7 +142,8 @@
private static final Logger log = new Logger(CalciteQueryTest.class);
private static final PlannerConfig PLANNER_CONFIG_DEFAULT = new
PlannerConfig();
- private static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION =
new PlannerConfig() {
+ private static final PlannerConfig PLANNER_CONFIG_REQUIRE_TIME_CONDITION =
new PlannerConfig()
+ {
@Override
public boolean isRequireTimeCondition()
{
@@ -595,6 +596,20 @@ public void testExplainInformationSchemaColumns() throws
Exception
);
}
+ @Test
+ public void testMinOnInformationSchemaColumns() throws Exception
+ {
+ testQuery(
+ "SELECT MIN(JDBC_TYPE)\n"
+ + "FROM INFORMATION_SCHEMA.COLUMNS\n"
+ + "WHERE TABLE_SCHEMA = 'druid' AND TABLE_NAME = 'foo'",
+ ImmutableList.of(),
+ ImmutableList.of(
+ new Object[]{-5L}
+ )
+ );
+ }
+
@Test
public void testSelectStar() throws Exception
{
@@ -1795,11 +1810,11 @@ public void testGroupByCaseWhen() throws Exception
"d0:v",
"case_searched("
+
"(CAST(timestamp_extract(\"__time\",'DAY','UTC'), 'DOUBLE') == \"m1\"),"
- + "'match-m1 ',"
+ + "'match-m1',"
+ "(timestamp_extract(\"__time\",'DAY','UTC')
== \"cnt\"),"
+ "'match-cnt',"
+ "(timestamp_extract(\"__time\",'DAY','UTC')
== 0),"
- + "'zero ',"
+ + "'zero',"
+ DruidExpression.nullLiteral() + ")",
ValueType.STRING
)
@@ -1812,7 +1827,7 @@ public void testGroupByCaseWhen() throws Exception
ImmutableList.of(
new Object[]{NullHandling.defaultStringValue(), 2L},
new Object[]{"match-cnt", 1L},
- new Object[]{"match-m1 ", 3L}
+ new Object[]{"match-m1", 3L}
)
);
}
@@ -3150,13 +3165,21 @@ public void testCountStarWithDegenerateFilter() throws
Exception
.dataSource(CalciteTests.DATASOURCE1)
.intervals(QSS(Filtration.eternity()))
.granularity(Granularities.ALL)
- .filters(SELECTOR("dim2", "a", null))
+ .filters(
+ AND(
+ SELECTOR("dim2", "a", null),
+ OR(
+ BOUND("dim1", "a", null, true, false, null,
StringComparators.LEXICOGRAPHIC),
+ NOT(SELECTOR("dim1", null, null))
+ )
+ )
+ )
.aggregators(AGGS(new CountAggregatorFactory("a0")))
.context(TIMESERIES_CONTEXT_DEFAULT)
.build()
),
ImmutableList.of(
- new Object[]{2L}
+ new Object[]{NullHandling.sqlCompatible() ? 2L : 1L}
)
);
}
@@ -6856,7 +6879,7 @@ public void testUsingSubqueryAsPartOfOrFilter() throws
Exception
final String explanation =
"BindableSort(sort0=[$1], dir0=[ASC])\n"
+ " BindableAggregate(group=[{0, 1}], EXPR$2=[COUNT()])\n"
- + " BindableFilter(condition=[OR(=($0, 'xxx'), CAST(AND(IS NOT
NULL($4), <>($2, 0))):BOOLEAN)])\n"
+ + " BindableFilter(condition=[OR(=($0, 'xxx'), CAST(AND(IS NOT
NULL($4), <>($2, 0), IS NOT NULL($1))):BOOLEAN)])\n"
+ " BindableJoin(condition=[=($1, $3)], joinType=[left])\n"
+ " BindableJoin(condition=[true], joinType=[inner])\n"
+ "
DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"virtualColumns\":[],\"resultFormat\":\"compactedList\",\"batchSize\":20480,\"limit\":9223372036854775807,\"filter\":null,\"columns\":[\"dim1\",\"dim2\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\"},\"descending\":false,\"granularity\":{\"type\":\"all\"}}],
signature=[{dim1:STRING, dim2:STRING}])\n"
@@ -7508,7 +7531,10 @@ public void testRequireTimeConditionPositive() throws
Exception
new QueryDataSource(
GroupByQuery.builder()
.setDataSource(CalciteTests.DATASOURCE1)
-
.setInterval(QSS(Intervals.utc(DateTimes.of("2000-01-01").getMillis(),
JodaUtils.MAX_INSTANT)))
+ .setInterval(QSS(Intervals.utc(
+
DateTimes.of("2000-01-01").getMillis(),
+ JodaUtils.MAX_INSTANT
+ )))
.setGranularity(Granularities.ALL)
.setDimensions(DIMS(new
DefaultDimensionSpec("dim2", "d0")))
.setAggregatorSpecs(AGGS(new
LongSumAggregatorFactory("a0", "cnt")))
@@ -7561,10 +7587,10 @@ public void testRequireTimeConditionPositive() throws
Exception
.intervals(QSS(Intervals.utc(DateTimes.of("2000-01-01").getMillis(),
JodaUtils.MAX_INSTANT)))
.granularity(Granularities.ALL)
.filters(IN(
- "dim2",
- ImmutableList.of("1", "2", "a", "d"),
- new SubstringDimExtractionFn(0, 1)
- ))
+ "dim2",
+ ImmutableList.of("1", "2", "a", "d"),
+ new SubstringDimExtractionFn(0, 1)
+ ))
.aggregators(AGGS(new CountAggregatorFactory("a0")))
.context(TIMESERIES_CONTEXT_DEFAULT)
.build()
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
index 0faddca7b19..c68dbc89b70 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/util/QueryLogHook.java
@@ -20,18 +20,18 @@
package org.apache.druid.sql.calcite.util;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
+import org.apache.calcite.runtime.Hook;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.Query;
-import org.apache.calcite.runtime.Hook;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runners.model.Statement;
import java.util.List;
+import java.util.function.Consumer;
/**
* JUnit Rule that adds a Calcite hook to log and remember Druid queries.
@@ -78,22 +78,16 @@ public void evaluate() throws Throwable
{
clearRecordedQueries();
- final Function<Object, Object> function = new Function<Object,
Object>()
- {
- @Override
- public Object apply(final Object query)
- {
- try {
- recordedQueries.add((Query) query);
- log.info(
- "Issued query: %s",
-
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query)
- );
- }
- catch (Exception e) {
- log.warn(e, "Failed to serialize query: %s", query);
- }
- return null;
+ final Consumer<Object> function = query -> {
+ try {
+ recordedQueries.add((Query) query);
+ log.info(
+ "Issued query: %s",
+
objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(query)
+ );
+ }
+ catch (Exception e) {
+ log.warn(e, "Failed to serialize query: %s", query);
}
};
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]