Repository: hive
Updated Branches:
  refs/heads/master 4a7bc89f9 -> bd3889e9f


HIVE-16474: Upgrade Druid version to 0.10 (Nishant Bangarwa, reviewed by Jesus Camacho Rodriguez)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bd3889e9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bd3889e9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bd3889e9

Branch: refs/heads/master
Commit: bd3889e9f6e887231c796263c5e8b76eaca2fc3a
Parents: 4a7bc89
Author: Nishant Bangarwa <nishant.mon...@gmail.com>
Authored: Fri May 26 09:21:10 2017 +0100
Committer: Jesus Camacho Rodriguez <jcama...@apache.org>
Committed: Fri May 26 09:21:10 2017 +0100

----------------------------------------------------------------------
 .../hive/druid/DruidStorageHandlerUtils.java    | 34 ++++++++----
 .../hadoop/hive/druid/io/DruidOutputFormat.java |  7 ++-
 .../hadoop/hive/druid/io/DruidRecordWriter.java |  2 +-
 .../serde/HiveDruidSerializationModule.java     | 37 ++++++++++++++
 .../serde/PeriodGranularitySerializer.java      | 54 ++++++++++++++++++++
 .../hive/druid/DerbyConnectorTestUtility.java   |  4 +-
 .../hadoop/hive/druid/TestDruidSerDe.java       |  2 +-
 .../hive/druid/TestDruidStorageHandler.java     |  2 +-
 .../TestHiveDruidQueryBasedInputFormat.java     | 38 ++++++++------
 .../hive/ql/io/TestDruidRecordWriter.java       | 11 ++--
 pom.xml                                         |  2 +-
 11 files changed, 151 insertions(+), 42 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
index adf013b..0e33836 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandlerUtils.java
@@ -17,8 +17,16 @@
  */
 package org.apache.hadoop.hive.druid;
 
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.util.VersionUtil;
+import com.fasterxml.jackson.databind.InjectableValues;
+import com.fasterxml.jackson.databind.JsonSerializer;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.SerializerProvider;
 import com.fasterxml.jackson.databind.jsontype.NamedType;
+import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
+import com.fasterxml.jackson.databind.module.SimpleModule;
 import com.fasterxml.jackson.dataformat.smile.SmileFactory;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableList;
@@ -39,6 +47,7 @@ import io.druid.metadata.MetadataStorageTablesConfig;
 import io.druid.metadata.SQLMetadataConnector;
 import io.druid.metadata.storage.mysql.MySQLConnector;
 import io.druid.query.BaseQuery;
+import io.druid.query.select.SelectQueryConfig;
 import io.druid.segment.IndexIO;
 import io.druid.segment.IndexMergerV9;
 import io.druid.segment.column.ColumnConfig;
@@ -50,7 +59,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.druid.serde.HiveDruidSerializationModule;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.io.retry.RetryPolicies;
 import org.apache.hadoop.io.retry.RetryProxy;
@@ -75,17 +84,12 @@ import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.Reader;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
 import java.net.InetAddress;
 import java.net.URL;
-import java.net.URLDecoder;
 import java.net.UnknownHostException;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Enumeration;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -93,10 +97,6 @@ import java.util.Set;
 import java.util.TimeZone;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-
-import static org.apache.hadoop.hive.ql.exec.Utilities.jarFinderGetJar;
 
 /**
  * Utils class for Druid storage handler.
@@ -117,6 +117,20 @@ public final class DruidStorageHandlerUtils {
    */
  public static final ObjectMapper SMILE_MAPPER = new DefaultObjectMapper(new SmileFactory());
 
+  static
+  {
+    // This is needed for serde of PagingSpec as it uses JacksonInject for injecting SelectQueryConfig
+    InjectableValues.Std injectableValues = new InjectableValues.Std().addValue(
+        SelectQueryConfig.class,
+        new SelectQueryConfig(false)
+    );
+    JSON_MAPPER.setInjectableValues(injectableValues);
+    SMILE_MAPPER.setInjectableValues(injectableValues);
+    HiveDruidSerializationModule hiveDruidSerializationModule = new HiveDruidSerializationModule();
+    JSON_MAPPER.registerModule(hiveDruidSerializationModule);
+    SMILE_MAPPER.registerModule(hiveDruidSerializationModule);
+  }
+
   private static final int NUM_RETRIES = 8;
 
   private static final int SECONDS_BETWEEN_RETRIES = 2;
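
Background on the new static block: per its inline comment, Druid 0.10's PagingSpec takes its SelectQueryConfig through Jackson's @JacksonInject mechanism, so any mapper that round-trips select queries must carry an injectable value for it. Below is a minimal, self-contained sketch of that Jackson pattern; the Config and Spec classes are hypothetical stand-ins for SelectQueryConfig and PagingSpec, not Druid classes.

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InjectableValuesSketch {
  // Hypothetical stand-in for SelectQueryConfig.
  static class Config {
    final boolean flag;
    Config(boolean flag) { this.flag = flag; }
  }

  // Hypothetical stand-in for PagingSpec: "config" never appears in the
  // JSON payload; Jackson supplies it from the mapper's InjectableValues.
  static class Spec {
    final int threshold;
    final Config config;

    @JsonCreator
    Spec(@JsonProperty("threshold") int threshold, @JacksonInject Config config) {
      this.threshold = threshold;
      this.config = config;
    }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Without this call, readValue fails because Jackson has nothing to
    // inject for Config -- the failure mode the static block above guards
    // against for SelectQueryConfig on JSON_MAPPER and SMILE_MAPPER.
    mapper.setInjectableValues(
        new InjectableValues.Std().addValue(Config.class, new Config(false)));
    Spec spec = mapper.readValue("{\"threshold\":5}", Spec.class);
    System.out.println(spec.threshold + " / " + spec.config.flag);
  }
}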

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
index fbdd4c9..31db86a 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidOutputFormat.java
@@ -21,7 +21,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
-import com.metamx.common.Granularity;
 import io.druid.data.input.impl.DimensionSchema;
 import io.druid.data.input.impl.DimensionsSpec;
 import io.druid.data.input.impl.InputRowParser;
@@ -29,7 +28,7 @@ import io.druid.data.input.impl.MapInputRowParser;
 import io.druid.data.input.impl.StringDimensionSchema;
 import io.druid.data.input.impl.TimeAndDimsParseSpec;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularity;
+import io.druid.java.util.common.granularity.Granularity;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.DoubleSumAggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
@@ -106,8 +105,8 @@ public class DruidOutputFormat<K, V> implements HiveOutputFormat<K, DruidWritabl
             hdfsDataSegmentPusherConfig, jc, DruidStorageHandlerUtils.JSON_MAPPER);
 
     final GranularitySpec granularitySpec = new UniformGranularitySpec(
-            Granularity.valueOf(segmentGranularity),
-            QueryGranularity.fromString(
+            Granularity.fromString(segmentGranularity),
+            Granularity.fromString(
                    tableProperties.getProperty(Constants.DRUID_QUERY_GRANULARITY) == null
                            ? "NONE"
                            : tableProperties.getProperty(Constants.DRUID_QUERY_GRANULARITY)),
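
Note on the granularity change above: Druid 0.10 merges the old segment granularity (com.metamx.common.Granularity) and query granularity (io.druid.granularity.QueryGranularity) into the single io.druid.java.util.common.granularity.Granularity type, so both table properties now parse through one factory method. A minimal sketch of the resulting call pattern (the string values are examples):

import io.druid.java.util.common.granularity.Granularity;

public class GranularitySketch {
  public static void main(String[] args) {
    // Segment and query granularity share one factory in Druid 0.10.
    Granularity segmentGran = Granularity.fromString("DAY");
    // "NONE" is the fallback used above when DRUID_QUERY_GRANULARITY is unset.
    Granularity queryGran = Granularity.fromString("NONE");
    System.out.println(segmentGran + " / " + queryGran);
  }
}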

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
index 8d22df6..e97f588 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/io/DruidRecordWriter.java
@@ -25,10 +25,10 @@ import com.google.common.base.Suppliers;
 import com.google.common.base.Throwables;
 import com.google.common.collect.FluentIterable;
 import com.google.common.collect.Lists;
-import com.metamx.common.Granularity;
 import io.druid.data.input.Committer;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.MapBasedInputRow;
+import io.druid.java.util.common.granularity.Granularity;
 import io.druid.segment.indexing.DataSchema;
 import io.druid.segment.indexing.RealtimeTuningConfig;
 import io.druid.segment.loading.DataSegmentPusher;

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/HiveDruidSerializationModule.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/HiveDruidSerializationModule.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/HiveDruidSerializationModule.java
new file mode 100644
index 0000000..0d56fc5
--- /dev/null
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/HiveDruidSerializationModule.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.druid.serde;
+
+import io.druid.java.util.common.granularity.PeriodGranularity;
+
+import com.fasterxml.jackson.core.util.VersionUtil;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+/**
+ * This class is used to define/override any serde behavior for classes from druid.
+ * Currently it is used to override the default behavior when serializing PeriodGranularity to include user timezone.
+ */
+public class HiveDruidSerializationModule extends SimpleModule {
+  private static final String NAME = "HiveDruidSerializationModule";
+  private static final VersionUtil VERSION_UTIL = new VersionUtil() {};
+
+  public HiveDruidSerializationModule() {
+    super(NAME, VERSION_UTIL.version());
+    addSerializer(PeriodGranularity.class, new PeriodGranularitySerializer());
+  }
+}
\ No newline at end of file
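
The new class follows Jackson's standard SimpleModule pattern: construct a module, addSerializer for the class to override, then registerModule on the target mapper, as the static block in DruidStorageHandlerUtils now does for JSON_MAPPER and SMILE_MAPPER. A minimal standalone sketch of the same pattern; Greeting and its serializer are hypothetical, standing in for PeriodGranularity and PeriodGranularitySerializer.

import java.io.IOException;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonSerializer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.module.SimpleModule;

public class ModuleSketch {
  static class Greeting { public final String who = "druid"; }

  public static void main(String[] args) throws Exception {
    SimpleModule module = new SimpleModule("ModuleSketch");
    // Override the default bean serialization for one class, as
    // HiveDruidSerializationModule does for PeriodGranularity.
    module.addSerializer(Greeting.class, new JsonSerializer<Greeting>() {
      @Override
      public void serialize(Greeting value, JsonGenerator gen,
              SerializerProvider serializers) throws IOException {
        gen.writeString("hello, " + value.who);
      }
    });
    ObjectMapper mapper = new ObjectMapper();
    mapper.registerModule(module);
    System.out.println(mapper.writeValueAsString(new Greeting())); // "hello, druid"
  }
}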

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/PeriodGranularitySerializer.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/PeriodGranularitySerializer.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/PeriodGranularitySerializer.java
new file mode 100644
index 0000000..3ea4727
--- /dev/null
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/serde/PeriodGranularitySerializer.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.druid.serde;
+
+import io.druid.java.util.common.granularity.PeriodGranularity;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.jsontype.TypeSerializer;
+
+import org.joda.time.DateTimeZone;
+
+import java.io.IOException;
+
+public class PeriodGranularitySerializer extends JsonSerializer<PeriodGranularity> {
+
+  @Override
+  public void serialize(PeriodGranularity granularity, JsonGenerator jsonGenerator,
+          SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
+    // Set timezone based on user timezone if origin is not already set
+    // as it is default Hive time semantics to consider user timezone.
+    PeriodGranularity granularityWithUserTimezone = new PeriodGranularity(
+            granularity.getPeriod(),
+            granularity.getOrigin(),
+            DateTimeZone.getDefault()
+    );
+    granularityWithUserTimezone.serialize(jsonGenerator, serializerProvider);
+  }
+
+  @Override
+  public void serializeWithType(PeriodGranularity value, JsonGenerator gen,
+          SerializerProvider serializers, TypeSerializer typeSer) throws IOException {
+    serialize(value, gen, serializers);
+  }
+}
+
+
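
The serializer rebuilds the granularity with DateTimeZone.getDefault() before delegating to PeriodGranularity's own JsonSerializable output, so a granularity written without an explicit zone carries the JVM (user) time zone instead of UTC. A rough usage sketch under that assumption; the expected JSON shape mirrors the updated test strings further below.

import io.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.hadoop.hive.druid.DruidStorageHandlerUtils;
import org.joda.time.Period;

public class TimezoneSerializationSketch {
  public static void main(String[] args) throws Exception {
    // No origin and no explicit zone; the serializer above substitutes the
    // JVM default zone when this value is written out.
    PeriodGranularity g = new PeriodGranularity(Period.days(1), null, null);
    System.out.println(DruidStorageHandlerUtils.JSON_MAPPER.writeValueAsString(g));
    // Expected shape (zone depends on the JVM default), e.g.:
    // {"type":"period","period":"P1D","timeZone":"America/Los_Angeles","origin":null}
  }
}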

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java
index f9304a5..627f078 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/DerbyConnectorTestUtility.java
@@ -23,6 +23,8 @@ import com.google.common.base.Suppliers;
 import io.druid.metadata.MetadataStorageConnectorConfig;
 import io.druid.metadata.MetadataStorageTablesConfig;
 import io.druid.metadata.storage.derby.DerbyConnector;
+import io.druid.metadata.storage.derby.DerbyMetadataStorage;
+
 import org.junit.Assert;
 import org.junit.rules.ExternalResource;
 import org.skife.jdbi.v2.DBI;
@@ -46,7 +48,7 @@ public class DerbyConnectorTestUtility extends DerbyConnector {
           Supplier<MetadataStorageTablesConfig> dbTables,
           String jdbcUri
   ) {
-    super(config, dbTables, new DBI(jdbcUri + ";create=true"));
+    super(new DerbyMetadataStorage(config.get()), config, dbTables, new DBI(jdbcUri + ";create=true"));
     this.jdbcUri = jdbcUri;
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
index a67afdb..1bd5d84 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidSerDe.java
@@ -554,7 +554,7 @@ public class TestDruidSerDe {
     Query<?> query = null;
     DruidQueryRecordReader<?, ?> reader = null;
     List<?> resultsList = null;
-    ObjectMapper mapper = new DefaultObjectMapper();
+    ObjectMapper mapper = DruidStorageHandlerUtils.JSON_MAPPER;
     switch (queryType) {
       case Query.TIMESERIES:
         query = mapper.readValue(jsonQuery, TimeseriesQuery.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
index 1fe155a..dca558e 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestDruidStorageHandler.java
@@ -193,7 +193,7 @@ public class TestDruidStorageHandler {
     LocalFileSystem localFileSystem = FileSystem.getLocal(config);
 
     Path segmentOutputPath = JobHelper
-            .makeSegmentOutputPath(new Path(segmentRootPath), localFileSystem, dataSegment);
+            .makeFileNamePath(new Path(segmentRootPath), localFileSystem, dataSegment, JobHelper.INDEX_ZIP);
     Path indexPath = new Path(segmentOutputPath, "index.zip");
     DataSegment dataSegmentWithLoadspect = DataSegment.builder(dataSegment).loadSpec(
             ImmutableMap.<String, Object>of("path", indexPath)).build();

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
index bb4011b..2aeb279 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/TestHiveDruidQueryBasedInputFormat.java
@@ -147,8 +147,9 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
          + "\"dataSource\":{\"type\":\"table\",\"name\":\"sample_datasource\"},"
          + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2012-01-01T00:00:00.000-08:00/2012-01-03T00:00:00.000-08:00\"]},"
          + "\"descending\":true,"
+          + "\"virtualColumns\":[],"
          + "\"filter\":null,"
-          + "\"granularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1969-12-31T16:00:00.000-08:00\"},"
+          + "\"granularity\":{\"type\":\"period\",\"period\":\"P1D\",\"timeZone\":\"America/Los_Angeles\",\"origin\":null},"
           + "\"aggregations\":[],"
           + "\"postAggregations\":[],"
           + "\"context\":null}, [localhost:8082]}]";
@@ -178,14 +179,15 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
   private static final String TOPN_QUERY_SPLIT =
       "[HiveDruidSplit{{\"queryType\":\"topN\","
           + "\"dataSource\":{\"type\":\"table\",\"name\":\"sample_data\"},"
-          + "\"dimension\":{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"sample_dim\",\"outputName\":\"sample_dim\"},"
+          + "\"virtualColumns\":[],"
+          + "\"dimension\":{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"sample_dim\",\"outputName\":\"sample_dim\",\"outputType\":\"STRING\"},"
           + "\"metric\":{\"type\":\"LegacyTopNMetricSpec\",\"metric\":\"count\"},"
           + "\"threshold\":5,"
           + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2013-08-31T00:00:00.000-07:00/2013-09-03T00:00:00.000-07:00\"]},"
           + "\"filter\":null,"
           + "\"granularity\":{\"type\":\"all\"},"
-          + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"count\",\"fieldName\":\"count\"},"
-          + "{\"type\":\"doubleSum\",\"name\":\"some_metric\",\"fieldName\":\"some_metric\"}],"
+          + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"count\",\"fieldName\":\"count\",\"expression\":null},"
+          + "{\"type\":\"doubleSum\",\"name\":\"some_metric\",\"fieldName\":\"some_metric\",\"expression\":null}],"
           + "\"postAggregations\":[],"
           + "\"context\":null,"
           + "\"descending\":false}, [localhost:8082]}]";
@@ -209,12 +211,13 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
       "[HiveDruidSplit{{\"queryType\":\"groupBy\","
           + "\"dataSource\":{\"type\":\"table\",\"name\":\"sample_datasource\"},"
           + "\"intervals\":{\"type\":\"LegacySegmentSpec\",\"intervals\":[\"2012-01-01T00:00:00.000-08:00/2012-01-03T00:00:00.000-08:00\"]},"
+          + "\"virtualColumns\":[],"
           + "\"filter\":null,"
-          + "\"granularity\":{\"type\":\"duration\",\"duration\":86400000,\"origin\":\"1969-12-31T16:00:00.000-08:00\"},"
-          + "\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"country\",\"outputName\":\"country\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"device\",\"outputName\":\"device\"}],"
-          + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"total_usage\",\"fieldName\":\"user_count\"},"
-          + "{\"type\":\"doubleSum\",\"name\":\"data_transfer\",\"fieldName\":\"data_transfer\"}],"
+          + "\"granularity\":{\"type\":\"period\",\"period\":\"P1D\",\"timeZone\":\"America/Los_Angeles\",\"origin\":null},"
+          + "\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"country\",\"outputName\":\"country\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"device\",\"outputName\":\"device\",\"outputType\":\"STRING\"}],"
+          + "\"aggregations\":[{\"type\":\"longSum\",\"name\":\"total_usage\",\"fieldName\":\"user_count\",\"expression\":null},"
+          + "{\"type\":\"doubleSum\",\"name\":\"data_transfer\",\"fieldName\":\"data_transfer\",\"expression\":null}],"
           + "\"postAggregations\":[],"
           + "\"having\":null,"
           + "\"limitSpec\":{\"type\":\"default\",\"columns\":[{\"dimension\":\"country\",\"direction\":\"ascending\",\"dimensionOrder\":{\"type\":\"lexicographic\"}},"
@@ -238,15 +241,16 @@ public class TestHiveDruidQueryBasedInputFormat extends TestCase {
           + "\"descending\":false,"
           + "\"filter\":null,"
           + "\"granularity\":{\"type\":\"all\"},"
-          + "\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"robot\",\"outputName\":\"robot\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"namespace\",\"outputName\":\"namespace\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"anonymous\",\"outputName\":\"anonymous\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"unpatrolled\",\"outputName\":\"unpatrolled\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"page\",\"outputName\":\"page\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"language\",\"outputName\":\"language\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"newpage\",\"outputName\":\"newpage\"},"
-          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"user\",\"outputName\":\"user\"}],"
+          + "\"dimensions\":[{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"robot\",\"outputName\":\"robot\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"namespace\",\"outputName\":\"namespace\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"anonymous\",\"outputName\":\"anonymous\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"unpatrolled\",\"outputName\":\"unpatrolled\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"page\",\"outputName\":\"page\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"language\",\"outputName\":\"language\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"newpage\",\"outputName\":\"newpage\",\"outputType\":\"STRING\"},"
+          + "{\"type\":\"LegacyDimensionSpec\",\"dimension\":\"user\",\"outputName\":\"user\",\"outputType\":\"STRING\"}],"
           + "\"metrics\":[\"count\",\"added\",\"delta\",\"variation\",\"deleted\"],"
+          + "\"virtualColumns\":[],"
           + "\"pagingSpec\":{\"pagingIdentifiers\":{},\"threshold\":5,\"fromNext\":false},"
           + "\"context\":{\"druid.query.fetch\":true}}, [localhost:8082]}]";
 

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
----------------------------------------------------------------------
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
index d9e01fe..d5b217a 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/ql/io/TestDruidRecordWriter.java
@@ -23,7 +23,6 @@ import com.google.common.base.Function;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
-import com.metamx.common.Granularity;
 import io.druid.data.input.Firehose;
 import io.druid.data.input.InputRow;
 import io.druid.data.input.impl.DimensionSchema;
@@ -33,7 +32,7 @@ import io.druid.data.input.impl.MapInputRowParser;
 import io.druid.data.input.impl.StringDimensionSchema;
 import io.druid.data.input.impl.TimeAndDimsParseSpec;
 import io.druid.data.input.impl.TimestampSpec;
-import io.druid.granularity.QueryGranularities;
+import io.druid.java.util.common.granularity.Granularities;
 import io.druid.query.aggregation.AggregatorFactory;
 import io.druid.query.aggregation.LongSumAggregatorFactory;
 import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory;
@@ -110,7 +109,7 @@ public class TestDruidRecordWriter {
           )
   );
 
-  // This test need this patch https://github.com/druid-io/druid/pull/3483
+  // This test fails due to conflict of guava classes with hive-exec jar.
   @Ignore
   @Test
   public void testWrite() throws IOException, SegmentLoadingException {
@@ -136,7 +135,7 @@ public class TestDruidRecordWriter {
                    new HyperUniquesAggregatorFactory("unique_hosts", "unique_hosts")
             },
             new UniformGranularitySpec(
-                    Granularity.DAY, QueryGranularities.NONE, ImmutableList.of(INTERVAL_FULL)
+                    Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)
             ),
             objectMapper
     );
@@ -167,7 +166,7 @@ public class TestDruidRecordWriter {
               ) {
                return new DruidWritable(ImmutableMap.<String, Object>builder().putAll(input)
                        .put(Constants.DRUID_TIMESTAMP_GRANULARITY_COL_NAME,
-                                Granularity.DAY.truncate(
+                                Granularities.DAY.bucketStart(
                                        new DateTime((long) input
                                                .get(DruidTable.DEFAULT_TIMESTAMP_COLUMN)))
                                         .getMillis()
@@ -194,7 +193,7 @@ public class TestDruidRecordWriter {
             ImmutableList.of("host"),
             ImmutableList.of("visited_sum", "unique_hosts"),
             null,
-            QueryGranularities.NONE
+            Granularities.NONE
     );
 
     List<InputRow> rows = Lists.newArrayList();
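
The same package move shows up in these tests: QueryGranularities becomes io.druid.java.util.common.granularity.Granularities, and 0.9.2's Granularity.DAY.truncate(t) is spelled Granularities.DAY.bucketStart(t) in 0.10. A minimal sketch of the renamed call (the timestamp is arbitrary):

import io.druid.java.util.common.granularity.Granularities;
import org.joda.time.DateTime;

public class BucketStartSketch {
  public static void main(String[] args) {
    // bucketStart floors a timestamp to the start of its DAY bucket --
    // what truncate() did before the rename.
    DateTime start = Granularities.DAY.bucketStart(new DateTime("2017-05-26T09:21:10Z"));
    System.out.println(start); // e.g. 2017-05-26T00:00:00.000Z
  }
}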

http://git-wip-us.apache.org/repos/asf/hive/blob/bd3889e9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 30fa50b..e3ff84f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -137,7 +137,7 @@
     <derby.version>10.10.2.0</derby.version>
     <dropwizard.version>3.1.0</dropwizard.version>
     <dropwizard-metrics-hadoop-metrics2-reporter.version>0.1.2</dropwizard-metrics-hadoop-metrics2-reporter.version>
-    <druid.version>0.9.2</druid.version>
+    <druid.version>0.10.0</druid.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
     <h2database.version>1.3.166</h2database.version>
