[ 
https://issues.apache.org/jira/browse/DRILL-6242?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16464217#comment-16464217
 ] 

ASF GitHub Bot commented on DRILL-6242:
---------------------------------------

parthchandra commented on a change in pull request #1247: DRILL-6242 Use 
java.time.Local{Date|Time|DateTime} for Drill Date, Time, and Timestamp types
URL: https://github.com/apache/drill/pull/1247#discussion_r185961883
 
 

 ##########
 File path: 
exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestNestedDateTimeTimestamp.java
 ##########
 @@ -0,0 +1,256 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.physical.impl;
+
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.drill.exec.expr.fn.impl.DateUtility;
+import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * For DRILL-6242, output for Date, Time, Timestamp should use different 
classes
+ */
+public class TestNestedDateTimeTimestamp extends BaseTestQuery {
+    private static final String DATAFILE = "cp.`datetime.parquet`";
+    private static final Map<String,Object> expectedRecord = new 
TreeMap<String,Object>();
+
+    static {
+        /**
+         * Data in the parquet file represents this equivalent JSON, but with 
typed date, time, and timestamps:
+         * {
+         *    "date" : "1970-01-11",
+         *    "time" : "00:00:03.600",
+         *    "timestamp" : "2018-03-23T17:40:52.123Z",
+         *    "date_list" : [ "1970-01-11" ],
+         *    "time_list" : [ "00:00:03.600" ],
+         *    "timestamp_list" : [ "2018-03-23T17:40:52.123Z" ],
+         *    "time_map" : {
+         *      "date" : "1970-01-11",
+         *      "time" : "00:00:03.600",
+         *      "timestamp" : "2018-03-23T17:40:52.123Z"
+         *    }
+         *  }
+         *
+         * Note that when the above data is read into Drill, Drill modifies 
the timestamp
+         * to the local time zone, preserving the <date> and <time> values.  
This effectively
+         * changes the timestamp, if the time zone is not UTC.
+         */
+
+        LocalDate date = DateUtility.parseLocalDate("1970-01-11");
+        LocalTime time = DateUtility.parseLocalTime("00:00:03.600");
+        LocalDateTime timestamp = DateUtility.parseLocalDateTime("2018-03-23 
17:40:52.123");
+        expectedRecord.put("`date`", date);
+        expectedRecord.put("`time`", time);
+        expectedRecord.put("`timestamp`", timestamp);
+        expectedRecord.put("`date_list`", Arrays.asList(date));
+        expectedRecord.put("`time_list`", Arrays.asList(time));
+        expectedRecord.put("`timestamp_list`", Arrays.asList(timestamp));
+        Map<String,Object> nestedMap = new TreeMap<String,Object>();
+        nestedMap.put("date", date);
+        nestedMap.put("time", time);
+        nestedMap.put("timestamp", timestamp);
+
+        expectedRecord.put("`time_map`", nestedMap);
+    }
+
+
+    /**
+     * Test reading from the parquet file that contains nested time, date, 
and timestamp
+     */
+    @Test
+    public void testNested() throws Exception {
+      String query = String.format("select * from %s limit 1", DATAFILE);
+      testBuilder()
+              .sqlQuery(query)
+              .ordered()
+              .baselineRecords(Arrays.asList(expectedRecord))
+              .build()
+              .run();
+    }
+
+    /**
+     * Test the textual display to make sure it is consistent with actual JSON 
output
+     */
+    @Test
+    public void testNestedDateTimePrint() throws Exception {
+        List<QueryDataBatch> resultList = 
testSqlWithResults(String.format("select * from %s limit 1", DATAFILE));
+        String actual = getResultString(resultList, " | ");
+
+        final String expected =
+                "date | time | timestamp | date_list | time_list | 
timestamp_list | time_map\n" +
+                "1970-01-11 | 00:00:03.600 | 2018-03-23 17:40:52.123 | 
[\"1970-01-11\"] | [\"00:00:03.600\"] | [\"2018-03-23 17:40:52.123\"] | 
{\"date\":\"1970-01-11\",\"time\":\"00:00:03.600\",\"timestamp\":\"2018-03-23 
17:40:52.123\"}";
+
+        Assert.assertEquals(expected.trim(), actual.trim());
+    }
+
+    /**
+     * Test that the JSON output is consistent with the previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASJson() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime";
+        try {
+            test("alter session set store.format = 'json'");
+            test("alter session set store.json.extended_types = false");
+            test("use dfs.tmp");
+            test("create table " + testName + "_json as " + query);
+
+            final String readQuery = "select * from `" + testName + "_json` t1 
";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("baseline_nested_datetime.json")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_json");
+          test("alter session reset store.format ");
+          test("alter session reset store.json.extended_types ");
+        }
+    }
+
+    /**
+     * Test that the extended JSON output is consistent with the previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASExtendedJson() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime_extended";
+        try {
+            test("alter session set store.format = 'json'");
+            test("alter session set store.json.extended_types = true");
+            test("use dfs.tmp");
+            test("create table " + testName + "_json as " + query);
+
+            final String readQuery = "select * from `" + testName + "_json` t1 
";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("datetime.parquet")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_json");
+          test("alter session reset store.format ");
+          test("alter session reset store.json.extended_types ");
+        }
+    }
+
+    /**
+     * Test that the parquet output is consistent with the previous behavior
+     */
+    @Test
+    public void testNestedDateTimeCTASParquet() throws Exception {
+        String query = String.format("select * from %s limit 1", DATAFILE);
+        String testName = "ctas_nested_datetime_extended";
+        try {
+            test("alter session set store.format = 'parquet'");
+            test("use dfs.tmp");
+            test("create table " + testName + "_parquet as " + query);
+
+            final String readQuery = "select * from `" + testName + "_parquet` 
t1 ";
+
+            testBuilder()
+                .sqlQuery(readQuery)
+                .ordered()
+                .jsonBaselineFile("datetime.parquet")
+                .build()
+                .run();
+        } finally {
+          test("drop table " + testName + "_parquet");
+          test("alter session reset store.format ");
+        }
+    }
+
+    /**
+     * Testing time zone change and revert
+     */
+    @Test
+    public void testTimeZoneChangeAndReverse() throws Exception {
+        long timeMillis[] = new long[]{864000000L, 3600L, 1521826852123L};
+
+        for (int i = 0 ; i < timeMillis.length ; i++) {
+            OffsetDateTime time1 = 
OffsetDateTime.ofInstant(Instant.ofEpochMilli(timeMillis[i]), ZoneOffset.UTC);
+            OffsetDateTime time2 = 
time1.toLocalDateTime().atZone(ZoneOffset.systemDefault()).toOffsetDateTime();
+            OffsetDateTime time3 = 
time2.toLocalDateTime().atOffset(ZoneOffset.UTC);
+
+            System.out.println("time1 = " + time1 + ", time2 = " + time2 + ", 
time3 = " + time3);
 
 Review comment:
   Can you eliminate the printing to stdout from this test (and any others you 
see). We're trying to reduce the verbose output seen when running unit tests.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> Output format for nested date, time, timestamp values in an object hierarchy
> ----------------------------------------------------------------------------
>
>                 Key: DRILL-6242
>                 URL: https://issues.apache.org/jira/browse/DRILL-6242
>             Project: Apache Drill
>          Issue Type: Bug
>          Components: Execution - Data Types
>    Affects Versions: 1.12.0
>            Reporter: Jiang Wu
>            Assignee: Jiang Wu
>            Priority: Major
>             Fix For: 1.14.0
>
>
> Some storages (mapr db, mongo db, etc.) have hierarchical objects that 
> contain nested fields of date, time, timestamp types.  When a query returns 
> these objects, the output format for the nested date, time, timestamp, are 
> showing the internal object (org.joda.time.DateTime), rather than the logical 
> data value.
> For example.  Suppose in MongoDB, we have a single object that looks like 
> this:
> {code:java}
> > db.test.findOne();
> {
>     "_id" : ObjectId("5aa8487d470dd39a635a12f5"),
>     "name" : "orange",
>     "context" : {
>         "date" : ISODate("2018-03-13T21:52:54.940Z"),
>         "user" : "jack"
>     }
> }
> {code}
> Then connect Drill to the above MongoDB storage, and run the following query 
> within Drill:
> {code:java}
> > select t.context.`date`, t.context from test t; 
> +--------+---------+ 
> | EXPR$0 | context | 
> +--------+---------+ 
> | 2018-03-13 | 
> {"date":{"dayOfYear":72,"year":2018,"dayOfMonth":13,"dayOfWeek":2,"era":1,"millisOfDay":78774940,"weekOfWeekyear":11,"weekyear":2018,"monthOfYear":3,"yearOfEra":2018,"yearOfCentury":18,"centuryOfEra":20,"millisOfSecond":940,"secondOfMinute":54,"secondOfDay":78774,"minuteOfHour":52,"minuteOfDay":1312,"hourOfDay":21,"zone":{"fixed":true,"id":"UTC"},"millis":1520977974940,"chronology":{"zone":{"fixed":true,"id":"UTC"}},"afterNow":false,"beforeNow":true,"equalNow":false},"user":"jack"}
>  |
> {code}
> We can see that from the above output, when the date field is retrieved as a 
> top level column, Drill outputs a logical date value.  But when the same 
> field is within an object hierarchy, Drill outputs the internal object used 
> to hold the date value.
> The expected output is the same display for whether the date field is shown 
> as a top level column or when it is within an object hierarchy:
> {code:java}
> > select t.context.`date`, t.context from test t; 
> +--------+---------+ 
> | EXPR$0 | context | 
> +--------+---------+ 
> | 2018-03-13 | {"date":"2018-03-13","user":"jack"} |
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to