This is an automated email from the ASF dual-hosted git repository.

cwylie pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new f47d0d43114 validate projection granularity is finer than or equal to 
segment granularity (#18273)
f47d0d43114 is described below

commit f47d0d43114b82d00861526cac0723ba1e854575
Author: Clint Wylie <[email protected]>
AuthorDate: Fri Jul 18 02:28:17 2025 -0700

    validate projection granularity is finer than or equal to segment 
granularity (#18273)
    
    * validate projection granularity is finer than or equal to segment 
granularity
    * validate projection names to guard against null or empty names either, 
more test
    * validate msq projection granularity too
    * add defensive checks to fail if incremental index projection row add 
would violate minimum timestamp
    * fix bug with Granularities.decorateCursorBuildSpec not setting 
preferredOrdering to time ordering if a granularity is set
---
 .../destination/DataSourceMSQDestination.java      |  66 ++--
 .../org/apache/druid/msq/exec/MSQInsertTest.java   | 158 ++++++--
 .../data/input/impl/AggregateProjectionSpec.java   |   5 +-
 .../util/common/granularity/Granularities.java     |  15 +
 .../druid/segment/AggregateProjectionMetadata.java |   8 +-
 .../segment/incremental/IncrementalIndex.java      |   7 +-
 .../incremental/OnHeapAggregateProjection.java     |  21 +-
 .../incremental/OnheapIncrementalIndex.java        |   7 +-
 .../input/impl/AggregateProjectionSpecTest.java    |  50 ++-
 .../druid/query/groupby/GroupByQueryTest.java      |   4 +
 .../query/timeseries/TimeseriesQueryTest.java      |   9 +
 .../org/apache/druid/query/topn/TopNQueryTest.java |   4 +
 .../segment/AggregateProjectionMetadataTest.java   | 122 +++++-
 .../druid/segment/CursorFactoryProjectionTest.java |  69 ++++
 .../segment/QueryableIndexCursorHolderTest.java    |   2 +-
 .../incremental/OnheapIncrementalIndexTest.java    | 128 ++++++-
 .../apache/druid/segment/indexing/DataSchema.java  | 310 ++++++++-------
 .../druid/segment/indexing/DataSchemaTest.java     | 414 +++++++++++++--------
 18 files changed, 1022 insertions(+), 377 deletions(-)

diff --git 
a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/destination/DataSourceMSQDestination.java
 
b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/destination/DataSourceMSQDestination.java
index f62d5c36390..02132ac2312 100644
--- 
a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/destination/DataSourceMSQDestination.java
+++ 
b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/indexing/destination/DataSourceMSQDestination.java
@@ -30,6 +30,7 @@ import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.msq.querykit.ShuffleSpecFactories;
 import org.apache.druid.msq.querykit.ShuffleSpecFactory;
+import org.apache.druid.segment.indexing.DataSchema;
 import org.apache.druid.server.security.Resource;
 import org.apache.druid.server.security.ResourceType;
 import org.joda.time.Interval;
@@ -79,34 +80,8 @@ public class DataSourceMSQDestination implements 
MSQDestination
     this.projections = projections;
     this.terminalStageSpec = terminalStageSpec != null ? terminalStageSpec : 
SegmentGenerationStageSpec.instance();
 
-    if (replaceTimeChunks != null) {
-      // Verify that if replaceTimeChunks is provided, it is nonempty.
-      if (replaceTimeChunks.isEmpty()) {
-        throw new IAE("replaceTimeChunks must be null or nonempty; cannot be 
empty");
-      }
-
-      // Verify all provided time chunks are aligned with segmentGranularity.
-      for (final Interval interval : replaceTimeChunks) {
-        // ETERNITY gets a free pass.
-        if (!Intervals.ETERNITY.equals(interval)) {
-          final boolean startIsAligned =
-              
segmentGranularity.bucketStart(interval.getStart()).equals(interval.getStart());
-
-          final boolean endIsAligned =
-              
segmentGranularity.bucketStart(interval.getEnd()).equals(interval.getEnd())
-              || 
segmentGranularity.increment(segmentGranularity.bucketStart(interval.getEnd()))
-                                   .equals(interval.getEnd());
-
-          if (!startIsAligned || !endIsAligned) {
-            throw new IAE(
-                "Time chunk [%s] provided in replaceTimeChunks is not aligned 
with segmentGranularity [%s]",
-                interval,
-                segmentGranularity
-            );
-          }
-        }
-      }
-    }
+    validateReplaceTimeChunksGranularityAligned(segmentGranularity, 
replaceTimeChunks);
+    DataSchema.validateProjections(projections, segmentGranularity);
   }
 
   @JsonProperty
@@ -242,4 +217,39 @@ public class DataSourceMSQDestination implements 
MSQDestination
   {
     return Optional.of(new Resource(getDataSource(), ResourceType.DATASOURCE));
   }
+
+  private static void validateReplaceTimeChunksGranularityAligned(
+      Granularity segmentGranularity,
+      @Nullable List<Interval> replaceTimeChunks
+  )
+  {
+    if (replaceTimeChunks != null) {
+      // Verify that if replaceTimeChunks is provided, it is nonempty.
+      if (replaceTimeChunks.isEmpty()) {
+        throw new IAE("replaceTimeChunks must be null or nonempty; cannot be 
empty");
+      }
+
+      // Verify all provided time chunks are aligned with segmentGranularity.
+      for (final Interval interval : replaceTimeChunks) {
+        // ETERNITY gets a free pass.
+        if (!Intervals.ETERNITY.equals(interval)) {
+          final boolean startIsAligned =
+              
segmentGranularity.bucketStart(interval.getStart()).equals(interval.getStart());
+
+          final boolean endIsAligned =
+              
segmentGranularity.bucketStart(interval.getEnd()).equals(interval.getEnd())
+              || 
segmentGranularity.increment(segmentGranularity.bucketStart(interval.getEnd()))
+                                   .equals(interval.getEnd());
+
+          if (!startIsAligned || !endIsAligned) {
+            throw new IAE(
+                "Time chunk [%s] provided in replaceTimeChunks is not aligned 
with segmentGranularity [%s]",
+                interval,
+                segmentGranularity
+            );
+          }
+        }
+      }
+    }
+  }
 }
diff --git 
a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java
 
b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java
index d3af32295ca..719f096a4e9 100644
--- 
a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java
+++ 
b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/exec/MSQInsertTest.java
@@ -212,6 +212,24 @@ public class MSQInsertTest extends MSQTestBase
                                         new 
LongSumAggregatorFactory("sum_added", "added")
                                     }
                                 )
+                            ),
+                            new DatasourceProjectionMetadata(
+                                new AggregateProjectionSpec(
+                                    "channel_delta_daily",
+                                    VirtualColumns.create(
+                                        Granularities.toVirtualColumn(
+                                            Granularities.DAY,
+                                            
Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME
+                                        )
+                                    ),
+                                    ImmutableList.of(
+                                        new 
LongDimensionSchema(Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME),
+                                        new StringDimensionSchema("channel")
+                                    ),
+                                    new AggregatorFactory[]{
+                                        new 
LongSumAggregatorFactory("sum_delta", "delta")
+                                    }
+                                )
                             )
                         )
                     )
@@ -529,6 +547,7 @@ public class MSQInsertTest extends MSQTestBase
                                             .add("user", ColumnType.STRING)
                                             .add("added", ColumnType.LONG)
                                             .add("deleted", ColumnType.LONG)
+                                            .add("delta", ColumnType.LONG)
                                             .build();
     AggregateProjectionMetadata expectedProjection = new 
AggregateProjectionMetadata(
         new AggregateProjectionMetadata.Schema(
@@ -551,6 +570,27 @@ public class MSQInsertTest extends MSQTestBase
         ),
         16
     );
+    AggregateProjectionMetadata expectedProjection2 = new 
AggregateProjectionMetadata(
+        new AggregateProjectionMetadata.Schema(
+            "channel_delta_daily",
+            Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME,
+            VirtualColumns.create(
+                Granularities.toVirtualColumn(
+                    Granularities.DAY,
+                    Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME
+                )
+            ),
+            ImmutableList.of(Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME, 
"channel"),
+            new AggregatorFactory[] {
+                new LongSumAggregatorFactory("sum_delta", "delta")
+            },
+            ImmutableList.of(
+                
OrderBy.ascending(Granularities.GRANULARITY_VIRTUAL_COLUMN_NAME),
+                OrderBy.ascending("channel")
+            )
+        ),
+        11
+    );
 
     testIngestQuery().setSql(" insert into foo2 SELECT\n"
                              + "  floor(TIME_PARSE(\"timestamp\") to minute) 
AS __time,\n"
@@ -558,12 +598,13 @@ public class MSQInsertTest extends MSQTestBase
                              + "  page,\n"
                              + "  user,\n"
                              + "  added,\n"
-                             + "  deleted\n"
+                             + "  deleted,\n"
+                             + "  delta\n"
                              + "FROM TABLE(\n"
                              + "  EXTERN(\n"
                              + "    '{ \"files\": [" + toReadFileNameAsJson + 
"],\"type\":\"local\"}',\n"
                              + "    '{\"type\": \"json\"}',\n"
-                             + "    '[{\"name\": \"timestamp\", \"type\": 
\"string\"}, {\"name\": \"channel\", \"type\": \"string\"}, {\"name\": 
\"page\", \"type\": \"string\"}, {\"name\": \"user\", \"type\": \"string\"}, 
{\"name\": \"added\", \"type\": \"long\"}, {\"name\": \"deleted\", \"type\": 
\"long\"}]'\n"
+                             + "    '[{\"name\": \"timestamp\", \"type\": 
\"string\"}, {\"name\": \"channel\", \"type\": \"string\"}, {\"name\": 
\"page\", \"type\": \"string\"}, {\"name\": \"user\", \"type\": \"string\"}, 
{\"name\": \"added\", \"type\": \"long\"}, {\"name\": \"deleted\", \"type\": 
\"long\"}, {\"name\": \"delta\", \"type\": \"long\"}]'\n"
                              + "  )\n"
                              + ") PARTITIONED by day ")
                      .setExpectedDataSource("foo2")
@@ -575,29 +616,29 @@ public class MSQInsertTest extends MSQTestBase
                          "test",
                          0
                      )))
-                     
.setExpectedProjections(ImmutableList.of(expectedProjection))
+                     .setExpectedProjections(List.of(expectedProjection, 
expectedProjection2))
                      .setExpectedResultRows(
                          ImmutableList.of(
-                             new Object[]{1466985600000L, "#en.wikipedia", 
"Bailando 2015", "181.230.118.178", 2L, 0L},
-                             new Object[]{1466985600000L, "#en.wikipedia", 
"Richie Rich's Christmas Wish", "JasonAQuest", 0L, 2L},
-                             new Object[]{1466985600000L, "#pl.wikipedia", 
"Kategoria:Dyskusje nad usunięciem artykułu zakończone bez konsensusu − lipiec 
2016", "Beau.bot", 270L, 0L},
-                             new Object[]{1466985600000L, "#sv.wikipedia", 
"Salo Toraut", "Lsjbot", 31L, 0L},
-                             new Object[]{1466985660000L, "#ceb.wikipedia", 
"Neqerssuaq", "Lsjbot", 4150L, 0L},
-                             new Object[]{1466985660000L, "#en.wikipedia", 
"Panama Canal", "Mariordo", 496L, 0L},
-                             new Object[]{1466985660000L, "#es.wikipedia", 
"Sumo (banda)", "181.110.165.189", 0L, 173L},
-                             new Object[]{1466985660000L, "#sh.wikipedia", "El 
Terco, Bachíniva", "Kolega2357", 0L, 1L},
-                             new Object[]{1466985720000L, "#ru.wikipedia", 
"Википедия:Опросы/Унификация шаблонов «Не переведено»", "Wanderer777", 196L, 
0L},
-                             new Object[]{1466985720000L, "#sh.wikipedia", 
"Hermanos Díaz, Ascensión", "Kolega2357", 0L, 1L},
-                             new Object[]{1466989320000L, "#es.wikipedia", 
"Clasificación para la Eurocopa Sub-21 de 2017", "Guly600", 4L, 0L},
-                             new Object[]{1466989320000L, "#id.wikipedia", 
"Ibnu Sina", "Ftihikam", 106L, 0L},
-                             new Object[]{1466989320000L, "#sh.wikipedia", "El 
Sicomoro, Ascensión", "Kolega2357", 0L, 1L},
-                             new Object[]{1466989320000L, "#zh.wikipedia", 
"中共十八大以来的反腐败工作", "2001:DA8:207:E132:94DC:BA03:DFDF:8F9F", 18L, 0L},
-                             new Object[]{1466992920000L, "#de.wikipedia", 
"Benutzer Diskussion:Squasher/Archiv/2016", "TaxonBot", 2560L, 0L},
-                             new Object[]{1466992920000L, "#pt.wikipedia", 
"Dobromir Zhechev", "Ceresta", 1926L, 0L},
-                             new Object[]{1466992920000L, "#sh.wikipedia", 
"Trinidad Jiménez G., Benemérito de las Américas", "Kolega2357", 0L, 1L},
-                             new Object[]{1466992920000L, "#zh.wikipedia", 
"Wikipedia:頁面存廢討論/記錄/2016/06/27", "Tigerzeng", 1986L, 0L},
-                             new Object[]{1466992980000L, "#de.wikipedia", 
"Benutzer Diskussion:HerrSonderbar", "GiftBot", 364L, 0L},
-                             new Object[]{1466992980000L, "#en.wikipedia", 
"File:Paint.net 4.0.6 screenshot.png", "Calvin Hogg", 0L, 463L}
+                             new Object[]{1466985600000L, "#en.wikipedia", 
"Bailando 2015", "181.230.118.178", 2L, 0L, 2L},
+                             new Object[]{1466985600000L, "#en.wikipedia", 
"Richie Rich's Christmas Wish", "JasonAQuest", 0L, 2L, -2L},
+                             new Object[]{1466985600000L, "#pl.wikipedia", 
"Kategoria:Dyskusje nad usunięciem artykułu zakończone bez konsensusu − lipiec 
2016", "Beau.bot", 270L, 0L, 270L},
+                             new Object[]{1466985600000L, "#sv.wikipedia", 
"Salo Toraut", "Lsjbot", 31L, 0L, 31L},
+                             new Object[]{1466985660000L, "#ceb.wikipedia", 
"Neqerssuaq", "Lsjbot", 4150L, 0L, 4150L},
+                             new Object[]{1466985660000L, "#en.wikipedia", 
"Panama Canal", "Mariordo", 496L, 0L, 496L},
+                             new Object[]{1466985660000L, "#es.wikipedia", 
"Sumo (banda)", "181.110.165.189", 0L, 173L, -173L},
+                             new Object[]{1466985660000L, "#sh.wikipedia", "El 
Terco, Bachíniva", "Kolega2357", 0L, 1L, -1L},
+                             new Object[]{1466985720000L, "#ru.wikipedia", 
"Википедия:Опросы/Унификация шаблонов «Не переведено»", "Wanderer777", 196L, 
0L, 196L},
+                             new Object[]{1466985720000L, "#sh.wikipedia", 
"Hermanos Díaz, Ascensión", "Kolega2357", 0L, 1L, -1L},
+                             new Object[]{1466989320000L, "#es.wikipedia", 
"Clasificación para la Eurocopa Sub-21 de 2017", "Guly600", 4L, 0L, 4L},
+                             new Object[]{1466989320000L, "#id.wikipedia", 
"Ibnu Sina", "Ftihikam", 106L, 0L, 106L},
+                             new Object[]{1466989320000L, "#sh.wikipedia", "El 
Sicomoro, Ascensión", "Kolega2357", 0L, 1L, -1L},
+                             new Object[]{1466989320000L, "#zh.wikipedia", 
"中共十八大以来的反腐败工作", "2001:DA8:207:E132:94DC:BA03:DFDF:8F9F", 18L, 0L, 18L},
+                             new Object[]{1466992920000L, "#de.wikipedia", 
"Benutzer Diskussion:Squasher/Archiv/2016", "TaxonBot", 2560L, 0L, 2560L},
+                             new Object[]{1466992920000L, "#pt.wikipedia", 
"Dobromir Zhechev", "Ceresta", 1926L, 0L, 1926L},
+                             new Object[]{1466992920000L, "#sh.wikipedia", 
"Trinidad Jiménez G., Benemérito de las Américas", "Kolega2357", 0L, 1L, -1L},
+                             new Object[]{1466992920000L, "#zh.wikipedia", 
"Wikipedia:頁面存廢討論/記錄/2016/06/27", "Tigerzeng", 1986L, 0L, 1986L},
+                             new Object[]{1466992980000L, "#de.wikipedia", 
"Benutzer Diskussion:HerrSonderbar", "GiftBot", 364L, 0L, 364L},
+                             new Object[]{1466992980000L, "#en.wikipedia", 
"File:Paint.net 4.0.6 screenshot.png", "Calvin Hogg", 0L, 463L, -463L}
                          )
                      )
                      .setExpectedCountersForStageWorkerChannel(
@@ -614,6 +655,77 @@ public class MSQInsertTest extends MSQTestBase
 
   }
 
+  @MethodSource("data")
+  @ParameterizedTest(name = "{index}:with context {0}")
+  public void 
testInsertOnExternalDataSourceWithCatalogProjectionsInvalidGranularity(
+      String contextName,
+      Map<String, Object> context
+  ) throws IOException
+  {
+    final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
+    final String toReadFileNameAsJson = 
queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
+
+
+    testIngestQuery().setSql(" insert into foo2 SELECT\n"
+                             + "  floor(TIME_PARSE(\"timestamp\") to minute) 
AS __time,\n"
+                             + "  channel,\n"
+                             + "  page,\n"
+                             + "  user,\n"
+                             + "  added,\n"
+                             + "  deleted,\n"
+                             + "  delta\n"
+                             + "FROM TABLE(\n"
+                             + "  EXTERN(\n"
+                             + "    '{ \"files\": [" + toReadFileNameAsJson + 
"],\"type\":\"local\"}',\n"
+                             + "    '{\"type\": \"json\"}',\n"
+                             + "    '[{\"name\": \"timestamp\", \"type\": 
\"string\"}, {\"name\": \"channel\", \"type\": \"string\"}, {\"name\": 
\"page\", \"type\": \"string\"}, {\"name\": \"user\", \"type\": \"string\"}, 
{\"name\": \"added\", \"type\": \"long\"}, {\"name\": \"deleted\", \"type\": 
\"long\"}, {\"name\": \"delta\", \"type\": \"long\"}]'\n"
+                             + "  )\n"
+                             + ") PARTITIONED by hour")
+                     .setExpectedValidationErrorMatcher(
+                         DruidExceptionMatcher.invalidInput().expectMessageIs(
+                             "projection[channel_delta_daily] has 
granularity[{type=period, period=P1D, timeZone=UTC, origin=null}] which must be 
finer than or equal to segment granularity[{type=period, period=PT1H, 
timeZone=UTC, origin=null}]"
+                         )
+                     )
+                     .verifyPlanningErrors();
+
+  }
+
+  @MethodSource("data")
+  @ParameterizedTest(name = "{index}:with context {0}")
+  public void 
testInsertOnExternalDataSourceWithCatalogProjectionsMissingColumn(
+      String contextName,
+      Map<String, Object> context
+  ) throws IOException
+  {
+    final File toRead = getResourceAsTemporaryFile("/wikipedia-sampled.json");
+    final String toReadFileNameAsJson = 
queryFramework().queryJsonMapper().writeValueAsString(toRead.getAbsolutePath());
+
+
+    testIngestQuery().setSql(" insert into foo2 SELECT\n"
+                             + "  floor(TIME_PARSE(\"timestamp\") to minute) 
AS __time,\n"
+                             + "  channel,\n"
+                             + "  page,\n"
+                             + "  user,\n"
+                             + "  added\n"
+                             + "FROM TABLE(\n"
+                             + "  EXTERN(\n"
+                             + "    '{ \"files\": [" + toReadFileNameAsJson + 
"],\"type\":\"local\"}',\n"
+                             + "    '{\"type\": \"json\"}',\n"
+                             + "    '[{\"name\": \"timestamp\", \"type\": 
\"string\"}, {\"name\": \"channel\", \"type\": \"string\"}, {\"name\": 
\"page\", \"type\": \"string\"}, {\"name\": \"user\", \"type\": \"string\"}, 
{\"name\": \"added\", \"type\": \"long\"}, {\"name\": \"deleted\", \"type\": 
\"long\"}]'\n"
+                             + "  )\n"
+                             + ") PARTITIONED by day")
+                     .setExpectedExecutionErrorMatcher(
+                         CoreMatchers.allOf(
+                             CoreMatchers.instanceOf(ISE.class),
+                             
ThrowableMessageMatcher.hasMessage(CoreMatchers.containsString(
+                                 "projection[channel_delta_daily] contains 
aggregator[sum_delta] that is missing required field[delta] in base table")
+                             )
+                         )
+                     )
+                     .verifyExecutionError();
+
+  }
+
   @MethodSource("data")
   @ParameterizedTest(name = "{index}:with context {0}")
   public void testInsertOnFoo1WithGroupByLimitWithoutClusterBy(String 
contextName, Map<String, Object> context)
diff --git 
a/processing/src/main/java/org/apache/druid/data/input/impl/AggregateProjectionSpec.java
 
b/processing/src/main/java/org/apache/druid/data/input/impl/AggregateProjectionSpec.java
index ab4f16781f7..0fd4e614002 100644
--- 
a/processing/src/main/java/org/apache/druid/data/input/impl/AggregateProjectionSpec.java
+++ 
b/processing/src/main/java/org/apache/druid/data/input/impl/AggregateProjectionSpec.java
@@ -73,9 +73,12 @@ public class AggregateProjectionSpec
       @JsonProperty("aggregators") @Nullable AggregatorFactory[] aggregators
   )
   {
+    if (name == null || name.isEmpty()) {
+      throw InvalidInput.exception("projection name cannot be null or empty");
+    }
     this.name = name;
     if (CollectionUtils.isNullOrEmpty(groupingColumns) && (aggregators == null 
|| aggregators.length == 0)) {
-      throw InvalidInput.exception("groupingColumns and aggregators must not 
both be null or empty");
+      throw InvalidInput.exception("projection[%s] groupingColumns and 
aggregators must not both be null or empty", name);
     }
     this.groupingColumns = groupingColumns == null ? Collections.emptyList() : 
groupingColumns;
     this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : 
virtualColumns;
diff --git 
a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
 
b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
index 08de0a364ec..b3fa3e8ede7 100644
--- 
a/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
+++ 
b/processing/src/main/java/org/apache/druid/java/util/common/granularity/Granularities.java
@@ -25,10 +25,13 @@ import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Iterables;
 import org.apache.druid.math.expr.Expr;
 import org.apache.druid.math.expr.ExprMacroTable;
+import org.apache.druid.query.Order;
+import org.apache.druid.query.OrderBy;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.expression.TimestampFloorExprMacro;
 import org.apache.druid.segment.AggregateProjectionMetadata;
 import org.apache.druid.segment.CursorBuildSpec;
+import org.apache.druid.segment.Cursors;
 import org.apache.druid.segment.VirtualColumn;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.column.ColumnHolder;
@@ -39,6 +42,7 @@ import org.joda.time.chrono.ISOChronology;
 import javax.annotation.Nullable;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.Set;
 
 /**
@@ -117,6 +121,16 @@ public class Granularities
             () -> 
Arrays.stream(buildSpec.getVirtualColumns().getVirtualColumns()).iterator()
         )
     );
+    final Order existingTimeOrder = 
Cursors.getTimeOrdering(buildSpec.getPreferredOrdering());
+    final List<OrderBy> orderBy;
+    if (Order.NONE.equals(existingTimeOrder)) {
+      orderBy = ImmutableList.copyOf(
+          Iterables.concat(Cursors.ascendingTimeOrder(), 
buildSpec.getPreferredOrdering())
+      );
+    } else {
+      orderBy = buildSpec.getPreferredOrdering();
+    }
+
     final ImmutableList.Builder<String> groupingColumnsBuilder = 
ImmutableList.builder();
     groupingColumnsBuilder.add(granularityVirtual.getOutputName());
     if (buildSpec.getGroupingColumns() != null) {
@@ -125,6 +139,7 @@ public class Granularities
     return CursorBuildSpec.builder(buildSpec)
                           .setVirtualColumns(virtualColumns)
                           .setGroupingColumns(groupingColumnsBuilder.build())
+                          .setPreferredOrdering(orderBy)
                           .build();
   }
 
diff --git 
a/processing/src/main/java/org/apache/druid/segment/AggregateProjectionMetadata.java
 
b/processing/src/main/java/org/apache/druid/segment/AggregateProjectionMetadata.java
index 2a7fc1aa807..c561757268a 100644
--- 
a/processing/src/main/java/org/apache/druid/segment/AggregateProjectionMetadata.java
+++ 
b/processing/src/main/java/org/apache/druid/segment/AggregateProjectionMetadata.java
@@ -182,9 +182,15 @@ public class AggregateProjectionMetadata
         @JsonProperty("ordering") List<OrderBy> ordering
     )
     {
+      if (name == null || name.isEmpty()) {
+        throw DruidException.defensive("projection schema name cannot be null 
or empty");
+      }
       this.name = name;
       if (CollectionUtils.isNullOrEmpty(groupingColumns) && (aggregators == 
null || aggregators.length == 0)) {
-        throw DruidException.defensive("groupingColumns and aggregators must 
not both be null or empty");
+        throw DruidException.defensive("projection schema[%s] groupingColumns 
and aggregators must not both be null or empty", name);
+      }
+      if (ordering == null) {
+        throw DruidException.defensive("projection schema[%s] ordering must 
not be null", name);
       }
       this.virtualColumns = virtualColumns == null ? VirtualColumns.EMPTY : 
virtualColumns;
       this.groupingColumns = groupingColumns == null ? Collections.emptyList() 
: groupingColumns;
diff --git 
a/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java
 
b/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java
index c060f3ac4fa..de7ec928d5e 100644
--- 
a/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java
+++ 
b/processing/src/main/java/org/apache/druid/segment/incremental/IncrementalIndex.java
@@ -37,6 +37,7 @@ import org.apache.druid.data.input.Row;
 import org.apache.druid.data.input.impl.DimensionSchema;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.SpatialDimensionSchema;
+import org.apache.druid.error.DruidException;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.ISE;
@@ -489,7 +490,11 @@ public abstract class IncrementalIndex implements 
IncrementalIndexRowSelector, C
   {
     row = formatRow(row);
     if (row.getTimestampFromEpoch() < minTimestamp) {
-      throw new IAE("Cannot add row[%s] because it is below the 
minTimestamp[%s]", row, DateTimes.utc(minTimestamp));
+      throw DruidException.defensive(
+          "Cannot add row[%s] because it is below the minTimestamp[%s]",
+          row,
+          DateTimes.utc(minTimestamp)
+      );
     }
 
     final List<String> rowDimensions = row.getDimensions();
diff --git 
a/processing/src/main/java/org/apache/druid/segment/incremental/OnHeapAggregateProjection.java
 
b/processing/src/main/java/org/apache/druid/segment/incremental/OnHeapAggregateProjection.java
index 90fbe6da896..ed104ef1ddf 100644
--- 
a/processing/src/main/java/org/apache/druid/segment/incremental/OnHeapAggregateProjection.java
+++ 
b/processing/src/main/java/org/apache/druid/segment/incremental/OnHeapAggregateProjection.java
@@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
 import org.apache.druid.data.input.InputRow;
 import org.apache.druid.data.input.impl.AggregateProjectionSpec;
 import org.apache.druid.data.input.impl.DimensionSchema;
+import org.apache.druid.error.DruidException;
 import org.apache.druid.error.InvalidInput;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.query.OrderBy;
@@ -149,10 +150,24 @@ public class OnHeapAggregateProjection implements 
IncrementalIndexRowSelector
         projectionDims[i] = key.dims[parentDimensionIndex[i]];
       }
     }
+    final long timestamp;
+
+    if (projectionSchema.getTimeColumnName() != null) {
+      timestamp = 
projectionSchema.getGranularity().bucketStart(DateTimes.utc(key.getTimestamp())).getMillis();
+      if (timestamp < minTimestamp) {
+        throw DruidException.defensive(
+            "Cannot add row[%s] to projection[%s] because projection effective 
timestamp[%s] is below the minTimestamp[%s]",
+            inputRow,
+            projectionSchema.getName(),
+            DateTimes.utc(timestamp),
+            DateTimes.utc(minTimestamp)
+        );
+      }
+    } else {
+      timestamp = minTimestamp;
+    }
     final IncrementalIndexRow subKey = new IncrementalIndexRow(
-        projectionSchema.getTimeColumnName() != null
-        ? 
projectionSchema.getGranularity().bucketStart(DateTimes.utc(key.getTimestamp())).getMillis()
-        : minTimestamp,
+        timestamp,
         projectionDims,
         dimensions
     );
diff --git 
a/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java
 
b/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java
index 53cf321f8e0..f74812b4b22 100644
--- 
a/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java
+++ 
b/processing/src/main/java/org/apache/druid/segment/incremental/OnheapIncrementalIndex.java
@@ -144,12 +144,7 @@ public class OnheapIncrementalIndex extends 
IncrementalIndex
       AggregateProjectionMetadata.Schema schema = 
projectionSpec.toMetadataSchema();
       aggregateProjections.add(new AggregateProjectionMetadata(schema, 0));
       if (projections.containsKey(projectionSpec.getName())) {
-        throw DruidException.forPersona(DruidException.Persona.USER)
-                            .ofCategory(DruidException.Category.INVALID_INPUT)
-                            .build(
-                                "Found duplicate projection[%s], please remove 
and resubmit the ingestion.",
-                                projectionSpec.getName()
-                            );
+        throw DruidException.defensive("duplicate projection[%s]", 
projectionSpec.getName());
       }
 
       final OnHeapAggregateProjection projection = new 
OnHeapAggregateProjection(
diff --git 
a/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
 
b/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
index d7d1ef18221..2e15cdec490 100644
--- 
a/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
+++ 
b/processing/src/test/java/org/apache/druid/data/input/impl/AggregateProjectionSpecTest.java
@@ -30,18 +30,19 @@ import 
org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.testing.InitializedNullHandlingTest;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 
-public class AggregateProjectionSpecTest extends InitializedNullHandlingTest
+class AggregateProjectionSpecTest extends InitializedNullHandlingTest
 {
   private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();
 
   @Test
-  public void testSerde() throws JsonProcessingException
+  void testSerde() throws JsonProcessingException
   {
     AggregateProjectionSpec spec = new AggregateProjectionSpec(
         "some_projection",
@@ -60,13 +61,40 @@ public class AggregateProjectionSpecTest extends 
InitializedNullHandlingTest
             new LongSumAggregatorFactory("e", "e")
         }
     );
-    Assert.assertEquals(spec, 
JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), 
AggregateProjectionSpec.class));
+    Assertions.assertEquals(spec, 
JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), 
AggregateProjectionSpec.class));
   }
 
   @Test
-  public void testInvalidGrouping()
+  void testMissingName()
   {
-    Throwable t = Assert.assertThrows(
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionSpec(
+            null,
+            VirtualColumns.EMPTY,
+            List.of(new StringDimensionSchema("string")),
+            null
+        )
+    );
+
+    Assertions.assertEquals("projection name cannot be null or empty", 
t.getMessage());
+
+    t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionSpec(
+            "",
+            VirtualColumns.EMPTY,
+            List.of(new StringDimensionSchema("string")),
+            null
+        )
+    );
+    Assertions.assertEquals("projection name cannot be null or empty", 
t.getMessage());
+  }
+
+  @Test
+  void testInvalidGrouping()
+  {
+    Throwable t = Assertions.assertThrows(
         DruidException.class,
         () -> new AggregateProjectionSpec(
             "other_projection",
@@ -75,9 +103,9 @@ public class AggregateProjectionSpecTest extends 
InitializedNullHandlingTest
             null
         )
     );
-    Assert.assertEquals("groupingColumns and aggregators must not both be null 
or empty", t.getMessage());
+    Assertions.assertEquals("projection[other_projection] groupingColumns and 
aggregators must not both be null or empty", t.getMessage());
 
-    t = Assert.assertThrows(
+    t = Assertions.assertThrows(
         DruidException.class,
         () -> new AggregateProjectionSpec(
             "other_projection",
@@ -86,11 +114,11 @@ public class AggregateProjectionSpecTest extends 
InitializedNullHandlingTest
             null
         )
     );
-    Assert.assertEquals("groupingColumns and aggregators must not both be null 
or empty", t.getMessage());
+    Assertions.assertEquals("projection[other_projection] groupingColumns and 
aggregators must not both be null or empty", t.getMessage());
   }
 
   @Test
-  public void testEqualsAndHashcode()
+  void testEqualsAndHashcode()
   {
     EqualsVerifier.forClass(AggregateProjectionSpec.class)
                   .usingGetClass()
diff --git 
a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java 
b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java
index e2d1433cc94..da07a772a78 100644
--- 
a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java
+++ 
b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryTest.java
@@ -41,6 +41,7 @@ import org.apache.druid.query.planning.ExecutionVertex;
 import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
 import org.apache.druid.query.spec.QuerySegmentSpec;
 import org.apache.druid.segment.CursorBuildSpec;
+import org.apache.druid.segment.Cursors;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.column.ColumnType;
@@ -206,6 +207,7 @@ public class GroupByQueryTest extends 
InitializedNullHandlingTest
     Assert.assertEquals(ImmutableList.of("quality", "market", "v0"), 
buildSpec.getGroupingColumns());
     Assert.assertEquals(ImmutableList.of(QueryRunnerTestHelper.ROWS_COUNT, 
longSum), buildSpec.getAggregators());
     Assert.assertEquals(virtualColumns, buildSpec.getVirtualColumns());
+    Assert.assertEquals(List.of(), buildSpec.getPreferredOrdering());
   }
 
   @Test
@@ -258,6 +260,7 @@ public class GroupByQueryTest extends 
InitializedNullHandlingTest
         ),
         buildSpec.getVirtualColumns()
     );
+    Assert.assertEquals(Cursors.ascendingTimeOrder(), 
buildSpec.getPreferredOrdering());
   }
 
   @Test
@@ -311,6 +314,7 @@ public class GroupByQueryTest extends 
InitializedNullHandlingTest
         ),
         buildSpec.getVirtualColumns()
     );
+    Assert.assertEquals(Cursors.ascendingTimeOrder(), 
buildSpec.getPreferredOrdering());
   }
 
   @Test
diff --git 
a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java
 
b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java
index c1f8a144276..9834e221cd5 100644
--- 
a/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java
+++ 
b/processing/src/test/java/org/apache/druid/query/timeseries/TimeseriesQueryTest.java
@@ -29,6 +29,7 @@ import org.apache.druid.query.Query;
 import org.apache.druid.query.QueryRunnerTestHelper;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
 import org.apache.druid.segment.CursorBuildSpec;
+import org.apache.druid.segment.Cursors;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.column.ColumnType;
@@ -149,6 +150,10 @@ public class TimeseriesQueryTest extends 
InitializedNullHandlingTest
         buildSpec.getAggregators()
     );
     Assert.assertEquals(virtualColumns, buildSpec.getVirtualColumns());
+    Assert.assertEquals(
+        descending ? Cursors.descendingTimeOrder() : 
Cursors.ascendingTimeOrder(),
+        buildSpec.getPreferredOrdering()
+    );
   }
 
   @Test
@@ -201,5 +206,9 @@ public class TimeseriesQueryTest extends 
InitializedNullHandlingTest
         ),
         buildSpec.getVirtualColumns()
     );
+    Assert.assertEquals(
+        descending ? Cursors.descendingTimeOrder() : 
Cursors.ascendingTimeOrder(),
+        buildSpec.getPreferredOrdering()
+    );
   }
 }
diff --git 
a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java 
b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
index 1fd483eabe0..002660dcc60 100644
--- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
+++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java
@@ -40,6 +40,7 @@ import org.apache.druid.query.extraction.MapLookupExtractor;
 import org.apache.druid.query.lookup.LookupExtractionFn;
 import org.apache.druid.query.ordering.StringComparators;
 import org.apache.druid.segment.CursorBuildSpec;
+import org.apache.druid.segment.Cursors;
 import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.column.ColumnType;
@@ -51,6 +52,7 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 
 import java.io.IOException;
+import java.util.List;
 
 public class TopNQueryTest extends InitializedNullHandlingTest
 {
@@ -295,6 +297,7 @@ public class TopNQueryTest extends 
InitializedNullHandlingTest
     Assert.assertEquals(ImmutableList.of("v"), buildSpec.getGroupingColumns());
     Assert.assertEquals(ImmutableList.of(QueryRunnerTestHelper.ROWS_COUNT, 
longSum), buildSpec.getAggregators());
     Assert.assertEquals(virtualColumns, buildSpec.getVirtualColumns());
+    Assert.assertEquals(List.of(), buildSpec.getPreferredOrdering());
   }
 
   @Test
@@ -330,5 +333,6 @@ public class TopNQueryTest extends 
InitializedNullHandlingTest
         ),
         buildSpec.getVirtualColumns()
     );
+    Assert.assertEquals(Cursors.ascendingTimeOrder(), 
buildSpec.getPreferredOrdering());
   }
 }
diff --git 
a/processing/src/test/java/org/apache/druid/segment/AggregateProjectionMetadataTest.java
 
b/processing/src/test/java/org/apache/druid/segment/AggregateProjectionMetadataTest.java
index 1b2018994bd..1c7d1bfe24c 100644
--- 
a/processing/src/test/java/org/apache/druid/segment/AggregateProjectionMetadataTest.java
+++ 
b/processing/src/test/java/org/apache/druid/segment/AggregateProjectionMetadataTest.java
@@ -29,20 +29,22 @@ import org.apache.druid.query.OrderBy;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.CountAggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
+import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.testing.InitializedNullHandlingTest;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.SortedSet;
 
-public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
+class AggregateProjectionMetadataTest extends InitializedNullHandlingTest
 {
   private static final ObjectMapper JSON_MAPPER = TestHelper.makeJsonMapper();
 
   @Test
-  public void testSerde() throws JsonProcessingException
+  void testSerde() throws JsonProcessingException
   {
     AggregateProjectionMetadata spec = new AggregateProjectionMetadata(
         new AggregateProjectionMetadata.Schema(
@@ -66,7 +68,7 @@ public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
         ),
         12345
     );
-    Assert.assertEquals(
+    Assertions.assertEquals(
         spec,
         JSON_MAPPER.readValue(JSON_MAPPER.writeValueAsString(spec), 
AggregateProjectionMetadata.class)
     );
@@ -74,7 +76,7 @@ public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
 
 
   @Test
-  public void testComparator()
+  void testComparator()
   {
     SortedSet<AggregateProjectionMetadata> metadataBest = new 
ObjectAVLTreeSet<>(AggregateProjectionMetadata.COMPARATOR);
     AggregateProjectionMetadata good = new AggregateProjectionMetadata(
@@ -144,17 +146,59 @@ public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
     metadataBest.add(betterLessGroupingColumns);
     metadataBest.add(evenBetterMoreAggs);
     metadataBest.add(best);
-    Assert.assertEquals(best, metadataBest.first());
-    Assert.assertArrayEquals(
+    Assertions.assertEquals(best, metadataBest.first());
+    Assertions.assertArrayEquals(
         new AggregateProjectionMetadata[]{best, evenBetterMoreAggs, 
betterLessGroupingColumns, good},
         metadataBest.toArray()
     );
   }
 
   @Test
-  public void testInvalidGrouping()
+  void testInvalidName()
   {
-    Throwable t = Assert.assertThrows(
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionMetadata(
+            new AggregateProjectionMetadata.Schema(
+                null,
+                null,
+                null,
+                null,
+                new AggregatorFactory[]{new CountAggregatorFactory("count")},
+                List.of(OrderBy.ascending(ColumnHolder.TIME_COLUMN_NAME), 
OrderBy.ascending("count"))
+            ),
+            0
+        )
+    );
+    Assertions.assertEquals(
+        "projection schema name cannot be null or empty",
+        t.getMessage()
+    );
+
+    t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionMetadata(
+            new AggregateProjectionMetadata.Schema(
+                "",
+                null,
+                null,
+                null,
+                new AggregatorFactory[]{new CountAggregatorFactory("count")},
+                List.of(OrderBy.ascending(ColumnHolder.TIME_COLUMN_NAME), 
OrderBy.ascending("count"))
+            ),
+            0
+        )
+    );
+    Assertions.assertEquals(
+        "projection schema name cannot be null or empty",
+        t.getMessage()
+    );
+  }
+
+  @Test
+  void testInvalidGrouping()
+  {
+    Throwable t = Assertions.assertThrows(
         DruidException.class,
         () -> new AggregateProjectionMetadata(
             new AggregateProjectionMetadata.Schema(
@@ -168,9 +212,12 @@ public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
             0
         )
     );
-    Assert.assertEquals("groupingColumns and aggregators must not both be null 
or empty", t.getMessage());
+    Assertions.assertEquals(
+        "projection schema[other_projection] groupingColumns and aggregators 
must not both be null or empty",
+        t.getMessage()
+    );
 
-    t = Assert.assertThrows(
+    t = Assertions.assertThrows(
         DruidException.class,
         () -> new AggregateProjectionMetadata(
             new AggregateProjectionMetadata.Schema(
@@ -184,17 +231,62 @@ public class AggregateProjectionMetadataTest extends 
InitializedNullHandlingTest
             0
         )
     );
-    Assert.assertEquals("groupingColumns and aggregators must not both be null 
or empty", t.getMessage());
+    Assertions.assertEquals(
+        "projection schema[other_projection] groupingColumns and aggregators 
must not both be null or empty",
+        t.getMessage()
+    );
+  }
+
+  @Test
+  void testInvalidOrdering()
+  {
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionMetadata(
+            new AggregateProjectionMetadata.Schema(
+                "no order",
+                null,
+                null,
+                null,
+                new AggregatorFactory[]{new CountAggregatorFactory("count")},
+                null
+            ),
+            0
+        )
+    );
+    Assertions.assertEquals(
+        "projection schema[no order] ordering must not be null",
+        t.getMessage()
+    );
+
+    t = Assertions.assertThrows(
+        DruidException.class,
+        () -> new AggregateProjectionMetadata(
+            new AggregateProjectionMetadata.Schema(
+                "",
+                null,
+                null,
+                null,
+                new AggregatorFactory[]{new CountAggregatorFactory("count")},
+                List.of(OrderBy.ascending(ColumnHolder.TIME_COLUMN_NAME), 
OrderBy.ascending("count"))
+            ),
+            0
+        )
+    );
+    Assertions.assertEquals(
+        "projection schema name cannot be null or empty",
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testEqualsAndHashcode()
+  void testEqualsAndHashcode()
   {
     
EqualsVerifier.forClass(AggregateProjectionMetadata.class).usingGetClass().verify();
   }
 
   @Test
-  public void testEqualsAndHashcodeSchema()
+  void testEqualsAndHashcodeSchema()
   {
     EqualsVerifier.forClass(AggregateProjectionMetadata.Schema.class)
                   .withIgnoredFields("orderingWithTimeSubstitution", 
"timeColumnPosition", "granularity")
diff --git 
a/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
 
b/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
index 095fcafed27..7936b06329c 100644
--- 
a/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
+++ 
b/processing/src/test/java/org/apache/druid/segment/CursorFactoryProjectionTest.java
@@ -197,6 +197,21 @@ public class CursorFactoryProjectionTest extends 
InitializedNullHandlingTest
               new LongLastAggregatorFactory("_c_last", "c", null)
           }
       ),
+      new AggregateProjectionSpec(
+          "b_hourly_c_sum_non_time_ordered",
+          VirtualColumns.create(
+              Granularities.toVirtualColumn(Granularities.HOUR, "__gran")
+          ),
+          Arrays.asList(
+              new StringDimensionSchema("b"),
+              new LongDimensionSchema("__gran")
+          ),
+          new AggregatorFactory[]{
+              new CountAggregatorFactory("chocula"),
+              new LongSumAggregatorFactory("_c_sum", "c"),
+              new LongLastAggregatorFactory("_c_last", "c", null)
+          }
+      ),
       new AggregateProjectionSpec(
           "bf_daily_c_sum",
           VirtualColumns.create(
@@ -1102,6 +1117,60 @@ public class CursorFactoryProjectionTest extends 
InitializedNullHandlingTest
     }
   }
 
+  @Test
+  public void testQueryGranularityFitsProjectionGranularityNotTimeOrdered()
+  {
+    final GroupByQuery.Builder queryBuilder =
+        GroupByQuery.builder()
+                    .setDataSource("test")
+                    .setInterval(Intervals.ETERNITY)
+                    .addAggregator(new LongSumAggregatorFactory("c_sum", "c"));
+    if (segmentSortedByTime) {
+      queryBuilder.addDimension("b")
+                  .setGranularity(Granularities.HOUR);
+    } else {
+      queryBuilder.setGranularity(Granularities.ALL)
+                  .setDimensions(
+                      DefaultDimensionSpec.of("__gran", ColumnType.LONG),
+                      DefaultDimensionSpec.of("b")
+                  )
+                  
.setVirtualColumns(Granularities.toVirtualColumn(Granularities.HOUR, "__gran"));
+    }
+    final GroupByQuery query = queryBuilder.build();
+    final CursorBuildSpec buildSpec = 
GroupingEngine.makeCursorBuildSpec(query, null);
+    try (final CursorHolder cursorHolder = 
projectionsCursorFactory.makeCursorHolder(buildSpec)) {
+      final Cursor cursor = cursorHolder.asCursor();
+      int rowCount = 0;
+      while (!cursor.isDone()) {
+        rowCount++;
+        cursor.advance();
+      }
+      Assert.assertEquals(segmentSortedByTime ? 8 : 5, rowCount);
+    }
+
+    final Sequence<ResultRow> resultRows = groupingEngine.process(
+        query,
+        projectionsCursorFactory,
+        projectionsTimeBoundaryInspector,
+        nonBlockingPool,
+        null
+    );
+
+    final List<ResultRow> results = resultRows.toList();
+    Assert.assertEquals(5, results.size());
+    Set<Object[]> resultsInNoParticularOrder = makeArrayResultSet(
+        new Object[]{TIMESTAMP.getMillis(), "aa", 8L},
+        new Object[]{TIMESTAMP.getMillis(), "bb", 6L},
+        new Object[]{TIMESTAMP.getMillis(), "cc", 2L},
+        new Object[]{TIMESTAMP.plusHours(1).getMillis(), "aa", 1L},
+        new Object[]{TIMESTAMP.plusHours(1).getMillis(), "dd", 2L}
+    );
+    for (ResultRow row : results) {
+      Assert.assertTrue("missing row" + row.toString(), 
resultsInNoParticularOrder.contains(row.getArray()));
+    }
+  }
+
+
   @Test
   public void testQueryGranularityLargerProjectionGranularity()
   {
diff --git 
a/processing/src/test/java/org/apache/druid/segment/QueryableIndexCursorHolderTest.java
 
b/processing/src/test/java/org/apache/druid/segment/QueryableIndexCursorHolderTest.java
index dd1f0c6c27f..d0be594844d 100644
--- 
a/processing/src/test/java/org/apache/druid/segment/QueryableIndexCursorHolderTest.java
+++ 
b/processing/src/test/java/org/apache/druid/segment/QueryableIndexCursorHolderTest.java
@@ -145,7 +145,7 @@ public class QueryableIndexCursorHolderTest
   @Test
   public void testProjectionTimeBoundaryInspector()
   {
-    final DateTime startTime = DateTimes.nowUtc();
+    final DateTime startTime = 
Granularities.HOUR.bucketStart(DateTimes.nowUtc());
     final DimensionsSpec dims = DimensionsSpec.builder()
                                               .setDimensions(
                                                   Arrays.asList(
diff --git 
a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java
 
b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java
index 4d0cb647712..3ec43db9054 100644
--- 
a/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java
+++ 
b/processing/src/test/java/org/apache/druid/segment/incremental/OnheapIncrementalIndexTest.java
@@ -23,12 +23,15 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import nl.jqno.equalsverifier.EqualsVerifier;
+import org.apache.druid.data.input.MapBasedInputRow;
 import org.apache.druid.data.input.impl.AggregateProjectionSpec;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.DoubleDimensionSchema;
 import org.apache.druid.data.input.impl.LongDimensionSchema;
 import org.apache.druid.data.input.impl.StringDimensionSchema;
 import org.apache.druid.error.DruidException;
+import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.granularity.Granularities;
 import org.apache.druid.query.aggregation.AggregatorFactory;
 import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
 import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
@@ -38,9 +41,14 @@ import org.apache.druid.segment.TestHelper;
 import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.column.ColumnType;
 import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
+import org.joda.time.DateTime;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
 public class OnheapIncrementalIndexTest
 {
   private static final ObjectMapper MAPPER = TestHelper.makeJsonMapper();
@@ -117,9 +125,8 @@ public class OnheapIncrementalIndexTest
                                                         .build())
                           .buildIncrementalIndex()
     );
-    Assert.assertEquals(DruidException.Persona.USER, e.getTargetPersona());
-    Assert.assertEquals(DruidException.Category.INVALID_INPUT, 
e.getCategory());
-    Assert.assertEquals("Found duplicate projection[proj], please remove and 
resubmit the ingestion.", e.getMessage());
+    Assert.assertEquals(DruidException.Category.DEFENSIVE, e.getCategory());
+    Assert.assertEquals("duplicate projection[proj]", e.getMessage());
   }
 
   @Test
@@ -409,4 +416,119 @@ public class OnheapIncrementalIndexTest
         t.getMessage()
     );
   }
+
+
+  @Test
+  public void testTimestampOutOfRange()
+  {
+    // arrange
+    DimensionsSpec dimensionsSpec = DimensionsSpec.builder()
+                                                  
.setDimensions(ImmutableList.of(
+                                                      new 
StringDimensionSchema("string"),
+                                                      new 
LongDimensionSchema("long")
+                                                  ))
+                                                  .build();
+    AggregatorFactory aggregatorFactory = new 
DoubleSumAggregatorFactory("double", "double");
+    AggregateProjectionSpec projectionSpec = new AggregateProjectionSpec(
+        "proj",
+        VirtualColumns.EMPTY,
+        ImmutableList.of(new StringDimensionSchema("string")),
+        new AggregatorFactory[]{
+            new LongSumAggregatorFactory("sum_long", "long"),
+            new DoubleSumAggregatorFactory("double", "double")
+        }
+    );
+
+    final DateTime minTimestamp = DateTimes.nowUtc();
+    final DateTime outOfRangeTimestamp = DateTimes.nowUtc().minusDays(1);
+    final DateTime outOfRangeProjectionTimestamp = 
Granularities.YEAR.bucketStart(outOfRangeTimestamp);
+
+    final IncrementalIndex index = IndexBuilder.create()
+                                               
.schema(IncrementalIndexSchema.builder()
+                                                                             
.withDimensionsSpec(dimensionsSpec)
+                                                                             
.withRollup(true)
+                                                                             
.withMetrics(aggregatorFactory)
+                                                                             
.withProjections(List.of(projectionSpec))
+                                                                             
.withMinTimestamp(minTimestamp.getMillis())
+                                                                             
.build())
+                                               .buildIncrementalIndex();
+
+    IncrementalIndexAddResult addResult = index.add(
+        new MapBasedInputRow(
+            minTimestamp,
+            List.of("string", "long"),
+            Map.of(
+                "string", "hello",
+                "long", 10L
+            )
+        )
+    );
+    Assert.assertTrue(addResult.isRowAdded());
+
+    final Map<String, Object> rowMap = new LinkedHashMap<>();
+    rowMap.put("string", "hello");
+    rowMap.put("long", 10L);
+
+    Throwable t = Assert.assertThrows(
+        DruidException.class,
+        () -> index.add(
+            new MapBasedInputRow(
+                outOfRangeTimestamp.getMillis(),
+                List.of("string", "long"),
+                rowMap
+            )
+        )
+    );
+
+    Assert.assertEquals(
+        "Cannot add row[{timestamp="
+        + outOfRangeTimestamp
+        + ", event={string=hello, long=10}, dimensions=[string, long]}] 
because it is below the minTimestamp["
+        + minTimestamp
+        + "]",
+        t.getMessage()
+    );
+
+    AggregateProjectionSpec projectionSpecYear = new AggregateProjectionSpec(
+        "proj",
+        VirtualColumns.create(
+            Granularities.toVirtualColumn(Granularities.YEAR, "g")
+        ),
+        ImmutableList.of(new StringDimensionSchema("string"), new 
LongDimensionSchema("g")),
+        new AggregatorFactory[]{
+            new LongSumAggregatorFactory("sum_long", "long"),
+            new DoubleSumAggregatorFactory("double", "double")
+        }
+    );
+    IncrementalIndex index2 = IndexBuilder.create()
+                                          
.schema(IncrementalIndexSchema.builder()
+                                                                        
.withDimensionsSpec(dimensionsSpec)
+                                                                        
.withRollup(true)
+                                                                        
.withMetrics(aggregatorFactory)
+                                                                        
.withProjections(List.of(projectionSpecYear))
+                                                                        
.withMinTimestamp(minTimestamp.getMillis())
+                                                                        
.build())
+                                          .buildIncrementalIndex();
+
+    t = Assert.assertThrows(
+        DruidException.class,
+        () -> index2.add(
+            new MapBasedInputRow(
+                minTimestamp,
+                List.of("string", "long"),
+                rowMap
+            )
+        )
+    );
+
+    Assert.assertEquals(
+        "Cannot add row[{timestamp="
+        + minTimestamp
+        + ", event={string=hello, long=10}, dimensions=[string, long]}] to 
projection[proj] because projection effective timestamp["
+        + outOfRangeProjectionTimestamp
+        + "] is below the minTimestamp["
+        + minTimestamp + "]",
+        t.getMessage()
+    );
+  }
 }
diff --git 
a/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java 
b/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java
index de951618df2..169d5a30949 100644
--- a/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java
+++ b/server/src/main/java/org/apache/druid/segment/indexing/DataSchema.java
@@ -28,6 +28,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.Multiset;
+import com.google.common.collect.Sets;
 import com.google.common.collect.TreeMultiset;
 import org.apache.druid.common.utils.IdUtils;
 import org.apache.druid.data.input.impl.AggregateProjectionSpec;
@@ -37,12 +38,15 @@ import org.apache.druid.data.input.impl.InputRowParser;
 import org.apache.druid.data.input.impl.ParseSpec;
 import org.apache.druid.data.input.impl.TimestampSpec;
 import org.apache.druid.error.DruidException;
+import org.apache.druid.error.InvalidInput;
 import org.apache.druid.indexer.granularity.GranularitySpec;
 import org.apache.druid.indexer.granularity.UniformGranularitySpec;
 import org.apache.druid.java.util.common.IAE;
 import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.granularity.Granularity;
 import org.apache.druid.java.util.common.logger.Logger;
 import org.apache.druid.query.aggregation.AggregatorFactory;
+import org.apache.druid.segment.AggregateProjectionMetadata;
 import org.apache.druid.segment.column.ColumnHolder;
 import org.apache.druid.segment.column.ValueType;
 import org.apache.druid.segment.transform.TransformSpec;
@@ -131,6 +135,10 @@ public class DataSchema
     // Fail-fast if there are output name collisions. Note: because of the 
pull-from-parser magic in getDimensionsSpec,
     // this validation is not necessarily going to be able to catch 
everything. It will run again in getDimensionsSpec.
     computeAndValidateOutputFieldNames(this.dimensionsSpec, this.aggregators);
+    validateProjections(
+        this.projections,
+        this.granularitySpec instanceof UniformGranularitySpec ? 
this.granularitySpec.getSegmentGranularity() : null
+    );
 
     if (this.granularitySpec.isRollup() && this.aggregators.length == 0) {
       log.warn(
@@ -139,8 +147,150 @@ public class DataSchema
           dataSource
       );
     }
+
+  }
+
+  @JsonProperty
+  public String getDataSource()
+  {
+    return dataSource;
+  }
+
+  @Nullable
+  @JsonProperty("timestampSpec")
+  private TimestampSpec getGivenTimestampSpec()
+  {
+    return timestampSpec;
+  }
+
+  public TimestampSpec getTimestampSpec()
+  {
+    if (timestampSpec == null) {
+      timestampSpec = Preconditions.checkNotNull(getParser(), 
"inputRowParser").getParseSpec().getTimestampSpec();
+    }
+    return timestampSpec;
+  }
+
+  @Nullable
+  @JsonProperty("dimensionsSpec")
+  private DimensionsSpec getGivenDimensionsSpec()
+  {
+    return dimensionsSpec;
+  }
+
+  public DimensionsSpec getDimensionsSpec()
+  {
+    if (dimensionsSpec == null) {
+      dimensionsSpec = computeDimensionsSpec(
+          getTimestampSpec(),
+          Preconditions.checkNotNull(getParser(), 
"inputRowParser").getParseSpec().getDimensionsSpec(),
+          aggregators
+      );
+    }
+    return dimensionsSpec;
+  }
+
+  @JsonProperty("metricsSpec")
+  public AggregatorFactory[] getAggregators()
+  {
+    return aggregators;
+  }
+
+  @JsonProperty
+  public GranularitySpec getGranularitySpec()
+  {
+    return granularitySpec;
+  }
+
+  @JsonProperty
+  public TransformSpec getTransformSpec()
+  {
+    return transformSpec;
+  }
+
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  @Nullable
+  public List<AggregateProjectionSpec> getProjections()
+  {
+    return projections;
+  }
+
+  @Nullable
+  public List<String> getProjectionNames()
+  {
+    if (projections == null) {
+      return null;
+    }
+    return 
projections.stream().map(AggregateProjectionSpec::getName).collect(Collectors.toList());
+  }
+
+  @Deprecated
+  @JsonProperty("parser")
+  @Nullable
+  @JsonInclude(Include.NON_NULL)
+  public Map<String, Object> getParserMap()
+  {
+    return parserMap;
+  }
+
+  @Nullable
+  public InputRowParser getParser()
+  {
+    if (inputRowParser == null) {
+      if (parserMap == null) {
+        return null;
+      }
+      //noinspection unchecked
+      inputRowParser = 
transformSpec.decorate(objectMapper.convertValue(this.parserMap, 
InputRowParser.class));
+      ParseSpec parseSpec = inputRowParser.getParseSpec();
+      parseSpec = parseSpec.withDimensionsSpec(
+          computeDimensionsSpec(parseSpec.getTimestampSpec(), 
parseSpec.getDimensionsSpec(), aggregators)
+      );
+      if (timestampSpec != null) {
+        parseSpec = parseSpec.withTimestampSpec(timestampSpec);
+      }
+      if (dimensionsSpec != null) {
+        parseSpec = parseSpec.withDimensionsSpec(dimensionsSpec);
+      }
+      inputRowParser = inputRowParser.withParseSpec(parseSpec);
+    }
+    return inputRowParser;
+  }
+
+  public DataSchema withGranularitySpec(GranularitySpec granularitySpec)
+  {
+    return builder(this).withGranularity(granularitySpec).build();
+  }
+
+  public DataSchema withTransformSpec(TransformSpec transformSpec)
+  {
+    return builder(this).withTransform(transformSpec).build();
+  }
+
+  public DataSchema withDimensionsSpec(DimensionsSpec dimensionsSpec)
+  {
+    return builder(this).withDimensions(dimensionsSpec).build();
+  }
+
+  @Override
+  public String toString()
+  {
+    return "DataSchema{" +
+           "dataSource='" + dataSource + '\'' +
+           ", aggregators=" + Arrays.toString(aggregators) +
+           ", granularitySpec=" + granularitySpec +
+           ", transformSpec=" + transformSpec +
+           ", parserMap=" + parserMap +
+           ", timestampSpec=" + timestampSpec +
+           ", dimensionsSpec=" + dimensionsSpec +
+           ", projections=" + projections +
+           ", inputRowParser=" + inputRowParser +
+           '}';
   }
 
+
+
   private static void validateDatasourceName(String dataSource)
   {
     IdUtils.validateId("dataSource", dataSource);
@@ -298,143 +448,35 @@ public class DataSchema
     }
   }
 
-  @JsonProperty
-  public String getDataSource()
-  {
-    return dataSource;
-  }
-
-  @Nullable
-  @JsonProperty("timestampSpec")
-  private TimestampSpec getGivenTimestampSpec()
-  {
-    return timestampSpec;
-  }
-
-  public TimestampSpec getTimestampSpec()
-  {
-    if (timestampSpec == null) {
-      timestampSpec = Preconditions.checkNotNull(getParser(), 
"inputRowParser").getParseSpec().getTimestampSpec();
-    }
-    return timestampSpec;
-  }
-
-  @Nullable
-  @JsonProperty("dimensionsSpec")
-  private DimensionsSpec getGivenDimensionsSpec()
-  {
-    return dimensionsSpec;
-  }
-
-  public DimensionsSpec getDimensionsSpec()
-  {
-    if (dimensionsSpec == null) {
-      dimensionsSpec = computeDimensionsSpec(
-          getTimestampSpec(),
-          Preconditions.checkNotNull(getParser(), 
"inputRowParser").getParseSpec().getDimensionsSpec(),
-          aggregators
-      );
-    }
-    return dimensionsSpec;
-  }
-
-  @JsonProperty("metricsSpec")
-  public AggregatorFactory[] getAggregators()
-  {
-    return aggregators;
-  }
-
-  @JsonProperty
-  public GranularitySpec getGranularitySpec()
-  {
-    return granularitySpec;
-  }
-
-  @JsonProperty
-  public TransformSpec getTransformSpec()
-  {
-    return transformSpec;
-  }
-
-  @JsonProperty
-  @JsonInclude(JsonInclude.Include.NON_NULL)
-  @Nullable
-  public List<AggregateProjectionSpec> getProjections()
-  {
-    return projections;
-  }
-
-  @Nullable
-  public List<String> getProjectionNames()
-  {
-    if (projections == null) {
-      return null;
-    }
-    return 
projections.stream().map(AggregateProjectionSpec::getName).collect(Collectors.toList());
-  }
-
-  @Deprecated
-  @JsonProperty("parser")
-  @Nullable
-  @JsonInclude(Include.NON_NULL)
-  public Map<String, Object> getParserMap()
-  {
-    return parserMap;
-  }
-
-  @Nullable
-  public InputRowParser getParser()
+  public static void validateProjections(
+      @Nullable List<AggregateProjectionSpec> projections,
+      @Nullable Granularity segmentGranularity
+  )
   {
-    if (inputRowParser == null) {
-      if (parserMap == null) {
-        return null;
-      }
-      //noinspection unchecked
-      inputRowParser = 
transformSpec.decorate(objectMapper.convertValue(this.parserMap, 
InputRowParser.class));
-      ParseSpec parseSpec = inputRowParser.getParseSpec();
-      parseSpec = parseSpec.withDimensionsSpec(
-          computeDimensionsSpec(parseSpec.getTimestampSpec(), 
parseSpec.getDimensionsSpec(), aggregators)
-      );
-      if (timestampSpec != null) {
-        parseSpec = parseSpec.withTimestampSpec(timestampSpec);
-      }
-      if (dimensionsSpec != null) {
-        parseSpec = parseSpec.withDimensionsSpec(dimensionsSpec);
+    if (projections != null) {
+      final Set<String> names = 
Sets.newHashSetWithExpectedSize(projections.size());
+      for (AggregateProjectionSpec projection : projections) {
+        if (names.contains(projection.getName())) {
+          throw InvalidInput.exception("projection[%s] is already defined, 
projection names must be unique", projection.getName());
+        }
+        names.add(projection.getName());
+        final AggregateProjectionMetadata.Schema schema = 
projection.toMetadataSchema();
+        if (schema.getTimeColumnName() == null) {
+          continue;
+        }
+        final Granularity projectionGranularity = schema.getGranularity();
+        if (segmentGranularity != null) {
+          if (segmentGranularity.isFinerThan(projectionGranularity)) {
+            throw InvalidInput.exception(
+                "projection[%s] has granularity[%s] which must be finer than 
or equal to segment granularity[%s]",
+                projection.getName(),
+                projectionGranularity,
+                segmentGranularity
+            );
+          }
+        }
       }
-      inputRowParser = inputRowParser.withParseSpec(parseSpec);
     }
-    return inputRowParser;
-  }
-
-  public DataSchema withGranularitySpec(GranularitySpec granularitySpec)
-  {
-    return builder(this).withGranularity(granularitySpec).build();
-  }
-
-  public DataSchema withTransformSpec(TransformSpec transformSpec)
-  {
-    return builder(this).withTransform(transformSpec).build();
-  }
-
-  public DataSchema withDimensionsSpec(DimensionsSpec dimensionsSpec)
-  {
-    return builder(this).withDimensions(dimensionsSpec).build();
-  }
-
-  @Override
-  public String toString()
-  {
-    return "DataSchema{" +
-           "dataSource='" + dataSource + '\'' +
-           ", aggregators=" + Arrays.toString(aggregators) +
-           ", granularitySpec=" + granularitySpec +
-           ", transformSpec=" + transformSpec +
-           ", parserMap=" + parserMap +
-           ", timestampSpec=" + timestampSpec +
-           ", dimensionsSpec=" + dimensionsSpec +
-           ", projections=" + projections +
-           ", inputRowParser=" + inputRowParser +
-           '}';
   }
 
   public static class Builder
diff --git 
a/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java 
b/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java
index 100436c9d65..360fcf455fa 100644
--- a/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java
+++ b/server/src/test/java/org/apache/druid/segment/indexing/DataSchemaTest.java
@@ -39,6 +39,7 @@ import org.apache.druid.error.DruidException;
 import org.apache.druid.error.DruidExceptionMatcher;
 import org.apache.druid.indexer.granularity.ArbitraryGranularitySpec;
 import org.apache.druid.indexer.granularity.GranularitySpec;
+import org.apache.druid.indexer.granularity.UniformGranularitySpec;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Intervals;
 import org.apache.druid.java.util.common.StringUtils;
@@ -51,15 +52,14 @@ import 
org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
 import org.apache.druid.query.expression.TestExprMacroTable;
 import org.apache.druid.query.filter.SelectorDimFilter;
 import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.VirtualColumns;
 import org.apache.druid.segment.transform.ExpressionTransform;
 import org.apache.druid.segment.transform.TransformSpec;
 import org.apache.druid.testing.InitializedNullHandlingTest;
-import org.hamcrest.CoreMatchers;
 import org.hamcrest.MatcherAssert;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
+import org.hamcrest.Matchers;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
 import org.mockito.ArgumentMatchers;
 import org.mockito.Mockito;
 
@@ -68,23 +68,21 @@ import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-public class DataSchemaTest extends InitializedNullHandlingTest
+class DataSchemaTest extends InitializedNullHandlingTest
 {
   private static ArbitraryGranularitySpec ARBITRARY_GRANULARITY = new 
ArbitraryGranularitySpec(
       Granularities.DAY,
       ImmutableList.of(Intervals.of("2014/2015"))
   );
 
-  @Rule
-  public ExpectedException expectedException = ExpectedException.none();
-
   private final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
 
   @Test
-  public void testDefaultExclusions()
+  void testDefaultExclusions()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -110,14 +108,14 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                   .withObjectMapper(jsonMapper)
                                   .build();
 
-    Assert.assertEquals(
+    Assertions.assertEquals(
         ImmutableSet.of("__time", "time", "col1", "col2", "metric1", 
"metric2"),
         schema.getDimensionsSpec().getDimensionExclusions()
     );
   }
 
   @Test
-  public void testExplicitInclude()
+  void testExplicitInclude()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -147,14 +145,14 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                   .withObjectMapper(jsonMapper)
                                   .build();
 
-    Assert.assertEquals(
+    Assertions.assertEquals(
         ImmutableSet.of("__time", "dimC", "col1", "metric1", "metric2"),
         
schema.getParser().getParseSpec().getDimensionsSpec().getDimensionExclusions()
     );
   }
 
   @Test
-  public void testTransformSpec()
+  void testTransformSpec()
   {
     Map<String, Object> parserMap = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -200,23 +198,23 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
     final InputRow row1bb = parser.parseBatch(
         
ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}".getBytes(StandardCharsets.UTF_8))
     ).get(0);
-    Assert.assertEquals(DateTimes.of("2000-01-01"), row1bb.getTimestamp());
-    Assert.assertEquals("foo", row1bb.getRaw("dimA"));
-    Assert.assertEquals("foofoo", row1bb.getRaw("expr"));
+    Assertions.assertEquals(DateTimes.of("2000-01-01"), row1bb.getTimestamp());
+    Assertions.assertEquals("foo", row1bb.getRaw("dimA"));
+    Assertions.assertEquals("foofoo", row1bb.getRaw("expr"));
 
     final InputRow row1string = 
parser.parse("{\"time\":\"2000-01-01\",\"dimA\":\"foo\"}");
-    Assert.assertEquals(DateTimes.of("2000-01-01"), row1string.getTimestamp());
-    Assert.assertEquals("foo", row1string.getRaw("dimA"));
-    Assert.assertEquals("foofoo", row1string.getRaw("expr"));
+    Assertions.assertEquals(DateTimes.of("2000-01-01"), 
row1string.getTimestamp());
+    Assertions.assertEquals("foo", row1string.getRaw("dimA"));
+    Assertions.assertEquals("foofoo", row1string.getRaw("expr"));
 
     final InputRow row2 = parser.parseBatch(
         
ByteBuffer.wrap("{\"time\":\"2000-01-01\",\"dimA\":\"x\"}".getBytes(StandardCharsets.UTF_8))
     ).get(0);
-    Assert.assertNull(row2);
+    Assertions.assertNull(row2);
   }
 
   @Test
-  public void testOverlapMetricNameAndDim()
+  void testOverlapMetricNameAndDim()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -253,17 +251,19 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                   .withGranularity(ARBITRARY_GRANULARITY)
                                   .withObjectMapper(jsonMapper)
                                   .build();
-
-    expectedException.expect(DruidException.class);
-    expectedException.expectMessage(
-        "Cannot specify a column more than once: [metric1] seen in dimensions 
list, metricsSpec list"
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> schema.getParser()
     );
 
-    schema.getParser();
+    Assertions.assertEquals(
+        "Cannot specify a column more than once: [metric1] seen in dimensions 
list, metricsSpec list",
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testOverlapTimeAndDimPositionZero()
+  void testOverlapTimeAndDimPositionZero()
   {
     DataSchema schema = DataSchema.builder()
                                   .withDataSource(IdUtilsTest.VALID_ID_CHARS)
@@ -284,72 +284,81 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                   .withObjectMapper(jsonMapper)
                                   .build();
 
-    Assert.assertEquals(
+    Assertions.assertEquals(
         ImmutableList.of("__time", "dimA", "dimB"),
         schema.getDimensionsSpec().getDimensionNames()
     );
 
-    Assert.assertTrue(schema.getDimensionsSpec().isForceSegmentSortByTime());
+    
Assertions.assertTrue(schema.getDimensionsSpec().isForceSegmentSortByTime());
   }
 
   @Test
-  public void testOverlapTimeAndDimPositionZeroWrongType()
+  void testOverlapTimeAndDimPositionZeroWrongType()
   {
-    expectedException.expect(DruidException.class);
-    expectedException.expectMessage("Encountered dimension[__time] with 
incorrect type[STRING]. Type must be 'long'.");
-
-    DataSchema.builder()
-              .withDataSource(IdUtilsTest.VALID_ID_CHARS)
-              .withTimestamp(new TimestampSpec("time", "auto", null))
-              .withDimensions(
-                  DimensionsSpec.builder()
-                                .setDimensions(
-                                    ImmutableList.of(
-                                        new StringDimensionSchema("__time"),
-                                        new StringDimensionSchema("dimA"),
-                                        new StringDimensionSchema("dimB")
-                                    )
-                                )
-                                
.setDimensionExclusions(ImmutableList.of("dimC"))
-                                .build()
-              )
-              .withGranularity(ARBITRARY_GRANULARITY)
-              .withObjectMapper(jsonMapper)
-              .build();
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> DataSchema.builder()
+                        .withDataSource(IdUtilsTest.VALID_ID_CHARS)
+                        .withTimestamp(new TimestampSpec("time", "auto", null))
+                        .withDimensions(
+                            DimensionsSpec.builder()
+                                          .setDimensions(
+                                              ImmutableList.of(
+                                                  new 
StringDimensionSchema("__time"),
+                                                  new 
StringDimensionSchema("dimA"),
+                                                  new 
StringDimensionSchema("dimB")
+                                              )
+                                          )
+                                          
.setDimensionExclusions(ImmutableList.of("dimC"))
+                                          .build()
+                        )
+                        .withGranularity(ARBITRARY_GRANULARITY)
+                        .withObjectMapper(jsonMapper)
+                        .build()
+    );
+
+    Assertions.assertEquals(
+        "Encountered dimension[__time] with incorrect type[STRING]. Type must 
be 'long'.",
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testOverlapTimeAndDimPositionOne()
+  void testOverlapTimeAndDimPositionOne()
   {
-    expectedException.expect(DruidException.class);
-    expectedException.expectMessage(
+
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> DataSchema.builder()
+                        .withDataSource(IdUtilsTest.VALID_ID_CHARS)
+                        .withTimestamp(new TimestampSpec("time", "auto", null))
+                        .withDimensions(
+                            DimensionsSpec.builder()
+                                          .setDimensions(
+                                              ImmutableList.of(
+                                                  new 
StringDimensionSchema("dimA"),
+                                                  new 
LongDimensionSchema("__time"),
+                                                  new 
StringDimensionSchema("dimB")
+                                              )
+                                          )
+                                          
.setDimensionExclusions(ImmutableList.of("dimC"))
+                                          .build()
+                        )
+                        .withGranularity(ARBITRARY_GRANULARITY)
+                        .withObjectMapper(jsonMapper)
+                        .build()
+    );
+
+    Assertions.assertEquals(
         "Encountered dimension[__time] at position[1]. This is only supported 
when the dimensionsSpec "
         + "parameter[forceSegmentSortByTime] is set to[false]. "
-        + DimensionsSpec.WARNING_NON_TIME_SORT_ORDER
-    );
-
-    DataSchema.builder()
-              .withDataSource(IdUtilsTest.VALID_ID_CHARS)
-              .withTimestamp(new TimestampSpec("time", "auto", null))
-              .withDimensions(
-                  DimensionsSpec.builder()
-                                .setDimensions(
-                                    ImmutableList.of(
-                                        new StringDimensionSchema("dimA"),
-                                        new LongDimensionSchema("__time"),
-                                        new StringDimensionSchema("dimB")
-                                    )
-                                )
-                                
.setDimensionExclusions(ImmutableList.of("dimC"))
-                                .build()
-              )
-              .withGranularity(ARBITRARY_GRANULARITY)
-              .withObjectMapper(jsonMapper)
-              .build();
+        + DimensionsSpec.WARNING_NON_TIME_SORT_ORDER,
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testOverlapTimeAndDimPositionOne_withExplicitSortOrder()
+  void testOverlapTimeAndDimPositionOne_withExplicitSortOrder()
   {
     DataSchema schema =
         DataSchema.builder()
@@ -372,16 +381,16 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                   .withObjectMapper(jsonMapper)
                   .build();
 
-    Assert.assertEquals(
+    Assertions.assertEquals(
         ImmutableList.of("dimA", "__time", "dimB"),
         schema.getDimensionsSpec().getDimensionNames()
     );
 
-    Assert.assertFalse(schema.getDimensionsSpec().isForceSegmentSortByTime());
+    
Assertions.assertFalse(schema.getDimensionsSpec().isForceSegmentSortByTime());
   }
 
   @Test
-  public void testOverlapTimeAndDimLegacy()
+  void testOverlapTimeAndDimLegacy()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -415,15 +424,19 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                   .withObjectMapper(jsonMapper)
                                   .build();
 
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> schema.getParser()
+    );
 
-    expectedException.expect(DruidException.class);
-    expectedException.expectMessage("Encountered dimension[__time] with 
incorrect type[STRING]. Type must be 'long'.");
-
-    schema.getParser();
+    Assertions.assertEquals(
+        "Encountered dimension[__time] with incorrect type[STRING]. Type must 
be 'long'.",
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testDuplicateAggregators()
+  void testDuplicateAggregators()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -441,29 +454,32 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
         ), JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
     );
 
-    expectedException.expect(DruidException.class);
-    expectedException.expectMessage(
-        "Cannot specify a column more than once: [metric1] seen in metricsSpec 
list (2 occurrences); "
-        + "[metric3] seen in metricsSpec list (2 occurrences)"
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> DataSchema.builder()
+                        .withDataSource(IdUtilsTest.VALID_ID_CHARS)
+                        .withParserMap(parser)
+                        .withAggregators(
+                            new DoubleSumAggregatorFactory("metric1", "col1"),
+                            new DoubleSumAggregatorFactory("metric2", "col2"),
+                            new DoubleSumAggregatorFactory("metric1", "col3"),
+                            new DoubleSumAggregatorFactory("metric3", "col4"),
+                            new DoubleSumAggregatorFactory("metric3", "col5")
+                        )
+                        .withGranularity(ARBITRARY_GRANULARITY)
+                        .withObjectMapper(jsonMapper)
+                        .build()
     );
 
-    DataSchema schema = DataSchema.builder()
-                                  .withDataSource(IdUtilsTest.VALID_ID_CHARS)
-                                  .withParserMap(parser)
-                                  .withAggregators(
-                                      new 
DoubleSumAggregatorFactory("metric1", "col1"),
-                                      new 
DoubleSumAggregatorFactory("metric2", "col2"),
-                                      new 
DoubleSumAggregatorFactory("metric1", "col3"),
-                                      new 
DoubleSumAggregatorFactory("metric3", "col4"),
-                                      new 
DoubleSumAggregatorFactory("metric3", "col5")
-                                  )
-                                  .withGranularity(ARBITRARY_GRANULARITY)
-                                  .withObjectMapper(jsonMapper)
-                                  .build();
+    Assertions.assertEquals(
+        "Cannot specify a column more than once: [metric1] seen in metricsSpec 
list (2 occurrences); "
+        + "[metric3] seen in metricsSpec list (2 occurrences)",
+        t.getMessage()
+    );
   }
 
   @Test
-  public void testSerdeWithInvalidParserMap() throws Exception
+  void testSerdeWithInvalidParserMap() throws Exception
   {
     String jsonStr = "{"
                      + "\"dataSource\":\"" + 
StringEscapeUtils.escapeJson(IdUtilsTest.VALID_ID_CHARS) + "\","
@@ -483,18 +499,22 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
         DataSchema.class
     );
 
-    
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
-    
expectedException.expectCause(CoreMatchers.instanceOf(JsonMappingException.class));
-    expectedException.expectMessage(
-        "Cannot construct instance of 
`org.apache.druid.data.input.impl.StringInputRowParser`, problem: parseSpec"
+    Throwable t = Assertions.assertThrows(
+        IllegalArgumentException.class,
+        () -> schema.getParser()
+    );
+    MatcherAssert.assertThat(
+        t.getMessage(),
+        Matchers.startsWith("Cannot construct instance of 
`org.apache.druid.data.input.impl.StringInputRowParser`, problem: parseSpec")
+    );
+    MatcherAssert.assertThat(
+        t.getCause(),
+        Matchers.instanceOf(JsonMappingException.class)
     );
-
-    // Jackson creates a default type parser (StringInputRowParser) for an 
invalid type.
-    schema.getParser();
   }
 
   @Test
-  public void testEmptyDatasource()
+  void testEmptyDatasource()
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -532,7 +552,7 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
 
 
   @Test
-  public void testInvalidWhitespaceDatasource()
+  void testInvalidWhitespaceDatasource()
   {
     Map<String, String> invalidCharToDataSourceName = ImmutableMap.of(
         "\\t", "\tab\t",
@@ -557,7 +577,7 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
   }
 
   @Test
-  public void testSerde() throws Exception
+  void testSerde() throws Exception
   {
     // deserialize, then serialize, then deserialize of DataSchema.
     String jsonStr = "{"
@@ -585,8 +605,8 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
         DataSchema.class
     );
 
-    Assert.assertEquals(IdUtilsTest.VALID_ID_CHARS, actual.getDataSource());
-    Assert.assertEquals(
+    Assertions.assertEquals(IdUtilsTest.VALID_ID_CHARS, 
actual.getDataSource());
+    Assertions.assertEquals(
         new JSONParseSpec(
             new TimestampSpec("xXx", null, null),
             
DimensionsSpec.builder().setDimensionExclusions(Arrays.asList("__time", 
"metric1", "xXx", "col1")).build(),
@@ -596,20 +616,20 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
         ),
         actual.getParser().getParseSpec()
     );
-    Assert.assertArrayEquals(
+    Assertions.assertArrayEquals(
         new AggregatorFactory[]{
             new DoubleSumAggregatorFactory("metric1", "col1")
         },
         actual.getAggregators()
     );
-    Assert.assertEquals(
+    Assertions.assertEquals(
         new ArbitraryGranularitySpec(
             new DurationGranularity(86400000, null),
             ImmutableList.of(Intervals.of("2014/2015"))
         ),
         actual.getGranularitySpec()
     );
-    Assert.assertNull(actual.getProjections());
+    Assertions.assertNull(actual.getProjections());
   }
 
   @Test
@@ -637,15 +657,15 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                     .build();
     DataSchema serdeResult = 
jsonMapper.readValue(jsonMapper.writeValueAsString(original), DataSchema.class);
 
-    Assert.assertEquals("datasource", serdeResult.getDataSource());
-    Assert.assertArrayEquals(new AggregatorFactory[]{new 
CountAggregatorFactory("rows")}, serdeResult.getAggregators());
-    Assert.assertEquals(ImmutableList.of(projectionSpec), 
serdeResult.getProjections());
-    Assert.assertEquals(ImmutableList.of("ab_count_projection"), 
serdeResult.getProjectionNames());
-    Assert.assertEquals(jsonMapper.writeValueAsString(original), 
jsonMapper.writeValueAsString(serdeResult));
+    Assertions.assertEquals("datasource", serdeResult.getDataSource());
+    Assertions.assertArrayEquals(new AggregatorFactory[]{new 
CountAggregatorFactory("rows")}, serdeResult.getAggregators());
+    Assertions.assertEquals(ImmutableList.of(projectionSpec), 
serdeResult.getProjections());
+    Assertions.assertEquals(ImmutableList.of("ab_count_projection"), 
serdeResult.getProjectionNames());
+    Assertions.assertEquals(jsonMapper.writeValueAsString(original), 
jsonMapper.writeValueAsString(serdeResult));
   }
 
   @Test
-  public void testSerializeWithInvalidDataSourceName() throws Exception
+  void testSerializeWithInvalidDataSourceName() throws Exception
   {
     // Escape backslashes to insert a tab character in the datasource name.
     Map<String, String> datasourceToErrorMsg = ImmutableMap.of(
@@ -697,12 +717,12 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
         );
         continue;
       }
-      Assert.fail("Serialization of datasource " + entry.getKey() + " should 
have failed.");
+      Assertions.fail("Serialization of datasource " + entry.getKey() + " 
should have failed.");
     }
   }
 
   @Test
-  public void testSerdeWithUpdatedDataSchemaAddedField() throws IOException
+  void testSerdeWithUpdatedDataSchemaAddedField() throws IOException
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -731,17 +751,17 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
     String serialized = jsonMapper.writeValueAsString(originalSchema);
     TestModifiedDataSchema deserialized = jsonMapper.readValue(serialized, 
TestModifiedDataSchema.class);
 
-    Assert.assertEquals(null, deserialized.getExtra());
-    Assert.assertEquals(originalSchema.getDataSource(), 
deserialized.getDataSource());
-    Assert.assertEquals(originalSchema.getGranularitySpec(), 
deserialized.getGranularitySpec());
-    Assert.assertEquals(originalSchema.getParser().getParseSpec(), 
deserialized.getParser().getParseSpec());
-    Assert.assertArrayEquals(originalSchema.getAggregators(), 
deserialized.getAggregators());
-    Assert.assertEquals(originalSchema.getTransformSpec(), 
deserialized.getTransformSpec());
-    Assert.assertEquals(originalSchema.getParserMap(), 
deserialized.getParserMap());
+    Assertions.assertEquals(null, deserialized.getExtra());
+    Assertions.assertEquals(originalSchema.getDataSource(), 
deserialized.getDataSource());
+    Assertions.assertEquals(originalSchema.getGranularitySpec(), 
deserialized.getGranularitySpec());
+    Assertions.assertEquals(originalSchema.getParser().getParseSpec(), 
deserialized.getParser().getParseSpec());
+    Assertions.assertArrayEquals(originalSchema.getAggregators(), 
deserialized.getAggregators());
+    Assertions.assertEquals(originalSchema.getTransformSpec(), 
deserialized.getTransformSpec());
+    Assertions.assertEquals(originalSchema.getParserMap(), 
deserialized.getParserMap());
   }
 
   @Test
-  public void testSerdeWithUpdatedDataSchemaRemovedField() throws IOException
+  void testSerdeWithUpdatedDataSchemaRemovedField() throws IOException
   {
     Map<String, Object> parser = jsonMapper.convertValue(
         new StringInputRowParser(
@@ -774,16 +794,16 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
     String serialized = jsonMapper.writeValueAsString(originalSchema);
     DataSchema deserialized = jsonMapper.readValue(serialized, 
DataSchema.class);
 
-    Assert.assertEquals(originalSchema.getDataSource(), 
deserialized.getDataSource());
-    Assert.assertEquals(originalSchema.getGranularitySpec(), 
deserialized.getGranularitySpec());
-    Assert.assertEquals(originalSchema.getParser().getParseSpec(), 
deserialized.getParser().getParseSpec());
-    Assert.assertArrayEquals(originalSchema.getAggregators(), 
deserialized.getAggregators());
-    Assert.assertEquals(originalSchema.getTransformSpec(), 
deserialized.getTransformSpec());
-    Assert.assertEquals(originalSchema.getParserMap(), 
deserialized.getParserMap());
+    Assertions.assertEquals(originalSchema.getDataSource(), 
deserialized.getDataSource());
+    Assertions.assertEquals(originalSchema.getGranularitySpec(), 
deserialized.getGranularitySpec());
+    Assertions.assertEquals(originalSchema.getParser().getParseSpec(), 
deserialized.getParser().getParseSpec());
+    Assertions.assertArrayEquals(originalSchema.getAggregators(), 
deserialized.getAggregators());
+    Assertions.assertEquals(originalSchema.getTransformSpec(), 
deserialized.getTransformSpec());
+    Assertions.assertEquals(originalSchema.getParserMap(), 
deserialized.getParserMap());
   }
 
   @Test
-  public void testWithDimensionSpec()
+  void testWithDimensionSpec()
   {
     TimestampSpec tsSpec = Mockito.mock(TimestampSpec.class);
     GranularitySpec gSpec = Mockito.mock(GranularitySpec.class);
@@ -806,18 +826,18 @@ public class DataSchemaTest extends 
InitializedNullHandlingTest
                                      .withObjectMapper(jsonMapper)
                                      .build();
     DataSchema newSchema = oldSchema.withDimensionsSpec(newDimSpec);
-    Assert.assertSame(oldSchema.getDataSource(), newSchema.getDataSource());
-    Assert.assertSame(oldSchema.getTimestampSpec(), 
newSchema.getTimestampSpec());
-    Assert.assertSame(newDimSpec, newSchema.getDimensionsSpec());
-    Assert.assertSame(oldSchema.getAggregators(), newSchema.getAggregators());
-    Assert.assertSame(oldSchema.getGranularitySpec(), 
newSchema.getGranularitySpec());
-    Assert.assertSame(oldSchema.getTransformSpec(), 
newSchema.getTransformSpec());
-    Assert.assertSame(oldSchema.getParserMap(), newSchema.getParserMap());
+    Assertions.assertSame(oldSchema.getDataSource(), 
newSchema.getDataSource());
+    Assertions.assertSame(oldSchema.getTimestampSpec(), 
newSchema.getTimestampSpec());
+    Assertions.assertSame(newDimSpec, newSchema.getDimensionsSpec());
+    Assertions.assertSame(oldSchema.getAggregators(), 
newSchema.getAggregators());
+    Assertions.assertSame(oldSchema.getGranularitySpec(), 
newSchema.getGranularitySpec());
+    Assertions.assertSame(oldSchema.getTransformSpec(), 
newSchema.getTransformSpec());
+    Assertions.assertSame(oldSchema.getParserMap(), newSchema.getParserMap());
 
   }
 
   @Test
-  public void testCombinedDataSchemaSetsMultiValuedColumnsInfo()
+  void testCombinedDataSchemaSetsMultiValuedColumnsInfo()
   {
     Set<String> multiValuedDimensions = ImmutableSet.of("dimA");
 
@@ -836,6 +856,100 @@ public class DataSchemaTest extends InitializedNullHandlingTest
         null,
         multiValuedDimensions
     );
-    Assert.assertEquals(ImmutableSet.of("dimA"), schema.getMultiValuedDimensions());
+    Assertions.assertEquals(ImmutableSet.of("dimA"), schema.getMultiValuedDimensions());
+  }
+
+  @Test
+  void testInvalidProjectionDupeNames()
+  {
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> DataSchema.builder()
+                        .withDataSource("dataSource")
+                        .withGranularity(
+                            new UniformGranularitySpec(
+                                Granularities.HOUR,
+                                Granularities.NONE,
+                                false,
+                                List.of(Intervals.of("2014/2015"))
+                            )
+                        )
+                        .withProjections(
+                            List.of(
+                                new AggregateProjectionSpec(
+                                    "some projection",
+                                    VirtualColumns.create(Granularities.toVirtualColumn(Granularities.HOUR, "g")),
+                                    List.of(new LongDimensionSchema("g")),
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                ),
+                                new AggregateProjectionSpec(
+                                    "some projection",
+                                    VirtualColumns.create(Granularities.toVirtualColumn(Granularities.MINUTE, "g")),
+                                    List.of(new LongDimensionSchema("g")),
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                )
+                            )
+                        )
+                        .build()
+    );
+
+    Assertions.assertEquals(
+        "projection[some projection] is already defined, projection names must be unique",
+        t.getMessage()
+    );
+  }
+
+  @Test
+  void testInvalidProjectionGranularity()
+  {
+    Throwable t = Assertions.assertThrows(
+        DruidException.class,
+        () -> DataSchema.builder()
+                        .withDataSource("dataSource")
+                        .withGranularity(
+                            new UniformGranularitySpec(
+                                Granularities.HOUR,
+                                Granularities.NONE,
+                                false,
+                                List.of(Intervals.of("2014/2015"))
+                            )
+                        )
+                        .withProjections(
+                            List.of(
+                                new AggregateProjectionSpec(
+                                    "ok granularity",
+                                    VirtualColumns.create(Granularities.toVirtualColumn(Granularities.HOUR, "g")),
+                                    List.of(new LongDimensionSchema("g")),
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                ),
+                                new AggregateProjectionSpec(
+                                    "acceptable granularity",
+                                    VirtualColumns.create(Granularities.toVirtualColumn(Granularities.MINUTE, "g")),
+                                    List.of(new LongDimensionSchema("g")),
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                ),
+                                new AggregateProjectionSpec(
+                                    "not having a time column is ok too",
+                                    VirtualColumns.EMPTY,
+                                    null,
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                ),
+                                new AggregateProjectionSpec(
+                                    "bad granularity",
+                                    VirtualColumns.create(Granularities.toVirtualColumn(Granularities.DAY, "g")),
+                                    List.of(new LongDimensionSchema("g")),
+                                    new AggregatorFactory[]{new CountAggregatorFactory("count")}
+                                )
+                            )
+                        )
+                        .build()
+    );
+
+    Assertions.assertEquals(
+        "projection[bad granularity] has granularity[{type=period, period=P1D, timeZone=UTC, origin=null}]"
+        + " which must be finer than or equal to segment granularity[{type=period, period=PT1H, timeZone=UTC,"
+        + " origin=null}]",
+        t.getMessage()
+    );
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to