kfaraz commented on code in PR #18207:
URL: https://github.com/apache/druid/pull/18207#discussion_r2238425881


##########
embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java:
##########
@@ -1742,84 +1639,85 @@ public void 
testAutoCompactionDutyWithOverlappingInterval() throws Exception
       );
       // Compact the MONTH segment
       forceTriggerAutoCompaction(2);
-      verifyQuery(INDEX_ROLLUP_QUERIES_RESOURCE, queryAndResultFields);
+      verifyScanResult("added", "57.0||459.0");
+      verifyScanResult("COUNT(*)", "2");
 
       // Compact the WEEK segment
       forceTriggerAutoCompaction(2);
-      verifyQuery(INDEX_ROLLUP_QUERIES_RESOURCE, queryAndResultFields);
+      verifyScanResult("added", "57.0||459.0");
+      verifyScanResult("COUNT(*)", "2");
 
      // Verify all tasks succeed
-      List<TaskResponseObject> compactTasksBefore = 
indexer.getCompleteTasksForDataSource(fullDatasourceName);
-      for (TaskResponseObject taskResponseObject : compactTasksBefore) {
-        Assert.assertEquals(TaskState.SUCCESS, taskResponseObject.getStatus());
+      List<TaskStatusPlus> compactTasksBefore = 
getCompleteTasksForDataSource(fullDatasourceName);
+      for (TaskStatusPlus taskResponseObject : compactTasksBefore) {
+        Assertions.assertEquals(TaskState.SUCCESS, 
taskResponseObject.getStatusCode());
       }
 
      // Verify compacted segments do not get compacted again
       forceTriggerAutoCompaction(2);
-      List<TaskResponseObject> compactTasksAfter = 
indexer.getCompleteTasksForDataSource(fullDatasourceName);
-      Assert.assertEquals(compactTasksAfter.size(), compactTasksBefore.size());
+      List<TaskStatusPlus> compactTasksAfter = 
getCompleteTasksForDataSource(fullDatasourceName);
+      Assertions.assertEquals(compactTasksAfter.size(), 
compactTasksBefore.size());
     }
   }
 
-  private void loadData(String indexTask) throws Exception
+  private <T extends TaskBuilder.IndexCommon<?, ?, ?>> void 
loadData(Supplier<T> updatePayload)
   {
-    loadData(indexTask, ImmutableMap.of());
+    loadData(updatePayload, null);
   }
 
-  private void loadData(String indexTask, Map<String, Object> specs) throws 
Exception
+  private <T extends TaskBuilder.IndexCommon<?, ?, ?>> void loadData(
+      Supplier<T> taskPayloadSupplier,
+      GranularitySpec granularitySpec
+  )
   {
-    String taskSpec = getResourceAsString(indexTask);
-    taskSpec = StringUtils.replace(taskSpec, "%%DATASOURCE%%", 
fullDatasourceName);
-    taskSpec = StringUtils.replace(
-        taskSpec,
-        "%%SEGMENT_AVAIL_TIMEOUT_MILLIS%%",
-        jsonMapper.writeValueAsString("0")
-    );
-    for (Map.Entry<String, Object> entry : specs.entrySet()) {
-      taskSpec = StringUtils.replace(
-          taskSpec,
-          entry.getKey(),
-          jsonMapper.writeValueAsString(entry.getValue())
-      );
+    final TaskBuilder.IndexCommon<?, ?, ?> taskBuilder = 
taskPayloadSupplier.get();
+    taskBuilder.dataSource(fullDatasourceName);
+    if (granularitySpec != null) {
+      taskBuilder.granularitySpec(granularitySpec);
     }
-    final String taskId = indexer.submitTask(taskSpec);
-    LOG.info("Submitted task[%s] to load data", taskId);
-    indexer.waitUntilTaskCompletes(taskId);
 
-    ITRetryUtil.retryUntilTrue(
-        () -> coordinator.areSegmentsLoaded(fullDatasourceName),
-        "Segments are loaded"
+    runTask(taskBuilder, fullDatasourceName);
+  }
+
+  private void verifyQuery(List<Pair<String, String>> queries)
+  {
+    queries.forEach(
+        query -> verifyQuery(query.lhs, query.rhs)
     );
   }
 
-  private void verifyQuery(String queryResource) throws Exception
+  private void verifyQuery(String query, String result)
   {
-    verifyQuery(queryResource, ImmutableMap.of());
+    Assertions.assertEquals(
+        result,
+        cluster.runSql(query, dataSource),
+        StringUtils.format("Query[%s] failed", query)
+    );
   }
 
-  private void verifyQuery(String queryResource, Map<String, Object> 
keyValueToReplace) throws Exception
+  /**
+   * Verifies the result of a SELECT query
+   *
+   * @param field  Field to select
+   * @param result CSV result with special strings {@code ||} to represent

Review Comment:
   Yeah, I didn't want to do this initially, but it made for more readable 
tests as it avoided all the escaping of empty strings and newlines.
   
   ```java
   verifyScanResult("added", "...||31||...||62");
   ```
   vs
   ```java
   verifyScanResult("added", "\"\"\n31\"\"\n62");
   ```
   
   Please let me know if this seems hacky and if you feel that it is cleaner to 
just use the original.



##########
embedded-tests/src/test/java/org/apache/druid/testing/embedded/compact/AutoCompactionTest.java:
##########
@@ -1989,6 +1884,7 @@ private void forceTriggerAutoCompaction(
       );
 
       // Wait for scheduler to pick up the compaction job
+      // TODO: make this latch-based

Review Comment:
   Sorry, must have missed this in the cleanup.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to