This is an automated email from the ASF dual-hosted git repository.

nju_yaho pushed a commit to tag ebay-3.1.0-release-20200701
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 4e6884b0a982e6fdb4045ba33fb14bbe33a7dc35
Author: Zhong, Yanghong <nju_y...@apache.org>
AuthorDate: Wed Jun 10 14:21:16 2020 +0800

    KYLIN-4534 Backend support for query & storage trend
---
 .../java/org/apache/kylin/cube/CubeInstance.java   |  14 +
 .../kylin/rest/controller/CubeController.java      |  58 ++-
 .../kylin/rest/controller/DashboardController.java |  44 +--
 .../response/CubeOptimizationTrendResponse.java    |  46 +++
 .../org/apache/kylin/rest/service/CubeService.java |  96 +++--
 .../kylin/rest/service/DashboardService.java       | 288 +++------------
 .../apache/kylin/rest/service/QueryService.java    |   7 -
 .../apache/kylin/rest/util/SqlCreationUtil.java    | 388 +++++++++++++++++++++
 .../kylin/rest/util/SqlCreationUtilTest.java       | 240 +++++++++++++
 9 files changed, 837 insertions(+), 344 deletions(-)
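
For orientation, the CubeController change below adds a GET /{cubeName}/optimization/trend endpoint whose payload is the new CubeOptimizationTrendResponse. A minimal client sketch follows; the host, port, credentials and class name are illustrative assumptions (a default local Kylin instance), while the endpoint path and response fields come from this commit:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class OptimizationTrendClientSketch {
        public static void main(String[] args) throws Exception {
            // Endpoint added by this commit; host, port, cube name and credentials are placeholders.
            URL url = new URL("http://localhost:7070/kylin/api/cubes/sample_cube/optimization/trend");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            String token = Base64.getEncoder().encodeToString("ADMIN:KYLIN".getBytes(StandardCharsets.UTF_8));
            conn.setRequestProperty("Authorization", "Basic " + token);
            // The body mirrors CubeOptimizationTrendResponse:
            // {"timeSequence":[...], "trendOfQueryLatency":[...], "trendOfStorageUsage":[...]}
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = in.readLine()) != null) {
                    System.out.println(line);
                }
            }
        }
    }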

diff --git a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
index 0d3f39d..7a8c13b 100644
--- a/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
+++ b/core-cube/src/main/java/org/apache/kylin/cube/CubeInstance.java
@@ -25,6 +25,7 @@ import static org.apache.kylin.cube.cuboid.CuboidModeEnum.RECOMMEND;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -69,6 +70,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.kylin.shaded.com.google.common.base.Objects;
 import org.apache.kylin.shaded.com.google.common.collect.Lists;
+import org.apache.kylin.shaded.com.google.common.collect.Sets;
 
 @SuppressWarnings("serial")
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
@@ -127,6 +129,9 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
     @JsonProperty("cuboid_last_optimized")
     private long cuboidLastOptimized;
 
+    @JsonProperty("cuboid_optimized_timestamp_serial")
+    private Set<Long> cuboidOptimizedTimestamps = Sets.newHashSet();
+
     @JsonProperty("snapshots")
     private Map<String, String> snapshots = Maps.newHashMap();
 
@@ -457,6 +462,7 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
         cuboidBytes = null;
         cuboidBytesRecommend = null;
         cuboidLastOptimized = 0L;
+        cuboidOptimizedTimestamps.clear();
     }
 
     public Set<Long> getCuboidsByMode(String cuboidModeName) {
@@ -559,6 +565,14 @@ public class CubeInstance extends RootPersistentEntity implements IRealization,
 
     public void setCuboidLastOptimized(long lastOptimized) {
         this.cuboidLastOptimized = lastOptimized;
+        this.cuboidOptimizedTimestamps.add(lastOptimized);
+    }
+
+    public List<Long> getCuboidOptimizedTimestamps() {
+        this.cuboidOptimizedTimestamps.add(cuboidLastOptimized);
+        List<Long> ret = Lists.newArrayList(this.cuboidOptimizedTimestamps);
+        Collections.sort(ret);
+        return ret;
     }
 
     /**
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
index e937920..af3e9b1 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/CubeController.java
@@ -27,6 +27,8 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
 
 import javax.servlet.http.HttpServletResponse;
 
@@ -73,6 +75,7 @@ import org.apache.kylin.rest.request.JobBuildRequest2;
 import org.apache.kylin.rest.request.JobOptimizeRequest;
 import org.apache.kylin.rest.request.LookupSnapshotBuildRequest;
 import org.apache.kylin.rest.response.CubeInstanceResponse;
+import org.apache.kylin.rest.response.CubeOptimizationTrendResponse;
 import org.apache.kylin.rest.response.CuboidRecommendResponse;
 import org.apache.kylin.rest.response.CuboidTreeResponse;
 import org.apache.kylin.rest.response.EnvelopeResponse;
@@ -85,6 +88,9 @@ import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.QueryService;
 import org.apache.kylin.rest.util.AclEvaluate;
 import org.apache.kylin.rest.util.ValidateUtil;
+import org.apache.kylin.shaded.com.google.common.collect.Lists;
+import org.apache.kylin.shaded.com.google.common.collect.Maps;
+import org.apache.kylin.shaded.com.google.common.collect.Sets;
 import org.apache.kylin.source.kafka.util.KafkaClient;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -103,9 +109,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
 import com.fasterxml.jackson.core.JsonParseException;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonMappingException;
-import org.apache.kylin.shaded.com.google.common.collect.Lists;
-import org.apache.kylin.shaded.com.google.common.collect.Maps;
-import org.apache.kylin.shaded.com.google.common.collect.Sets;
 
 /**
  * CubeController is defined as Restful API entrance for UI.
@@ -971,6 +974,55 @@ public class CubeController extends BasicController {
         return new CuboidRecommendResponse(cuboidTree, recommRet.getOptimizationBenefit());
     }
 
+    @RequestMapping(value = "/{cubeName}/optimization/trend", method = RequestMethod.GET)
+    @ResponseBody
+    public CubeOptimizationTrendResponse getOptimizationTrend(@PathVariable String cubeName) throws IOException {
+        CubeInstance cube = getCube(cubeName);
+
+        long endTimestamp = System.currentTimeMillis();
+        TreeMap<String, Double> trendOfQueryLatency = cubeService.getQueryLatencyTrendForOptimization(cube,
+                endTimestamp);
+        TreeMap<String, Double> trendOfStorageUsage = cubeService.getStorageUsageTrendForOptimization(cube,
+                endTimestamp);
+
+        TreeSet<String> timeSequence = new TreeSet<>();
+        timeSequence.addAll(trendOfQueryLatency.keySet());
+        timeSequence.addAll(trendOfStorageUsage.keySet());
+
+        List<String> timeSequenceList = new ArrayList<>(timeSequence.size());
+        List<Double> trendOfQueryLatencyList = new ArrayList<>(timeSequence.size());
+        List<Double> trendOfStorageUsageList = new ArrayList<>(timeSequence.size());
+        Double prevQueryLatency = trendOfQueryLatency.isEmpty() ? null : trendOfQueryLatency.firstEntry().getValue();
+        Double prevStorageUsage = trendOfStorageUsage.isEmpty() ? null : trendOfStorageUsage.firstEntry().getValue();
+        for (String timeKey : timeSequence) {
+            timeSequenceList.add(timeKey);
+
+            // query latency sequence
+            Double queryLatency = trendOfQueryLatency.get(timeKey);
+            if (queryLatency == null) {
+                queryLatency = prevQueryLatency;
+            } else {
+                prevQueryLatency = queryLatency;
+            }
+            if (queryLatency != null) {
+                trendOfQueryLatencyList.add(queryLatency);
+            }
+
+            // storage usage sequence
+            Double storageUsage = trendOfStorageUsage.get(timeKey);
+            if (storageUsage == null) {
+                storageUsage = prevStorageUsage;
+            } else {
+                prevStorageUsage = storageUsage;
+            }
+            if (storageUsage != null) {
+                trendOfStorageUsageList.add(storageUsage);
+            }
+        }
+        return new CubeOptimizationTrendResponse(timeSequenceList, trendOfQueryLatencyList, trendOfStorageUsageList);
+    }
+
     private RecommendResult getRecommendCuboidList(CubeInstance cube) throws IOException {
         // Get cuboid source info
         Map<Long, Long> optimizeHitFrequencyMap = getSourceCuboidHitFrequency(cube.getName());
diff --git a/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java b/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
index 8b669b3..45c2e49 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/controller/DashboardController.java
@@ -22,12 +22,9 @@ import java.util.List;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.response.MetricsResponse;
-import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.service.CubeService;
 import org.apache.kylin.rest.service.DashboardService;
-import org.apache.kylin.rest.service.QueryService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -48,9 +45,6 @@ public class DashboardController extends BasicController {
     private DashboardService dashboardService;
 
     @Autowired
-    private QueryService queryService;
-
-    @Autowired
     private CubeService cubeService;
 
     @RequestMapping(value = "/metric/cube", method = { RequestMethod.GET })
@@ -67,21 +61,8 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "cubeName", required = false) String 
cubeName,
             @RequestParam(value = "startTime") String startTime, 
@RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
-        MetricsResponse queryMetrics = new MetricsResponse();
-        PrepareSqlRequest sqlRequest = 
dashboardService.getQueryMetricsSQLRequest(startTime, endTime, projectName,
-                cubeName);
-        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest);
-        if (!sqlResponse.getIsException()) {
-            queryMetrics.increase("queryCount",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(0)));
-            queryMetrics.increase("avgQueryLatency",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(1)));
-            queryMetrics.increase("maxQueryLatency",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(2)));
-            queryMetrics.increase("minQueryLatency",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(3)));
-        }
-        return queryMetrics;
+
+        return dashboardService.getQueryMetrics(projectName, cubeName, 
startTime, endTime);
     }
 
     @RequestMapping(value = "/metric/job", method = RequestMethod.GET)
@@ -90,20 +71,8 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "cubeName", required = false) String 
cubeName,
             @RequestParam(value = "startTime") String startTime, 
@RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
-        MetricsResponse jobMetrics = new MetricsResponse();
-        PrepareSqlRequest sqlRequest = 
dashboardService.getJobMetricsSQLRequest(startTime, endTime, projectName,
-                cubeName);
-        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest);
-        if (!sqlResponse.getIsException()) {
-            jobMetrics.increase("jobCount", 
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(0)));
-            jobMetrics.increase("avgJobBuildTime",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(1)));
-            jobMetrics.increase("maxJobBuildTime",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(2)));
-            jobMetrics.increase("minJobBuildTime",
-                    
dashboardService.getMetricValue(sqlResponse.getResults().get(0).get(3)));
-        }
-        return jobMetrics;
+
+        return dashboardService.getJobMetrics(projectName, cubeName, 
startTime, endTime);
     }
 
     @RequestMapping(value = "/chart/{category}/{metric}/{dimension}", method = 
RequestMethod.GET)
@@ -113,9 +82,8 @@ public class DashboardController extends BasicController {
             @RequestParam(value = "cubeName", required = false) String 
cubeName,
             @RequestParam(value = "startTime") String startTime, 
@RequestParam(value = "endTime") String endTime) {
         checkAuthorization(projectName);
-        PrepareSqlRequest sqlRequest = 
dashboardService.getChartSQLRequest(startTime, endTime, projectName, cubeName,
-                dimension, metric, category);
-        return 
dashboardService.transformChartData(queryService.doQueryWithCache(sqlRequest));
+
+        return dashboardService.getChartData(category, projectName, cubeName, 
startTime, endTime, dimension, metric);
     }
 
     private void checkAuthorization(String projectName) {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/response/CubeOptimizationTrendResponse.java b/server-base/src/main/java/org/apache/kylin/rest/response/CubeOptimizationTrendResponse.java
new file mode 100644
index 0000000..08d8487
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/response/CubeOptimizationTrendResponse.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+package org.apache.kylin.rest.response;
+
+import java.util.List;
+
+public class CubeOptimizationTrendResponse {
+    private List<String> timeSequence;
+    private List<Double> trendOfQueryLatency;
+    private List<Double> trendOfStorageUsage;
+
+    public CubeOptimizationTrendResponse(List<String> timeSequence, List<Double> trendOfQueryLatency,
+            List<Double> trendOfStorageUsage) {
+        this.timeSequence = timeSequence;
+        this.trendOfQueryLatency = trendOfQueryLatency;
+        this.trendOfStorageUsage = trendOfStorageUsage;
+    }
+
+    public List<String> getTimeSequence() {
+        return timeSequence;
+    }
+
+    public List<Double> getTrendOfQueryLatency() {
+        return trendOfQueryLatency;
+    }
+
+    public List<Double> getTrendOfStorageUsage() {
+        return trendOfStorageUsage;
+    }
+}
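
Since the response class above is a plain bean with getters only, the REST layer's Jackson mapper serializes it by property name. A rough illustration with invented values (only the field names come from this commit):

    import java.util.Arrays;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import org.apache.kylin.rest.response.CubeOptimizationTrendResponse;

    public class TrendResponseJsonSketch {
        public static void main(String[] args) throws Exception {
            // Values are invented; at runtime the time slots come from the cube's optimization timestamps.
            CubeOptimizationTrendResponse response = new CubeOptimizationTrendResponse(
                    Arrays.asList("2020-06-01 00:00:00", "2020-06-10 14:00:00"),
                    Arrays.asList(850.0, 410.5),   // average query latency per slot
                    Arrays.asList(3.2, 2.7));      // storage usage (expansion rate) per slot
            // Prints something like:
            // {"timeSequence":["2020-06-01 00:00:00","2020-06-10 14:00:00"],
            //  "trendOfQueryLatency":[850.0,410.5],"trendOfStorageUsage":[3.2,2.7]}
            System.out.println(new ObjectMapper().writeValueAsString(response));
        }
    }
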
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
index f332b4c..99be837 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/CubeService.java
@@ -27,6 +27,7 @@ import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeMap;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
@@ -72,11 +73,10 @@ import org.apache.kylin.metadata.project.ProjectManager;
 import org.apache.kylin.metadata.project.RealizationEntry;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.RealizationType;
-import org.apache.kylin.metrics.MetricsManager;
-import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.exception.ForbiddenException;
+import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.msg.Message;
 import org.apache.kylin.rest.msg.MsgPicker;
 import org.apache.kylin.rest.request.MetricsRequest;
@@ -86,8 +86,14 @@ import org.apache.kylin.rest.response.CuboidTreeResponse;
 import org.apache.kylin.rest.response.CuboidTreeResponse.NodeInfo;
 import org.apache.kylin.rest.response.HBaseResponse;
 import org.apache.kylin.rest.response.MetricsResponse;
+import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.rest.util.SqlCreationUtil;
 import org.apache.kylin.rest.util.ValidateUtil;
+import org.apache.kylin.shaded.com.google.common.cache.Cache;
+import org.apache.kylin.shaded.com.google.common.cache.CacheBuilder;
+import org.apache.kylin.shaded.com.google.common.collect.Lists;
+import org.apache.kylin.shaded.com.google.common.collect.Maps;
 import org.apache.kylin.storage.hbase.HBaseConnection;
 import org.apache.kylin.storage.hbase.util.StorageCleanUtil;
 import org.apache.kylin.storage.hybrid.HybridInstance;
@@ -104,11 +110,6 @@ import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.stereotype.Component;
 
-import org.apache.kylin.shaded.com.google.common.cache.Cache;
-import org.apache.kylin.shaded.com.google.common.cache.CacheBuilder;
-import org.apache.kylin.shaded.com.google.common.collect.Lists;
-import org.apache.kylin.shaded.com.google.common.collect.Maps;
-
 /**
  * Stateless & lightweight service facade of cube management functions.
  *
@@ -1026,6 +1027,33 @@ public class CubeService extends BasicService implements InitializingBean {
         return CuboidRecommenderUtil.getRecommendCuboidList(cube, hitFrequencyMap, rollingUpCountSourceMap);
     }
 
+    public TreeMap<String, Double> getQueryLatencyTrendForOptimization(CubeInstance cube, long endTimestamp) {
+        PrepareSqlRequest prepareSqlRequest = SqlCreationUtil.createPrepareSqlRequestOfQueryLatencyTrend(cube,
+                endTimestamp);
+
+        return getTrendForOptimization(prepareSqlRequest);
+    }
+
+    public TreeMap<String, Double> getStorageUsageTrendForOptimization(CubeInstance cube, long endTimestamp) {
+        PrepareSqlRequest prepareSqlRequest = SqlCreationUtil.createPrepareSqlRequestOfStorageUsageTrend(cube,
+                endTimestamp);
+
+        return getTrendForOptimization(prepareSqlRequest);
+    }
+
+    private TreeMap<String, Double> getTrendForOptimization(PrepareSqlRequest prepareSqlRequest) {
+        SQLResponse sqlResponse = queryService.doQueryWithCache(prepareSqlRequest, false);
+        if (sqlResponse.getIsException()) {
+            throw new InternalErrorException("Failed to execute optimization trend SQL\n" + prepareSqlRequest.getSql()
+                    + "\ndue to " + sqlResponse.getExceptionMessage());
+        }
+        TreeMap<String, Double> ret = new TreeMap<>();
+        for (List<String> row : sqlResponse.getResults()) {
+            ret.put(row.get(0), Double.parseDouble(row.get(1)));
+        }
+        return ret;
+    }
+
     public Map<Long, Long> formatQueryCount(List<List<String>> orgQueryCount) {
         Map<Long, Long> formattedQueryCount = Maps.newLinkedHashMap();
         for (List<String> hit : orgQueryCount) {
@@ -1049,59 +1077,25 @@ public class CubeService extends BasicService implements InitializingBean {
     }
 
     public Map<Long, Long> getCuboidHitFrequency(String cubeName, boolean isCuboidSource) {
-        String cuboidColumn = isCuboidSource ? QueryCubePropertyEnum.CUBOID_SOURCE.toString()
-                : QueryCubePropertyEnum.CUBOID_TARGET.toString();
-        String hitMeasure = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
-        String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
-        String sql = "select " + cuboidColumn + ", sum(" + hitMeasure + ")" //
-                + " from " + table//
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
-                + " group by " + cuboidColumn;
-
-        List<List<String>> orgHitFrequency = getPrepareQueryResult(cubeName, sql);
+        PrepareSqlRequest prepareSqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidHitFrequency(cubeName,
+                isCuboidSource);
+
+        List<List<String>> orgHitFrequency = queryService.doQueryWithCache(prepareSqlRequest, false).getResults();
         return formatQueryCount(orgHitFrequency);
     }
 
     public Map<Long, Map<Long, Pair<Long, Long>>> getCuboidRollingUpStats(String cubeName) {
-        String cuboidSource = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
-        String cuboidTgt = QueryCubePropertyEnum.CUBOID_TARGET.toString();
-        String aggCount = QueryCubePropertyEnum.AGGR_COUNT.toString();
-        String returnCount = QueryCubePropertyEnum.RETURN_COUNT.toString();
-        String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
-        String sql = "select " + cuboidSource + ", " + cuboidTgt + ", avg(" + aggCount + "), avg(" + returnCount + ")"//
-                + " from " + table //
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
-                + " group by " + cuboidSource + ", " + cuboidTgt;
-
-        List<List<String>> orgRollingUpCount = getPrepareQueryResult(cubeName, sql);
+        PrepareSqlRequest prepareSqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidRollingUpStats(cubeName);
+
+        List<List<String>> orgRollingUpCount = queryService.doQueryWithCache(prepareSqlRequest, false).getResults();
         return formatRollingUpStats(orgRollingUpCount);
     }
 
     public Map<Long, Long> getCuboidQueryMatchCount(String cubeName) {
-        String cuboidSource = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
-        String hitMeasure = QueryCubePropertyEnum.WEIGHT_PER_HIT.toString();
-        String table = getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQueryCube());
-        String sql = "select " + cuboidSource + ", sum(" + hitMeasure + ")" //
-                + " from " + table //
-                + " where " + QueryCubePropertyEnum.CUBE.toString() + " = ?" //
-                + " and " + QueryCubePropertyEnum.IF_MATCH.toString() + " = true" //
-                + " group by " + cuboidSource;
-
-        List<List<String>> orgMatchHitFrequency = getPrepareQueryResult(cubeName, sql);
-        return formatQueryCount(orgMatchHitFrequency);
-    }
+        PrepareSqlRequest prepareSqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidQueryMatchCount(cubeName);
 
-    private List<List<String>> getPrepareQueryResult(String cubeName, String sql) {
-        PrepareSqlRequest sqlRequest = new PrepareSqlRequest();
-        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
-        PrepareSqlRequest.StateParam[] params = new PrepareSqlRequest.StateParam[1];
-        params[0] = new PrepareSqlRequest.StateParam();
-        params[0].setClassName("java.lang.String");
-        params[0].setValue(cubeName);
-        sqlRequest.setParams(params);
-        sqlRequest.setSql(sql);
-
-        return queryService.doQueryWithCache(sqlRequest, false).getResults();
+        List<List<String>> orgMatchHitFrequency = queryService.doQueryWithCache(prepareSqlRequest, false).getResults();
+        return formatQueryCount(orgMatchHitFrequency);
     }
 
     private class HTableInfoSyncListener extends Broadcaster.Listener {
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
index f622b89..17d51aa 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/DashboardService.java
@@ -18,26 +18,17 @@
 
 package org.apache.kylin.rest.service;
 
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
-import java.util.Locale;
-import java.util.Map;
 
 import org.apache.kylin.cube.CubeInstance;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.metadata.project.RealizationEntry;
 import org.apache.kylin.metadata.realization.RealizationType;
-import org.apache.kylin.metrics.MetricsManager;
-import org.apache.kylin.metrics.lib.impl.TimePropertyEnum;
-import org.apache.kylin.metrics.property.JobPropertyEnum;
-import org.apache.kylin.metrics.property.QueryPropertyEnum;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.BadRequestException;
 import org.apache.kylin.rest.request.PrepareSqlRequest;
 import org.apache.kylin.rest.response.MetricsResponse;
 import org.apache.kylin.rest.response.SQLResponse;
-import org.apache.kylin.storage.hybrid.HybridInstance;
+import org.apache.kylin.rest.util.SqlCreationUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -45,9 +36,6 @@ import org.springframework.security.access.AccessDeniedException;
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
 
-import org.apache.kylin.shaded.com.google.common.base.Strings;
-import org.apache.kylin.shaded.com.google.common.collect.Lists;
-
 @Component("dashboardService")
 public class DashboardService extends BasicService {
 
@@ -56,6 +44,9 @@ public class DashboardService extends BasicService {
     @Autowired
     private CubeService cubeService;
 
+    @Autowired
+    private QueryService queryService;
+
     public MetricsResponse getCubeMetrics(String projectName, String cubeName) {
         MetricsResponse cubeMetrics = new MetricsResponse();
         Float totalCubeSize = 0f;
@@ -95,73 +86,54 @@ public class DashboardService extends BasicService {
         return cubeMetrics;
     }
 
-    private List<CubeInstance> getCubeByHybrid(HybridInstance hybridInstance) {
-        List<CubeInstance> cubeInstances = Lists.newArrayList();
-        List<RealizationEntry> realizationEntries = 
hybridInstance.getRealizationEntries();
-        for (RealizationEntry realizationEntry : realizationEntries) {
-            String reName = realizationEntry.getRealization();
-            if (RealizationType.CUBE == realizationEntry.getType()) {
-                CubeInstance cubeInstance = getCubeManager().getCube(reName);
-                cubeInstances.add(cubeInstance);
-            } else if (RealizationType.HYBRID == realizationEntry.getType()) {
-                HybridInstance innerHybridInstance = 
getHybridManager().getHybridInstance(reName);
-                cubeInstances.addAll(getCubeByHybrid(innerHybridInstance));
-            }
-        }
-        return cubeInstances;
-    };
+    public MetricsResponse getQueryMetrics(String projectName, String cubeName, String startTime, String endTime) {
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfTotalQueryMetrics(projectName,
+                cubeName, startTime, endTime);
+        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest, false);
 
-    public PrepareSqlRequest getQueryMetricsSQLRequest(String startTime, 
String endTime, String projectName,
-            String cubeName) {
-        String[] metrics = new String[] { QueryMetricEnum.QUERY_COUNT.toSQL(),
-                QueryMetricEnum.AVG_QUERY_LATENCY.toSQL(), 
QueryMetricEnum.MAX_QUERY_LATENCY.toSQL(),
-                QueryMetricEnum.MIN_QUERY_LATENCY.toSQL() };
-        Map<String, String> filterMap = getBaseFilterMap(CategoryEnum.QUERY, 
projectName, startTime, endTime);
-        filterMap.putAll(getCubeFilterMap(CategoryEnum.QUERY, cubeName));
-        return createPrepareSqlRequest(null, metrics,
-                
getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery()),
 filterMap);
-    };
+        MetricsResponse queryMetrics = new MetricsResponse();
+        if (!sqlResponse.getIsException()) {
+            List<String> row = sqlResponse.getResults().get(0);
+            queryMetrics.increase("queryCount", getMetricValue(row.get(0)));
+            queryMetrics.increase("avgQueryLatency", 
getMetricValue(row.get(1)));
+            queryMetrics.increase("maxQueryLatency", 
getMetricValue(row.get(2)));
+            queryMetrics.increase("minQueryLatency", 
getMetricValue(row.get(3)));
+        }
 
-    public PrepareSqlRequest getJobMetricsSQLRequest(String startTime, String 
endTime, String projectName,
-            String cubeName) {
-        String[] metrics = new String[] { JobMetricEnum.JOB_COUNT.toSQL(), 
JobMetricEnum.AVG_JOB_BUILD_TIME.toSQL(),
-                JobMetricEnum.MAX_JOB_BUILD_TIME.toSQL(), 
JobMetricEnum.MIN_JOB_BUILD_TIME.toSQL() };
-        Map<String, String> filterMap = getBaseFilterMap(CategoryEnum.JOB, 
projectName, startTime, endTime);
-        filterMap.putAll(getCubeFilterMap(CategoryEnum.JOB, cubeName));
-        return createPrepareSqlRequest(null, metrics,
-                
getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob()),
 filterMap);
+        return queryMetrics;
     }
 
-    public PrepareSqlRequest getChartSQLRequest(String startTime, String 
endTime, String projectName, String cubeName,
-            String dimension, String metric, String category) {
-        try {
-            CategoryEnum categoryEnum = CategoryEnum.valueOf(category);
-            String table = "";
-            String[] dimensionSQL = null;
-            String[] metricSQL = null;
+    public MetricsResponse getJobMetrics(String projectName, String cubeName, String startTime, String endTime) {
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfTotalJobMetrics(projectName,
+                cubeName, startTime, endTime);
+        SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest, false);
 
-            if (categoryEnum == CategoryEnum.QUERY) {
-                dimensionSQL = new String[] { 
QueryDimensionEnum.valueOf(dimension).toSQL() };
-                metricSQL = new String[] { 
QueryMetricEnum.valueOf(metric).toSQL() };
-                table = 
getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectQuery());
-            } else if (categoryEnum == CategoryEnum.JOB) {
-                dimensionSQL = new String[] { 
JobDimensionEnum.valueOf(dimension).toSQL() };
-                metricSQL = new String[] { 
JobMetricEnum.valueOf(metric).toSQL() };
-                table = 
getMetricsManager().getSystemTableFromSubject(getConfig().getKylinMetricsSubjectJob());
-            }
+        MetricsResponse jobMetrics = new MetricsResponse();
+        if (!sqlResponse.getIsException()) {
+            List<String> row = sqlResponse.getResults().get(0);
+            jobMetrics.increase("jobCount", getMetricValue(row.get(0)));
+            jobMetrics.increase("avgJobBuildTime", getMetricValue(row.get(1)));
+            jobMetrics.increase("maxJobBuildTime", getMetricValue(row.get(2)));
+            jobMetrics.increase("minJobBuildTime", getMetricValue(row.get(3)));
+        }
+
+        return jobMetrics;
+    }
 
-            Map<String, String> filterMap = getBaseFilterMap(categoryEnum, 
projectName, startTime, endTime);
-            filterMap.putAll(getCubeFilterMap(categoryEnum, cubeName));
+    public MetricsResponse getChartData(String category, String projectName, String cubeName, String startTime,
+            String endTime, String dimension, String measure) {
+        try {
+            PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfChartMetrics(category, projectName,
+                    cubeName, startTime, endTime, dimension, measure);
+            SQLResponse sqlResponse = queryService.doQueryWithCache(sqlRequest, false);
 
-            return createPrepareSqlRequest(dimensionSQL, metricSQL, table, 
filterMap);
-        } catch (IllegalArgumentException e) {
-            String message = "Generate dashboard chart sql failed. Please 
double check the input parameter: dimension, metric or category.";
-            logger.error(message, e);
-            throw new BadRequestException(message + " Caused by: " + 
e.getMessage(), null, e.getCause());
+            return transformChartData(sqlResponse);
+        } catch (Exception e) {
+            throw new BadRequestException("Bad request due to " + e.getMessage(), null, e);
         }
     }
 
-    public MetricsResponse transformChartData(SQLResponse sqlResponse) {
+    private MetricsResponse transformChartData(SQLResponse sqlResponse) {
         if (!sqlResponse.getIsException()) {
             MetricsResponse metrics = new MetricsResponse();
             List<List<String>> results = sqlResponse.getResults();
@@ -177,7 +149,7 @@ public class DashboardService extends BasicService {
         return null;
     }
 
-    public Float getMetricValue(String value) {
+    private Float getMetricValue(String value) {
         if (value == null || value.isEmpty()) {
             return 0f;
         } else {
@@ -193,178 +165,4 @@ public class DashboardService extends BasicService {
     public void checkAuthorization() throws AccessDeniedException {
     }
 
-    private Map<String, String> getBaseFilterMap(CategoryEnum category, String 
projectName, String startTime,
-            String endTime) {
-        HashMap<String, String> filterMap = new HashMap<>();
-        String project = "";
-        if (category == CategoryEnum.QUERY) {
-            project = QueryDimensionEnum.PROJECT.toSQL();
-        } else {
-            project = JobDimensionEnum.PROJECT.toSQL();
-        }
-        filterMap.put(TimePropertyEnum.DAY_DATE.toString() + " >= ?", 
startTime);
-        filterMap.put(TimePropertyEnum.DAY_DATE.toString() + " <= ?", endTime);
-
-        if (!Strings.isNullOrEmpty(projectName)) {
-            filterMap.put(project + " = ?", 
projectName.toUpperCase(Locale.ROOT));
-        } else {
-            filterMap.put(project + " <> ?", MetricsManager.SYSTEM_PROJECT);
-        }
-        return filterMap;
-    }
-
-    private Map<String, String> getCubeFilterMap(CategoryEnum category, String 
cubeName) {
-        HashMap<String, String> filterMap = new HashMap<>();
-
-        if (category == CategoryEnum.QUERY) {
-            filterMap.put(QueryPropertyEnum.EXCEPTION.toString() + " = ?", 
"NULL");
-
-            if (!Strings.isNullOrEmpty(cubeName)) {
-                filterMap.put(QueryPropertyEnum.REALIZATION + " = ?", 
cubeName);
-            }
-        } else if (category == CategoryEnum.JOB && 
!Strings.isNullOrEmpty(cubeName)) {
-            HybridInstance hybridInstance = 
getHybridManager().getHybridInstance(cubeName);
-            if (null != hybridInstance) {
-                StringBuffer cubeNames = new StringBuffer();
-                for (CubeInstance cube : getCubeByHybrid(hybridInstance)) {
-                    cubeNames.append(",'" + cube.getName() + "'");
-                }
-                filterMap.put(JobPropertyEnum.CUBE.toString() + " IN (?)", 
cubeNames.substring(1));
-            } else {
-                filterMap.put(JobPropertyEnum.CUBE.toString() + " = ?", 
cubeName);
-            }
-        }
-        return filterMap;
-    }
-
-    private PrepareSqlRequest createPrepareSqlRequest(String[] dimensions, 
String[] metrics, String category,
-            Map<String, String> filterMap) {
-        PrepareSqlRequest sqlRequest = new PrepareSqlRequest();
-        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
-        StringBuffer baseSQL = new StringBuffer("select ");
-        StringBuffer groupBy = new StringBuffer("");
-        if (dimensions != null && dimensions.length > 0) {
-            groupBy.append(" group by ");
-            StringBuffer dimensionSQL = new StringBuffer("");
-            for (String dimension : dimensions) {
-                dimensionSQL.append(",");
-                dimensionSQL.append(dimension);
-            }
-            baseSQL.append(dimensionSQL.substring(1));
-            groupBy.append(dimensionSQL.substring(1));
-        }
-        if (metrics != null && metrics.length > 0) {
-            StringBuffer metricSQL = new StringBuffer("");
-            for (String metric : metrics) {
-                metricSQL.append(",");
-                metricSQL.append(metric);
-            }
-            if (groupBy.length() > 0) {
-                baseSQL.append(metricSQL);
-            } else {
-                baseSQL.append(metricSQL.substring(1));
-            }
-        }
-        baseSQL.append(" from ");
-        baseSQL.append(category);
-        if (filterMap != null && filterMap.size() > 0) {
-            PrepareSqlRequest.StateParam[] params = new 
PrepareSqlRequest.StateParam[filterMap.size()];
-            int i = 0;
-            StringBuffer filterSQL = new StringBuffer(" where ");
-            Iterator<String> it = filterMap.keySet().iterator();
-            String filter = it.next();
-            filterSQL.append(filter);
-            params[i] = new PrepareSqlRequest.StateParam();
-            params[i].setClassName("java.lang.String");
-            params[i++].setValue(filterMap.get(filter));
-
-            while (it.hasNext()) {
-                filter = it.next();
-                filterSQL.append(" and ");
-                filterSQL.append(filter);
-                params[i] = new PrepareSqlRequest.StateParam();
-                params[i].setClassName("java.lang.String");
-                params[i++].setValue(filterMap.get(filter));
-            }
-            baseSQL.append(filterSQL.toString());
-            sqlRequest.setParams(params);
-        }
-        baseSQL.append(groupBy);
-        sqlRequest.setSql(baseSQL.toString());
-        return sqlRequest;
-    }
-
-    private enum CategoryEnum {
-        QUERY, JOB
-    }
-
-    private enum QueryDimensionEnum {
-        PROJECT(QueryPropertyEnum.PROJECT.toString()), //
-        CUBE(QueryPropertyEnum.REALIZATION.toString()), //
-        DAY(TimePropertyEnum.DAY_DATE.toString()), //
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
-        MONTH(TimePropertyEnum.MONTH.toString());
-
-        private final String sql;
-
-        QueryDimensionEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
-
-    private enum JobDimensionEnum {
-        PROJECT(JobPropertyEnum.PROJECT.toString()), //
-        CUBE(JobPropertyEnum.CUBE.toString()), //
-        DAY(TimePropertyEnum.DAY_DATE.toString()), //
-        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
-        MONTH(TimePropertyEnum.MONTH.toString());
-
-        private final String sql;
-
-        JobDimensionEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
-
-    private enum QueryMetricEnum {
-        QUERY_COUNT("count(*)"), //
-        AVG_QUERY_LATENCY("avg(" + QueryPropertyEnum.TIME_COST.toString() + 
")"), //
-        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + 
")"), //
-        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + 
")");
-
-        private final String sql;
-
-        QueryMetricEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
-
-    private enum JobMetricEnum {
-        JOB_COUNT("count(*)"), //
-        AVG_JOB_BUILD_TIME("avg(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
-        MAX_JOB_BUILD_TIME("max(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
-        MIN_JOB_BUILD_TIME("min(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
-
-        private final String sql;
-
-        JobMetricEnum(String sql) {
-            this.sql = sql;
-        }
-
-        public String toSQL() {
-            return this.sql;
-        }
-    }
 }
\ No newline at end of file
diff --git a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
index 6c38f65..625b688 100644
--- a/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/server-base/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -386,13 +386,6 @@ public class QueryService extends BasicService {
         logger.info(stringBuilder.toString());
     }
 
-    public SQLResponse querySystemCube(String sql) {
-        SQLRequest sqlRequest = new SQLRequest();
-        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
-        sqlRequest.setSql(sql);
-        return doQueryWithCache(sqlRequest, false);
-    }
-
     public SQLResponse doQueryWithCache(SQLRequest sqlRequest) {
         long t = System.currentTimeMillis();
         aclEvaluate.checkProjectReadPermission(sqlRequest.getProject());
diff --git a/server-base/src/main/java/org/apache/kylin/rest/util/SqlCreationUtil.java b/server-base/src/main/java/org/apache/kylin/rest/util/SqlCreationUtil.java
new file mode 100644
index 0000000..4b6acf9
--- /dev/null
+++ b/server-base/src/main/java/org/apache/kylin/rest/util/SqlCreationUtil.java
@@ -0,0 +1,388 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.util;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.StringUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.metrics.MetricsManager;
+import org.apache.kylin.metrics.lib.impl.RecordEventTimeDetail;
+import org.apache.kylin.metrics.lib.impl.TimePropertyEnum;
+import org.apache.kylin.metrics.property.JobPropertyEnum;
+import org.apache.kylin.metrics.property.QueryCubePropertyEnum;
+import org.apache.kylin.metrics.property.QueryPropertyEnum;
+import org.apache.kylin.rest.request.PrepareSqlRequest;
+import org.apache.kylin.rest.request.PrepareSqlRequest.StateParam;
+import org.apache.kylin.shaded.com.google.common.collect.Lists;
+import org.apache.kylin.shaded.com.google.common.collect.Maps;
+
+public class SqlCreationUtil {
+
+    public static PrepareSqlRequest createPrepareSqlRequestOfChartMetrics(String category, String projectName,
+            String cubeName, String startTime, String endTime, String dimension, String measure) throws Exception {
+        CategoryEnum categoryEnum = CategoryEnum.valueOf(category);
+        switch (categoryEnum) {
+        case QUERY:
+            String[] dimensionsQuery = new String[] { QueryDimensionEnum.valueOf(dimension).toSQL() };
+            String[] measuresQuery = new String[] { QueryMeasureEnum.valueOf(measure).toSQL() };
+            return createPrepareSqlRequestOfQueryMetrics(projectName, cubeName, startTime, endTime, dimensionsQuery,
+                    measuresQuery);
+        case JOB:
+            String[] dimensionsJob = new String[] { JobDimensionEnum.valueOf(dimension).toSQL() };
+            String[] measuresJob = new String[] { JobMeasureEnum.valueOf(measure).toSQL() };
+            return createPrepareSqlRequestOfJobMetrics(projectName, cubeName, startTime, endTime, dimensionsJob,
+                    measuresJob);
+        default:
+            throw new Exception("Category should be either QUERY or JOB");
+        }
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfTotalQueryMetrics(String projectName, String cubeName,
+            String startTime, String endTime) {
+        String[] measures = new String[] { 
QueryMeasureEnum.QUERY_COUNT.toSQL(),
+                QueryMeasureEnum.AVG_QUERY_LATENCY.toSQL(), 
QueryMeasureEnum.MAX_QUERY_LATENCY.toSQL(),
+                QueryMeasureEnum.MIN_QUERY_LATENCY.toSQL() };
+
+        return createPrepareSqlRequestOfQueryMetrics(projectName, cubeName, 
startTime, endTime, null, measures);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfQueryMetrics(String projectName, String cubeName,
+            String startTime, String endTime, String[] dimensions, String[] 
measures) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQuery());
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        if (StringUtil.isEmpty(projectName)) {
+            addFilter(filterMap, QueryPropertyEnum.PROJECT.toString(), "<>", 
MetricsManager.SYSTEM_PROJECT,
+                    String.class.getName());
+        } else {
+            addFilter(filterMap, QueryPropertyEnum.PROJECT.toString(), "=", 
projectName.toUpperCase(Locale.ROOT),
+                    String.class.getName());
+        }
+        if (!StringUtil.isEmpty(cubeName)) {
+            addFilter(filterMap, QueryPropertyEnum.REALIZATION.toString(), 
"=", cubeName, String.class.getName());
+        }
+        addFilter(filterMap, QueryPropertyEnum.EXCEPTION.toString(), "=", 
"NULL", String.class.getName());
+
+        addFilter(filterMap, TimePropertyEnum.DAY_DATE.toString(), ">=", 
startTime, String.class.getName());
+        addFilter(filterMap, TimePropertyEnum.DAY_DATE.toString(), "<=", 
endTime, String.class.getName());
+
+        return SqlCreationUtil.createPrepareSqlRequest(table, dimensions, 
measures, filterMap);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfTotalJobMetrics(String projectName, String cubeName,
+            String startTime, String endTime) {
+        String[] measures = new String[] { JobMeasureEnum.JOB_COUNT.toSQL(), 
JobMeasureEnum.AVG_JOB_BUILD_TIME.toSQL(),
+                JobMeasureEnum.MAX_JOB_BUILD_TIME.toSQL(), 
JobMeasureEnum.MIN_JOB_BUILD_TIME.toSQL() };
+
+        return createPrepareSqlRequestOfJobMetrics(projectName, cubeName, 
startTime, endTime, null, measures);
+    }
+
+    public static PrepareSqlRequest createPrepareSqlRequestOfJobMetrics(String 
projectName, String cubeName,
+            String startTime, String endTime, String[] dimensions, String[] 
measures) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectJob());
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        if (StringUtil.isEmpty(projectName)) {
+            addFilter(filterMap, JobPropertyEnum.PROJECT.toString(), "<>", 
MetricsManager.SYSTEM_PROJECT,
+                    String.class.getName());
+        } else {
+            addFilter(filterMap, JobPropertyEnum.PROJECT.toString(), "=", 
projectName.toUpperCase(Locale.ROOT),
+                    String.class.getName());
+        }
+        if (!StringUtil.isEmpty(cubeName)) {
+            addFilter(filterMap, JobPropertyEnum.CUBE.toString(), "IN", 
cubeName, String.class.getName());
+        }
+
+        addFilter(filterMap, TimePropertyEnum.DAY_DATE.toString(), ">=", 
startTime, String.class.getName());
+        addFilter(filterMap, TimePropertyEnum.DAY_DATE.toString(), "<=", 
endTime, String.class.getName());
+
+        return SqlCreationUtil.createPrepareSqlRequest(table, dimensions, 
measures, filterMap);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfCuboidHitFrequency(String cubeName,
+            boolean isCuboidSource) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
+
+        String[] dimensions = new String[1];
+        dimensions[0] = isCuboidSource ? 
QueryCubePropertyEnum.CUBOID_SOURCE.toString()
+                : QueryCubePropertyEnum.CUBOID_TARGET.toString();
+
+        String[] measures = new String[1];
+        measures[0] = "sum(" + QueryCubePropertyEnum.WEIGHT_PER_HIT.toString() 
+ ")";
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        addFilter(filterMap, QueryCubePropertyEnum.CUBE.toString(), "=", 
cubeName, String.class.getName());
+
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfCuboidRollingUpStats(String cubeName) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
+
+        String[] dimensions = new String[2];
+        dimensions[0] = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
+        dimensions[1] = QueryCubePropertyEnum.CUBOID_TARGET.toString();
+
+        String[] measures = new String[2];
+        measures[0] = "avg(" + QueryCubePropertyEnum.AGGR_COUNT.toString() + 
")";
+        measures[1] = "avg(" + QueryCubePropertyEnum.RETURN_COUNT.toString() + 
")";
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        addFilter(filterMap, QueryCubePropertyEnum.CUBE.toString(), "=", 
cubeName, String.class.getName());
+
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfCuboidQueryMatchCount(String cubeName) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQueryCube());
+
+        String[] dimensions = new String[1];
+        dimensions[0] = QueryCubePropertyEnum.CUBOID_SOURCE.toString();
+
+        String[] measures = new String[1];
+        measures[0] = "sum(" + QueryCubePropertyEnum.WEIGHT_PER_HIT.toString() 
+ ")";
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        addFilter(filterMap, QueryCubePropertyEnum.CUBE.toString(), "=", 
cubeName, String.class.getName());
+        addFilter(filterMap, QueryCubePropertyEnum.IF_MATCH.toString(), "=", 
"true", Boolean.class.getName());
+
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfQueryLatencyTrend(CubeInstance cube, long 
endTimestamp) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectQuery());
+
+        String[] groupBys = new String[1];
+        String[] dimensions = new String[1];
+        groupBys[0] = getTimeSlotStringForOptimizationTrend(cube, 
endTimestamp);
+        dimensions[0] = groupBys[0] + " as time_slot";
+
+        String[] measures = new String[1];
+        measures[0] = "avg(" + QueryPropertyEnum.TIME_COST.toString() + ") as 
query_latency";
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        addFilter(filterMap, QueryPropertyEnum.REALIZATION.toString(), "=", 
cube.getName(), String.class.getName());
+
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap, 
groupBys, groupBys);
+    }
+
+    public static PrepareSqlRequest 
createPrepareSqlRequestOfStorageUsageTrend(CubeInstance cube, long 
endTimestamp) {
+        String table = MetricsManager
+                
.getSystemTableFromSubject(KylinConfig.getInstanceFromEnv().getKylinMetricsSubjectJob());
+
+        String[] groupBys = new String[1];
+        String[] dimensions = new String[1];
+        groupBys[0] = getTimeSlotStringForOptimizationTrend(cube, 
endTimestamp);
+        dimensions[0] = groupBys[0] + " as time_slot";
+
+        String[] measures = new String[1];
+        measures[0] = getExpansionRateMetric() + " as expansion_rate";
+
+        Map<String, StateParam> filterMap = Maps.newHashMap();
+        addFilter(filterMap, JobPropertyEnum.CUBE.toString(), "=", 
cube.getName(), String.class.getName());
+
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap, 
groupBys, groupBys);
+    }
+
+    private static String getExpansionRateMetric() {
+        String sourceSizeStr = JobPropertyEnum.SOURCE_SIZE.toString();
+        String cubeSizeStr = JobPropertyEnum.CUBE_SIZE.toString();
+        return String.format(Locale.ROOT, "(case \n when sum(%s) = 0 then -1 
\n else 1.0 * sum(%s) / sum(%s) \n end)",
+                sourceSizeStr, cubeSizeStr, sourceSizeStr);
+    }
+
+    private static String getTimeSlotStringForOptimizationTrend(CubeInstance cube, long endTimestamp) {
+        List<Long> optTimeList = cube.getCuboidOptimizedTimestamps();
+        List<RecordEventTimeDetail> optTimeSerial = optTimeList.stream().map(RecordEventTimeDetail::new)
+                .collect(Collectors.toList());
+        RecordEventTimeDetail endTime = new RecordEventTimeDetail(endTimestamp);
+        String endTimeStr = String.format(Locale.ROOT, "'%s %02d:00:00'", endTime.date, endTime.hour);
+        if (optTimeSerial.isEmpty()) {
+            return endTimeStr;
+        }
+        String dayDateStr = TimePropertyEnum.DAY_DATE.toString();
+        String timeHourStr = TimePropertyEnum.TIME_HOUR.toString();
+        StringBuilder sb = new StringBuilder();
+        sb.append("(case \n");
+        for (RecordEventTimeDetail optTime : optTimeSerial) {
+            String whenStr = String.format(Locale.ROOT,
+                    "  when %s < '%s' or (%s = '%s' and %s < %d) then '%s %02d:00:00'\n", dayDateStr, optTime.date,
+                    dayDateStr, optTime.date, timeHourStr, optTime.hour, optTime.date, optTime.hour);
+            sb.append(whenStr);
+        }
+        String elseStr = String.format(Locale.ROOT, "  else %s\n", endTimeStr);
+        sb.append(elseStr);
+        sb.append("end) \n");
+        return sb.toString();
+    }
+
+    private static PrepareSqlRequest createPrepareSqlRequest(String table, 
String[] dimensions, String[] measures,
+            Map<String, StateParam> filterMap) {
+        return createPrepareSqlRequest(table, dimensions, measures, filterMap, 
dimensions, null);
+    }
+
+    private static PrepareSqlRequest createPrepareSqlRequest(String table, 
String[] dimensions, String[] measures,
+            Map<String, StateParam> filterMap, String[] groupBys, String[] 
orderBys) {
+        PrepareSqlRequest sqlRequest = new PrepareSqlRequest();
+        sqlRequest.setProject(MetricsManager.SYSTEM_PROJECT);
+
+        StringBuilder sqlBuilder = new StringBuilder();
+
+        String dimPart = concatElements(dimensions);
+        String measurePart = concatElements(measures);
+
+        String filterPart = "";
+        if (filterMap != null && !filterMap.isEmpty()) {
+            List<Map.Entry<String, StateParam>> filterList = 
Lists.newArrayList(filterMap.entrySet());
+
+            filterPart = 
filterList.stream().map(Map.Entry::getKey).collect(Collectors.joining(" and "));
+
+            StateParam[] params = 
filterList.stream().map(Map.Entry::getValue).collect(Collectors.toList())
+                    .toArray(new StateParam[filterMap.size()]);
+            sqlRequest.setParams(params);
+        }
+
+        String groupByPart = concatElements(groupBys);
+        String orderByPart = concatElements(orderBys);
+
+        sqlBuilder.append("select ");
+        if (!dimPart.isEmpty()) {
+            sqlBuilder.append(dimPart);
+        }
+        if (!measurePart.isEmpty()) {
+            if (!dimPart.isEmpty()) {
+                sqlBuilder.append(", ");
+            }
+            sqlBuilder.append(measurePart);
+        }
+        sqlBuilder.append("\n").append("from ").append(table).append("\n");
+        if (!filterPart.isEmpty()) {
+            sqlBuilder.append("where ").append(filterPart).append("\n");
+        }
+        if (!groupByPart.isEmpty()) {
+            sqlBuilder.append("group by ").append(groupByPart).append("\n");
+        }
+        if (!orderByPart.isEmpty()) {
+            sqlBuilder.append("order by ").append(orderByPart).append("\n");
+        }
+
+        sqlRequest.setSql(sqlBuilder.toString());
+
+        return sqlRequest;
+    }
+
+    private static String concatElements(String[] elements) {
+        return elements != null && elements.length > 0 ? StringUtil.join(Arrays.asList(elements), ", ") : "";
+    }
+
+    private static void addFilter(Map<String, StateParam> filterMap, String keyName, String compareSign, String value,
+            String className) {
+        StateParam stateParam = new StateParam();
+        stateParam.setClassName(className);
+        stateParam.setValue(value);
+        String mark = compareSign.equalsIgnoreCase("IN") ? "(?)" : "?";
+        filterMap.put(String.format(Locale.ROOT, "%s %s %s", keyName, compareSign, mark), stateParam);
+    }
+
+    private enum CategoryEnum {
+        QUERY, JOB
+    }
+
+    private enum QueryDimensionEnum {
+        PROJECT(QueryPropertyEnum.PROJECT.toString()), //
+        CUBE(QueryPropertyEnum.REALIZATION.toString()), //
+        DAY(TimePropertyEnum.DAY_DATE.toString()), //
+        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
+        MONTH(TimePropertyEnum.MONTH.toString());
+
+        private final String sql;
+
+        QueryDimensionEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum JobDimensionEnum {
+        PROJECT(JobPropertyEnum.PROJECT.toString()), //
+        CUBE(JobPropertyEnum.CUBE.toString()), //
+        DAY(TimePropertyEnum.DAY_DATE.toString()), //
+        WEEK(TimePropertyEnum.WEEK_BEGIN_DATE.toString()), //
+        MONTH(TimePropertyEnum.MONTH.toString());
+
+        private final String sql;
+
+        JobDimensionEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum QueryMeasureEnum {
+        QUERY_COUNT("count(*)"), //
+        AVG_QUERY_LATENCY("avg(" + QueryPropertyEnum.TIME_COST.toString() + 
")"), //
+        MAX_QUERY_LATENCY("max(" + QueryPropertyEnum.TIME_COST.toString() + 
")"), //
+        MIN_QUERY_LATENCY("min(" + QueryPropertyEnum.TIME_COST.toString() + 
")");
+
+        private final String sql;
+
+        QueryMeasureEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+
+    private enum JobMeasureEnum {
+        JOB_COUNT("count(*)"), //
+        AVG_JOB_BUILD_TIME("avg(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
+        MAX_JOB_BUILD_TIME("max(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")"), //
+        MIN_JOB_BUILD_TIME("min(" + 
JobPropertyEnum.PER_BYTES_TIME_COST.toString() + ")");
+
+        private final String sql;
+
+        JobMeasureEnum(String sql) {
+            this.sql = sql;
+        }
+
+        public String toSQL() {
+            return this.sql;
+        }
+    }
+}
diff --git a/server-base/src/test/java/org/apache/kylin/rest/util/SqlCreationUtilTest.java b/server-base/src/test/java/org/apache/kylin/rest/util/SqlCreationUtilTest.java
new file mode 100644
index 0000000..aae31c1
--- /dev/null
+++ b/server-base/src/test/java/org/apache/kylin/rest/util/SqlCreationUtilTest.java
@@ -0,0 +1,240 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.util;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.kylin.common.util.LocalFileMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.StringUtil;
+import org.apache.kylin.cube.CubeInstance;
+import org.apache.kylin.metrics.MetricsManager;
+import org.apache.kylin.metrics.lib.ActiveReservoir;
+import org.apache.kylin.metrics.lib.Sink;
+import org.apache.kylin.metrics.lib.impl.hive.HiveSink;
+import org.apache.kylin.rest.request.PrepareSqlRequest;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+public class SqlCreationUtilTest extends LocalFileMetadataTestCase {
+
+    private static final String projectName = "project";
+    private static final String cubeName = "cube";
+    private static final String startTime = "2020-06-10";
+    private static final String endTime = "2020-06-20";
+
+    private static final Sink sink = new HiveSink();
+
+    private static String eventTimeZone;
+
+    @BeforeClass
+    public static void setUp() {
+        staticCreateTestMetadata();
+
+        eventTimeZone = System.getProperty("kylin.metrics.event-time-zone");
+        System.setProperty("kylin.metrics.event-time-zone", "GMT");
+
+        Map<ActiveReservoir, List<Pair<String, Properties>>> sourceReporterBindProperties = new HashMap<>();
+        MetricsManager.initMetricsManager(sink, sourceReporterBindProperties);
+    }
+
+    @AfterClass
+    public static void after() throws Exception {
+        staticCleanupTestMetadata();
+
+        if (!StringUtil.isEmpty(eventTimeZone)) {
+            System.setProperty("kylin.metrics.event-time-zone", eventTimeZone);
+        }
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfChartMetrics() {
+        final String expectedQuerySql = "select REALIZATION, 
avg(QUERY_TIME_COST)\n"
+                + "from KYLIN.HIVE_METRICS_QUERY_DEV\n"
+                + "where PROJECT = ? and EXCEPTION = ? and KDAY_DATE >= ? and 
KDAY_DATE <= ? and REALIZATION = ?\n"
+                + "group by REALIZATION\n";
+
+        String categoryQ = "QUERY";
+        String dimensionQ = "CUBE";
+        String measureQ = "AVG_QUERY_LATENCY";
+        try {
+            PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfChartMetrics(categoryQ, projectName,
+                    cubeName, startTime, endTime, dimensionQ, measureQ);
+            Assert.assertEquals(expectedQuerySql, sqlRequest.getSql());
+        } catch (Exception e) {
+            Assert.fail("category is not correct");
+        }
+
+        final String expectedJobSql = "select CUBE_NAME, 
avg(PER_BYTES_TIME_COST)\n"
+                + "from KYLIN.HIVE_METRICS_JOB_DEV\n"
+                + "where PROJECT = ? and CUBE_NAME IN (?) and KDAY_DATE >= ? 
and KDAY_DATE <= ?\n"
+                + "group by CUBE_NAME\n";
+
+        String categoryJ = "JOB";
+        String dimensionJ = "CUBE";
+        String measureJ = "AVG_JOB_BUILD_TIME";
+        try {
+            PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfChartMetrics(categoryJ, projectName,
+                    cubeName, startTime, endTime, dimensionJ, measureJ);
+            Assert.assertEquals(expectedJobSql, sqlRequest.getSql());
+        } catch (Exception e) {
+            Assert.fail("category is not correct");
+        }
+
+        String categoryBad = "JOB1";
+        try {
+            SqlCreationUtil.createPrepareSqlRequestOfChartMetrics(categoryBad, projectName, cubeName, startTime,
+                    endTime, dimensionJ, measureJ);
+            Assert.fail("category is not correct");
+        } catch (Exception e) {
+        }
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfTotalQueryMetrics() {
+        final String expectedSql = "select count(*), avg(QUERY_TIME_COST), 
max(QUERY_TIME_COST), min(QUERY_TIME_COST)\n"
+                + "from KYLIN.HIVE_METRICS_QUERY_DEV\n"
+                + "where PROJECT = ? and EXCEPTION = ? and KDAY_DATE >= ? and 
KDAY_DATE <= ? and REALIZATION = ?\n";
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfTotalQueryMetrics(projectName, cubeName,
+                startTime, endTime);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfTotalJobMetrics() {
+        final String expectedSql = "select count(*), avg(PER_BYTES_TIME_COST), 
max(PER_BYTES_TIME_COST), min(PER_BYTES_TIME_COST), (case \n"
+                + " when sum(TABLE_SIZE) = 0 then -1 \n" + " else 1.0 * 
sum(CUBE_SIZE) / sum(TABLE_SIZE) \n" + " end)\n"
+                + "from KYLIN.HIVE_METRICS_JOB_DEV\n"
+                + "where PROJECT = ? and CUBE_NAME IN (?) and KDAY_DATE >= ? 
and KDAY_DATE <= ?\n";
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfTotalJobMetrics(projectName, cubeName,
+                startTime, endTime);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfCuboidHitFrequency() {
+        {
+            final String expectedSql = "select CUBOID_TARGET, 
sum(WEIGHT_PER_HIT)\n"
+                    + "from KYLIN.HIVE_METRICS_QUERY_CUBE_DEV\n" + "where 
CUBE_NAME = ?\n" + "group by CUBOID_TARGET\n";
+
+            PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidHitFrequency(cubeName, false);
+            Assert.assertEquals(expectedSql, sqlRequest.getSql());
+        }
+
+        {
+            final String expectedSql = "select CUBOID_SOURCE, 
sum(WEIGHT_PER_HIT)\n"
+                    + "from KYLIN.HIVE_METRICS_QUERY_CUBE_DEV\n" + "where 
CUBE_NAME = ?\n" + "group by CUBOID_SOURCE\n";
+
+            PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidHitFrequency(cubeName, true);
+            Assert.assertEquals(expectedSql, sqlRequest.getSql());
+        }
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfCuboidRollingUpStats() {
+        final String expectedSql = "select CUBOID_SOURCE, CUBOID_TARGET, 
avg(STORAGE_COUNT_AGGREGATE), avg(STORAGE_COUNT_RETURN)\n"
+                + "from KYLIN.HIVE_METRICS_QUERY_CUBE_DEV\n" + "where 
CUBE_NAME = ?\n"
+                + "group by CUBOID_SOURCE, CUBOID_TARGET\n";
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidRollingUpStats(cubeName);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfCuboidQueryMatchCount() {
+        final String expectedSql = "select CUBOID_SOURCE, 
sum(WEIGHT_PER_HIT)\n"
+                + "from KYLIN.HIVE_METRICS_QUERY_CUBE_DEV\n" + "where IF_MATCH 
= ? and CUBE_NAME = ?\n"
+                + "group by CUBOID_SOURCE\n";
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfCuboidQueryMatchCount(cubeName);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfQueryLatencyTrend() {
+        final String expectedSql = "select (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n"
+                + " as time_slot, avg(QUERY_TIME_COST) as query_latency\n" + 
"from KYLIN.HIVE_METRICS_QUERY_DEV\n"
+                + "where REALIZATION = ?\n" + "group by (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n" + "\n" + "order 
by (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n\n";
+
+        List<Long> optTimeList = new ArrayList<>();
+        optTimeList.add(1000000L);
+        optTimeList.add(20000000L);
+        optTimeList.add(100000000L);
+        CubeInstance cubeInstance = Mockito.mock(CubeInstance.class);
+        Mockito.when(cubeInstance.getName()).thenReturn(cubeName);
+        Mockito.when(cubeInstance.getCuboidOptimizedTimestamps()).thenReturn(optTimeList);
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfQueryLatencyTrend(cubeInstance,
+                200000000L);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+
+    @Test
+    public void testCreatePrepareSqlRequestOfStorageUsageTrend() {
+        final String expectedSql = "select (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n" + " as 
time_slot, (case \n"
+                + " when sum(TABLE_SIZE) = 0 then -1 \n" + " else 1.0 * 
sum(CUBE_SIZE) / sum(TABLE_SIZE) \n"
+                + " end) as expansion_rate\n" + "from 
KYLIN.HIVE_METRICS_JOB_DEV\n" + "where CUBE_NAME = ?\n"
+                + "group by (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n" + "\n" + "order 
by (case \n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 0) then '1970-01-01 00:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-01' or (KDAY_DATE = 
'1970-01-01' and KTIME_HOUR < 5) then '1970-01-01 05:00:00'\n"
+                + "  when KDAY_DATE < '1970-01-02' or (KDAY_DATE = 
'1970-01-02' and KTIME_HOUR < 3) then '1970-01-02 03:00:00'\n"
+                + "  else '1970-01-03 07:00:00'\n" + "end) \n\n";
+
+        List<Long> optTimeList = new ArrayList<>();
+        optTimeList.add(1000000L);
+        optTimeList.add(20000000L);
+        optTimeList.add(100000000L);
+        CubeInstance cubeInstance = Mockito.mock(CubeInstance.class);
+        Mockito.when(cubeInstance.getName()).thenReturn(cubeName);
+        Mockito.when(cubeInstance.getCuboidOptimizedTimestamps()).thenReturn(optTimeList);
+
+        PrepareSqlRequest sqlRequest = SqlCreationUtil.createPrepareSqlRequestOfStorageUsageTrend(cubeInstance,
+                200000000L);
+        Assert.assertEquals(expectedSql, sqlRequest.getSql());
+    }
+}
