Repository: incubator-kylin
Updated Branches:
  refs/heads/KYLIN-942 b8b3c02cf -> 84ad08c12


Round the region (shard) count up, so that an estimated size between 1 and 2 regions is allocated 2 regions


Project: http://git-wip-us.apache.org/repos/asf/incubator-kylin/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-kylin/commit/84ad08c1
Tree: http://git-wip-us.apache.org/repos/asf/incubator-kylin/tree/84ad08c1
Diff: http://git-wip-us.apache.org/repos/asf/incubator-kylin/diff/84ad08c1

Branch: refs/heads/KYLIN-942
Commit: 84ad08c1260c1abcc57e414e8ad13f12ae8b8fc0
Parents: b8b3c02
Author: honma <ho...@ebay.com>
Authored: Tue Sep 29 15:09:06 2015 +0800
Committer: honma <ho...@ebay.com>
Committed: Tue Sep 29 15:09:06 2015 +0800

----------------------------------------------------------------------
 .../org/apache/kylin/storage/hbase/steps/CreateHTableJob.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-kylin/blob/84ad08c1/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
----------------------------------------------------------------------
diff --git a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
index 6a4373e..24d6e08 100644
--- a/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
+++ b/storage-hbase/src/main/java/org/apache/kylin/storage/hbase/steps/CreateHTableJob.java
@@ -257,8 +257,8 @@ public class CreateHTableJob extends AbstractHadoopJob {
         Collections.sort(allCuboids);
 
         Map<Long, Double> cubeSizeMap = Maps.newHashMap();
-        for (Map.Entry<Long, Long> entry : cubeRowCountMap.entrySet()) {
-            cubeSizeMap.put(entry.getKey(),estimateCuboidStorageSize(cubeDesc, entry.getKey(), entry.getValue(), baseCuboidId, rowkeyColumnSize));
+        for (Map.Entry<Long, Long> entry : cubeRowCountMap.entrySet()) {
+            cubeSizeMap.put(entry.getKey(), estimateCuboidStorageSize(cubeDesc, entry.getKey(), entry.getValue(), baseCuboidId, rowkeyColumnSize));
         }
 
         for (Double cuboidSize : cubeSizeMap.values()) {
@@ -298,7 +298,7 @@ public class CreateHTableJob extends AbstractHadoopJob {
             for (long cuboidId : allCuboids) {
                 double estimatedSize = cubeSizeMap.get(cuboidId);
                 double magic = 23;
-                int shardNum = (int) (1.0 * estimatedSize * magic / mbPerRegion);
+                int shardNum = (int) (1.0 * estimatedSize * magic / mbPerRegion + 1);
                 if (shardNum < 1) {
                     shardNum = 1;
                 }

Reply via email to