LENS-871: Fix issue where dropping any partition in a dimtable clears the
latest cache for that dimtable.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/87049563
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/87049563
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/87049563

Branch: refs/heads/master
Commit: 87049563a4cbc20cf510f6906bf67ddd330ef508
Parents: f7ab827
Author: Rajat Khandelwal <pro...@apache.org>
Authored: Wed Nov 25 16:53:23 2015 +0530
Committer: Amareshwari Sriramadasu <amareshw...@apache.org>
Committed: Wed Nov 25 16:53:23 2015 +0530

----------------------------------------------------------------------
 .../lens/cube/metadata/CubeFactTable.java       |    2 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |  273 ++-
 .../lens/cube/metadata/FactPartition.java       |    2 +-
 .../lens/cube/metadata/MetastoreUtil.java       |  122 +-
 .../org/apache/lens/cube/metadata/Storage.java  |    2 +-
 .../cube/metadata/StoragePartitionDesc.java     |    2 +-
 .../lens/cube/metadata/StorageTableDesc.java    |   21 +
 .../lens/cube/metadata/TimePartition.java       |    4 +-
 .../apache/lens/cube/metadata/UpdatePeriod.java |   42 +-
 .../org/apache/lens/cube/parse/DateUtil.java    |   13 -
 .../cube/metadata/TestCubeMetastoreClient.java  | 1740 +++++++-----------
 .../lens/cube/metadata/TestFactPartition.java   |    4 +-
 .../lens/cube/metadata/TestTimePartition.java   |    2 +-
 .../lens/cube/metadata/UpdatePeriodTest.java    |    2 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |   52 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |    4 +-
 .../cube/parse/TestBetweenTimeRangeWriter.java  |    2 +-
 .../lens/cube/parse/TestORTimeRangeWriter.java  |   14 +-
 .../apache/lens/driver/hive/TestHiveDriver.java |    2 +-
 .../apache/lens/server/api/util/LensUtil.java   |   10 +
 .../metastore/CubeMetastoreServiceImpl.java     |    8 +-
 .../apache/lens/server/metastore/JAXBUtils.java |    2 +-
 .../server/metastore/TestMetastoreService.java  |  343 ++--
 23 files changed, 1167 insertions(+), 1501 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
index a7a5bb0..d6bfb79 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeFactTable.java
@@ -154,7 +154,7 @@ public class CubeFactTable extends AbstractCubeTable {
     List<String> partitions = new ArrayList<String>();
     Date dt = cal.getTime();
     while (dt.compareTo(toDate) < 0) {
-      String part = interval.format().format(cal.getTime());
+      String part = interval.format(cal.getTime());
       partitions.add(part);
       cal.add(interval.calendarField(), 1);
       dt = cal.getTime();

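For context on the hunk above: the loop enumerates one partition string per
update-period step between two dates, and the change routes formatting through
the new cached UpdatePeriod.format(Date). A minimal standalone sketch of that
loop, with a plain SimpleDateFormat and an assumed daily yyyy-MM-dd pattern
standing in for the cached formatter:

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;

public class PartitionRangeSketch {
  // Enumerate daily partition strings in [fromDate, toDate), mirroring the
  // CubeFactTable loop above; the date pattern is an assumption for
  // illustration only.
  public static List<String> dailyPartitions(Date fromDate, Date toDate) {
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
    List<String> partitions = new ArrayList<String>();
    Calendar cal = Calendar.getInstance();
    cal.setTime(fromDate);
    Date dt = cal.getTime();
    while (dt.compareTo(toDate) < 0) {
      partitions.add(fmt.format(cal.getTime()));
      cal.add(Calendar.DAY_OF_MONTH, 1);
      dt = cal.getTime();
    }
    return partitions;
  }
}
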
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 1f13617..e7550ca 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -19,7 +19,7 @@
 
 package org.apache.lens.cube.metadata;
 
-import static 
org.apache.lens.cube.metadata.MetastoreUtil.getFactOrDimtableStorageTableName;
+import static org.apache.lens.cube.metadata.MetastoreUtil.*;
 
 import java.text.ParseException;
 import java.util.*;
@@ -31,6 +31,7 @@ import 
org.apache.lens.cube.metadata.Storage.LatestPartColumnInfo;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimelineFactory;
 import org.apache.lens.server.api.error.LensException;
+import org.apache.lens.server.api.util.LensUtil;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -103,9 +104,9 @@ public class CubeMetastoreClient {
    * latest date for a single fact-storage table for given time dimension is 
the latest of the latest dates for all its
    * update periods
    *
-   * @param cube
-   * @param timeDimension
-   * @return
+   * @param cube             Cube to get latest date of
+   * @param timeDimension    time dimension
+   * @return                 latest date among all facts of cube in 
timeDimension
    * @throws HiveException
    * @throws LensException
    */
@@ -213,7 +214,7 @@ public class CubeMetastoreClient {
           latestPartSpec.put(timePartCol, partSpec.get(timePartCol));
           if (partSpec.equals(latestPartSpec)) {
             latestPart.getParameters().putAll(partition.getParameters());
-            
latestPart.getParameters().put(MetastoreUtil.getLatestPartTimestampKey(timePartCol),
+            
latestPart.getParameters().put(getLatestPartTimestampKey(timePartCol),
               partSpec.get(timePartCol));
             
latestPart.getTPartition().getSd().getSerdeInfo().getParameters().putAll(
               
partition.getTPartition().getSd().getSerdeInfo().getParameters());
@@ -244,12 +245,11 @@ public class CubeMetastoreClient {
       CaseInsensitiveStringHashMap<// partition column
         PartitionTimeline>>> {
     /**
-     * Returns true if all the timelines for fact-storage table are empty for 
all valid update periods.
      *
-     * @param fact
-     * @param storage
-     * @param partCol
-     * @return
+     * @param fact      fact
+     * @param storage   storage
+     * @param partCol   part column
+     * @return          true if all the timelines for fact-storage table are 
empty for all valid update periods.
      * @throws HiveException
      * @throws LensException
      */
@@ -273,22 +273,21 @@ public class CubeMetastoreClient {
      * properties, it'll get all partitions, compute timelines in memory, 
write back all loads timelines to table
      * properties for further usage and return them.
      *
-     * @param fact
-     * @param storage
-     * @return
+     * @param fact          fact
+     * @param storage       storage
+     * @return              all timelines for fact-storage pair. Load from 
properties/all partitions if needed.
      * @throws HiveException
      * @throws LensException
      */
     public TreeMap<UpdatePeriod, 
CaseInsensitiveStringHashMap<PartitionTimeline>> get(String fact, String 
storage)
       throws HiveException, LensException {
       // SUSPEND CHECKSTYLE CHECK DoubleCheckedLockingCheck
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, 
Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, 
Storage.getPrefix(storage));
       if (get(storageTableName) == null) {
         synchronized (this) {
           if (get(storageTableName) == null) {
             Table storageTable = getTable(storageTableName);
-            if ("true".equalsIgnoreCase(storageTable.getParameters().get(
-              MetastoreUtil.getPartitionTimelineCachePresenceKey()))) {
+            if 
("true".equalsIgnoreCase(storageTable.getParameters().get(getPartitionTimelineCachePresenceKey())))
 {
               try {
                 loadTimelinesFromTableProperties(fact, storage);
               } catch (Exception e) {
@@ -313,7 +312,7 @@ public class CubeMetastoreClient {
       // Not found in table properties either, compute from all partitions of 
the fact-storage table.
       // First make sure all combinations of update period and partition 
column have an entry even
       // if no partitions exist
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, 
Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, 
Storage.getPrefix(storage));
       log.info("loading from all partitions: {}", storageTableName);
       Table storageTable = getTable(storageTableName);
       if (getCubeFact(fact).getUpdatePeriods() != null && 
getCubeFact(fact).getUpdatePeriods().get(
@@ -349,7 +348,7 @@ public class CubeMetastoreClient {
 
     private void loadTimelinesFromTableProperties(String fact, String storage) 
throws HiveException, LensException {
       // found in table properties, load from there.
-      String storageTableName = MetastoreUtil.getStorageTableName(fact, 
Storage.getPrefix(storage));
+      String storageTableName = getStorageTableName(fact, 
Storage.getPrefix(storage));
       log.info("loading from table properties: {}", storageTableName);
       for (UpdatePeriod updatePeriod : 
getCubeFact(fact).getUpdatePeriods().get(storage)) {
         for (String partCol : getTimePartColNamesOfTable(storageTableName)) {
@@ -362,10 +361,10 @@ public class CubeMetastoreClient {
      * Adds given partition(for storageTable, updatePeriod, 
partitionColum=partition) for batch addition in an
      * appropriate timeline object. Ignore if partition is not valid.
      *
-     * @param storageTable
-     * @param updatePeriod
-     * @param partitionColumn
-     * @param partition
+     * @param storageTable      storage table
+     * @param updatePeriod      update period
+     * @param partitionColumn   partition column
+     * @param partition         partition
      */
     public void addForBatchAddition(String storageTable, UpdatePeriod 
updatePeriod, String partitionColumn,
       String partition) {
@@ -383,9 +382,9 @@ public class CubeMetastoreClient {
      * <p></p>
      * kind of like mkdir -p
      *
-     * @param storageTable
-     * @param updatePeriod
-     * @param partitionColumn
+     * @param storageTable    storage table
+     * @param updatePeriod    update period
+     * @param partitionColumn partition column
      * @return timeline if already exists, or puts a new timeline and returns.
      */
     public PartitionTimeline ensureEntry(String storageTable, UpdatePeriod 
updatePeriod, String partitionColumn) {
@@ -405,7 +404,7 @@ public class CubeMetastoreClient {
     /**
      * commit all batch addition for all its timelines.
      *
-     * @param storageTable
+     * @param storageTable   storage table
      * @throws HiveException
      * @throws LensException
      */
@@ -451,7 +450,6 @@ public class CubeMetastoreClient {
       return timeline;
     }
 
-
     /** update partition timeline cache for addition of time partition */
     public void updateForAddition(String cubeTableName, String storageName, 
UpdatePeriod updatePeriod,
       Map<String, TreeSet<Date>> timePartSpec) throws HiveException, 
LensException {
@@ -490,8 +488,8 @@ public class CubeMetastoreClient {
   /**
    * Get the instance of {@link CubeMetastoreClient} corresponding to {@link 
HiveConf}
    *
-   * @param conf
-   * @return CubeMetastoreClient
+   * @param conf                  conf
+   * @return                      CubeMetastoreClient instance
    * @throws HiveException
    */
   public static CubeMetastoreClient getInstance(HiveConf conf) throws 
HiveException {
@@ -826,7 +824,7 @@ public class CubeMetastoreClient {
 
   private List<Partition> addPartitions(String factOrDimTable, String 
storageName, UpdatePeriod updatePeriod,
     List<StoragePartitionDesc> storagePartitionDescs) throws HiveException, 
LensException {
-    String storageTableName = 
MetastoreUtil.getStorageTableName(factOrDimTable.trim(),
+    String storageTableName = getStorageTableName(factOrDimTable.trim(),
       Storage.getPrefix(storageName.trim())).toLowerCase();
     if (getDimensionTable(factOrDimTable) != null) {
       // Adding partition in dimension table.
@@ -855,7 +853,7 @@ public class CubeMetastoreClient {
       List<Partition> partsAdded =
         getStorage(storageName).addPartitions(getClient(), factOrDimTable, 
updatePeriod, storagePartitionDescs, null);
       // update hive table
-      
alterTablePartitionCache(MetastoreUtil.getStorageTableName(factOrDimTable, 
Storage.getPrefix(storageName)));
+      alterTablePartitionCache(getStorageTableName(factOrDimTable, 
Storage.getPrefix(storageName)));
       return partsAdded;
     }
   }
@@ -892,7 +890,7 @@ public class CubeMetastoreClient {
   /**
    * store back all timelines of given storage table to table properties
    *
-   * @param storageTableName
+   * @param storageTableName  storage table name
    * @throws HiveException
    */
   private void alterTablePartitionCache(String storageTableName) throws 
HiveException {
@@ -905,7 +903,7 @@ public class CubeMetastoreClient {
           entry.getValue().updateTableParams(table);
         }
       }
-      params.put(MetastoreUtil.getPartitionTimelineCachePresenceKey(), "true");
+      params.put(getPartitionTimelineCachePresenceKey(), "true");
       alterHiveTable(storageTableName, table);
     }
   }
@@ -930,14 +928,14 @@ public class CubeMetastoreClient {
         boolean makeLatest = true;
         Partition part = getLatestPart(storageTableName, partCol, 
nonTimeParts);
         Date pTimestamp = timePartSpecs.get(partCol).last();
-        Date latestTimestamp = 
MetastoreUtil.getLatestTimeStampOfDimtable(part, partCol);
+        Date latestTimestamp = getLatestTimeStampFromPartition(part, partCol);
         if (latestTimestamp != null && pTimestamp.before(latestTimestamp)) {
           makeLatest = false;
         }
 
         if (makeLatest) {
-          Map<String, String> latestParams = new HashMap<String, String>();
-          latestParams.put(MetastoreUtil.getLatestPartTimestampKey(partCol), 
updatePeriod.format().format(pTimestamp));
+          Map<String, String> latestParams = 
LensUtil.getHashMap(getLatestPartTimestampKey(partCol),
+            updatePeriod.format(pTimestamp));
           latest.latestParts.put(partCol, new 
LatestPartColumnInfo(latestParams));
         }
       }
@@ -970,7 +968,7 @@ public class CubeMetastoreClient {
     if (updatePeriodStr != null) {
       UpdatePeriod partInterval = UpdatePeriod.valueOf(updatePeriodStr);
       try {
-        partDate = partInterval.format().parse(partVal);
+        partDate = partInterval.parse(partVal);
       } catch (ParseException e) {
         // ignore
       }
@@ -982,17 +980,18 @@ public class CubeMetastoreClient {
     UpdatePeriod updatePeriod, Map<String, String> nonTimePartSpec)
     throws HiveException {
     // getClient().getPartitionsByNames(tbl, partNames)
-    List<Partition> partitions = null;
+    List<Partition> partitions;
     try {
       partitions = getClient().getPartitionsByFilter(hiveTable, 
StorageConstants.getPartFilter(nonTimePartSpec));
-      MetastoreUtil.filterPartitionsByNonTimeParts(partitions, 
nonTimePartSpec, timeCol);
+      filterPartitionsByUpdatePeriod(partitions, updatePeriod);
+      filterPartitionsByNonTimeParts(partitions, nonTimePartSpec, timeCol);
     } catch (TException e) {
       throw new HiveException(e);
     }
 
     // tree set contains partitions with timestamp as value for timeCol, in
     // descending order
-    TreeSet<Partition> allPartTimeVals = new TreeSet<Partition>(new 
Comparator<Partition>() {
+    TreeSet<Partition> allPartTimeVals = new TreeSet<>(new 
Comparator<Partition>() {
       @Override
       public int compare(Partition o1, Partition o2) {
         Date partDate1 = getPartDate(o1, timeColIndex);
@@ -1001,7 +1000,7 @@ public class CubeMetastoreClient {
           return -1;
         } else if (partDate1 == null && partDate2 != null) {
           return 1;
-        } else if (partDate1 == null && partDate2 == null) {
+        } else if (partDate1 == null) {
           return o2.getTPartition().compareTo(o1.getTPartition());
         } else if (!partDate2.equals(partDate1)) {
           return partDate2.compareTo(partDate1);
@@ -1025,9 +1024,8 @@ public class CubeMetastoreClient {
       Partition nextLatest = it.next();
       latest = new LatestInfo();
       latest.setPart(nextLatest);
-      Map<String, String> latestParams = new HashMap<String, String>();
-      String partVal = nextLatest.getValues().get(timeColIndex);
-      latestParams.put(MetastoreUtil.getLatestPartTimestampKey(timeCol), 
partVal);
+      Map<String, String> latestParams = 
LensUtil.getHashMap(getLatestPartTimestampKey(timeCol),
+        nextLatest.getValues().get(timeColIndex));
       latest.addLatestPartInfo(timeCol, new 
LatestPartColumnInfo(latestParams));
     }
     return latest;
@@ -1045,16 +1043,16 @@ public class CubeMetastoreClient {
    */
   public void dropPartition(String cubeTableName, String storageName, 
Map<String, Date> timePartSpec,
     Map<String, String> nonTimePartSpec, UpdatePeriod updatePeriod) throws 
HiveException, LensException {
-    String storageTableName = 
MetastoreUtil.getStorageTableName(cubeTableName.trim(),
+    String storageTableName = getStorageTableName(cubeTableName.trim(),
       Storage.getPrefix(storageName.trim())).toLowerCase();
     Table hiveTable = getHiveTable(storageTableName);
     List<FieldSchema> partCols = hiveTable.getPartCols();
-    List<String> partColNames = new ArrayList<String>(partCols.size());
-    List<String> partVals = new ArrayList<String>(partCols.size());
+    List<String> partColNames = new ArrayList<>(partCols.size());
+    List<String> partVals = new ArrayList<>(partCols.size());
     for (FieldSchema column : partCols) {
       partColNames.add(column.getName());
       if (timePartSpec.containsKey(column.getName())) {
-        
partVals.add(updatePeriod.format().format(timePartSpec.get(column.getName())));
+        partVals.add(updatePeriod.format(timePartSpec.get(column.getName())));
       } else if (nonTimePartSpec.containsKey(column.getName())) {
         partVals.add(nonTimePartSpec.get(column.getName()));
       } else {
@@ -1063,7 +1061,7 @@ public class CubeMetastoreClient {
     }
     if (isDimensionTable(cubeTableName)) {
       String timePartColsStr = 
hiveTable.getTTable().getParameters().get(MetastoreConstants.TIME_PART_COLUMNS);
-      Map<String, LatestInfo> latest = new HashMap<String, 
Storage.LatestInfo>();
+      Map<String, LatestInfo> latest = new HashMap<>();
       boolean latestAvailable = false;
       if (timePartColsStr != null) {
         List<String> timePartCols = 
Arrays.asList(StringUtils.split(timePartColsStr, ','));
@@ -1074,30 +1072,30 @@ public class CubeMetastoreClient {
           int timeColIndex = partColNames.indexOf(timeCol);
           Partition part = getLatestPart(storageTableName, timeCol, 
nonTimePartSpec);
 
-          boolean isLatest = true;
+          Date latestTimestamp = getLatestTimeStampFromPartition(part, 
timeCol);
+          Date dropTimestamp;
+          try {
+            dropTimestamp = 
updatePeriod.parse(updatePeriod.format(timePartSpec.get(timeCol)));
+          } catch (ParseException e) {
+            throw new HiveException(e);
+          }
           // check if partition being dropped is the latest partition
-          for (int i = 0; i < partVals.size(); i++) {
-            if (i != timeColIndex) {
-              if (!part.getValues().get(i).equals(partVals.get(i))) {
-                isLatest = false;
-                break;
+          boolean isLatest = latestTimestamp != null && 
dropTimestamp.equals(latestTimestamp);
+          if (isLatest) {
+            for (int i = 0; i < partVals.size(); i++) {
+              if (i != timeColIndex) {
+                if (!part.getValues().get(i).equals(partVals.get(i))) {
+                  isLatest = false;
+                  break;
+                }
               }
             }
           }
           if (isLatest) {
-            Date latestTimestamp = 
MetastoreUtil.getLatestTimeStampOfDimtable(part, timeCol);
-            Date dropTimestamp;
-            try {
-              dropTimestamp = 
updatePeriod.format().parse(updatePeriod.format().format(timePartSpec.get(timeCol)));
-            } catch (ParseException e) {
-              throw new HiveException(e);
-            }
-            if (latestTimestamp != null && 
dropTimestamp.equals(latestTimestamp)) {
-              LatestInfo latestInfo =
-                getNextLatestOfDimtable(hiveTable, timeCol, timeColIndex, 
updatePeriod, nonTimePartSpec);
-              latestAvailable = (latestInfo != null && latestInfo.part != 
null);
-              latest.put(timeCol, latestInfo);
-            }
+            LatestInfo latestInfo =
+              getNextLatestOfDimtable(hiveTable, timeCol, timeColIndex, 
updatePeriod, nonTimePartSpec);
+            latestAvailable = (latestInfo != null && latestInfo.part != null);
+            latest.put(timeCol, latestInfo);
           } else {
             latestAvailable = true;
           }
@@ -1122,9 +1120,9 @@ public class CubeMetastoreClient {
   }
 
   private Map<String, String> getPartitionSpec(UpdatePeriod updatePeriod, 
Map<String, Date> partitionTimestamps) {
-    Map<String, String> partSpec = new HashMap<String, String>();
+    Map<String, String> partSpec = new HashMap<>();
     for (Map.Entry<String, Date> entry : partitionTimestamps.entrySet()) {
-      String pval = updatePeriod.format().format(entry.getValue());
+      String pval = updatePeriod.format(entry.getValue());
       partSpec.put(entry.getKey(), pval);
     }
     return partSpec;
@@ -1158,13 +1156,13 @@ public class CubeMetastoreClient {
   }
 
   public boolean partitionExistsByFilter(String cubeTableName, String 
storageName, String filter) throws HiveException {
-    return 
partitionExistsByFilter(MetastoreUtil.getStorageTableName(cubeTableName, 
Storage.getPrefix(storageName)),
+    return partitionExistsByFilter(getStorageTableName(cubeTableName, 
Storage.getPrefix(storageName)),
       filter);
   }
 
   public boolean partitionExistsByFilter(String storageTableName, String 
filter) throws HiveException {
     int parts;
-    Table tbl = null;
+    Table tbl;
     try {
       tbl = getTable(storageTableName);
     } catch (Exception e) {
@@ -1199,12 +1197,9 @@ public class CubeMetastoreClient {
     }
   }
 
-  public int getNumPartitionsByFilter(String storageTableName, String filter) 
throws HiveException, TException {
-    return getClient().getNumPartitionsByFilter(getTable(storageTableName), 
filter);
-  }
-
   boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod, 
Map<String, Date> partitionTimestamps,
-    Map<String, String> partSpec) throws HiveException {
+    Map<String, String> nonTimePartSpec) throws HiveException {
+    HashMap<String, String> partSpec = new HashMap<>(nonTimePartSpec);
     partSpec.putAll(getPartitionSpec(updatePeriod, partitionTimestamps));
     return partitionExists(storageTableName, partSpec);
   }
@@ -1228,7 +1223,7 @@ public class CubeMetastoreClient {
 
   boolean latestPartitionExists(String factOrDimTblName, String storageName, 
String latestPartCol)
     throws HiveException, LensException {
-    String storageTableName = 
MetastoreUtil.getStorageTableName(factOrDimTblName, 
Storage.getPrefix(storageName));
+    String storageTableName = getStorageTableName(factOrDimTblName, 
Storage.getPrefix(storageName));
     if (isDimensionTable(factOrDimTblName)) {
       return dimTableLatestPartitionExists(storageTableName);
     } else {
@@ -1258,17 +1253,14 @@ public class CubeMetastoreClient {
   /**
    * Get the hive {@link Table} corresponding to the name
    *
-   * @param tableName
-   * @return {@link Table} object
+   * @param tableName table name
+   * @return {@link Table} object corresponding to the name
    * @throws HiveException
    */
   public Table getHiveTable(String tableName) throws HiveException {
     return getTable(tableName);
   }
-  public List<String> getTimePartColNamesOfTable(String tblName, String 
storageName) throws HiveException {
-    return 
getTimePartColNamesOfTable(getFactOrDimtableStorageTableName(tblName,
-      storageName));
-  }
+
   public List<String> getTimePartColNamesOfTable(String storageTableName) 
throws HiveException {
     return getTimePartColNamesOfTable(getTable(storageTableName));
   }
@@ -1340,10 +1332,8 @@ public class CubeMetastoreClient {
   }
 
   boolean isFactTableForCube(Table tbl, String cube) {
-    if (isFactTable(tbl)) {
-      return CubeFactTable.getCubeName(tbl.getTableName(), 
tbl.getParameters()).equalsIgnoreCase(cube.toLowerCase());
-    }
-    return false;
+    return isFactTable(tbl) && CubeFactTable.getCubeName(tbl.getTableName(), 
tbl.getParameters())
+      .equalsIgnoreCase(cube.toLowerCase());
   }
 
   /**
@@ -1404,8 +1394,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a cube table?
    *
-   * @param tbl
-   * @return
+   * @param tbl table
+   * @return    whether it's a cube table or not
    * @throws HiveException
    */
   boolean isCube(Table tbl) throws HiveException {
@@ -1416,8 +1406,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a dimension?
    *
-   * @param tbl
-   * @return
+   * @param tbl  table
+   * @return     whether the hive table is a dimension or not
    * @throws HiveException
    */
   boolean isDimension(Table tbl) throws HiveException {
@@ -1440,8 +1430,8 @@ public class CubeMetastoreClient {
   /**
    * Is the hive table a storage
    *
-   * @param tbl
-   * @return
+   * @param tbl table
+   * @return    whether the hive table is a storage
    * @throws HiveException
    */
   boolean isStorage(Table tbl) throws HiveException {
@@ -1481,10 +1471,10 @@ public class CubeMetastoreClient {
           Table tbl = getTable(tableName);
           if (isDimensionTable(tbl)) {
             dimTable = getDimensionTable(tbl);
-            if (enableCaching && dimTable != null) {
+            if (enableCaching) {
               allDimTables.put(tableName, dimTable);
               // update latest partition cache for all storages
-              if (dimTable.getStorages() != null && 
!dimTable.getStorages().isEmpty()) {
+              if (!dimTable.getStorages().isEmpty()) {
                 for (String storageName : dimTable.getStorages()) {
                   if (dimTable.hasStorageSnapshots(storageName)) {
                     String storageTableName = 
getFactOrDimtableStorageTableName(dimTable.getName(),
@@ -1630,7 +1620,7 @@ public class CubeMetastoreClient {
   }
 
   private CubeInterface getCube(Table tbl) throws HiveException {
-    String parentCube = 
tbl.getParameters().get(MetastoreUtil.getParentCubeNameKey(tbl.getTableName()));
+    String parentCube = 
tbl.getParameters().get(getParentCubeNameKey(tbl.getTableName()));
     if (parentCube != null) {
       return new DerivedCube(tbl, (Cube) getCube(parentCube));
     } else {
@@ -1650,7 +1640,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeDimensionTable> getAllDimensionTables() throws 
HiveException {
     if (!allDimTablesPopulated) {
-      List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
+      List<CubeDimensionTable> dimTables = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeDimensionTable dim = getDimensionTable(table);
@@ -1676,7 +1666,7 @@ public class CubeMetastoreClient {
    */
   public Collection<Storage> getAllStorages() throws HiveException {
     if (!allStoragesPopulated) {
-      List<Storage> storages = new ArrayList<Storage>();
+      List<Storage> storages = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           Storage storage = getStorage(table);
@@ -1702,7 +1692,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeInterface> getAllCubes() throws HiveException {
     if (!allCubesPopulated) {
-      List<CubeInterface> cubes = new ArrayList<CubeInterface>();
+      List<CubeInterface> cubes = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeInterface cube = getCube(table);
@@ -1728,7 +1718,7 @@ public class CubeMetastoreClient {
    */
   public Collection<Dimension> getAllDimensions() throws HiveException {
     if (!allDimensionsPopulated) {
-      List<Dimension> dims = new ArrayList<Dimension>();
+      List<Dimension> dims = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           Dimension dim = getDimension(table);
@@ -1754,7 +1744,7 @@ public class CubeMetastoreClient {
    */
   public Collection<CubeFactTable> getAllFacts() throws HiveException {
     if (!allFactTablesPopulated) {
-      List<CubeFactTable> facts = new ArrayList<CubeFactTable>();
+      List<CubeFactTable> facts = new ArrayList<>();
       try {
         for (String table : getAllHiveTableNames()) {
           CubeFactTable fact = getCubeFact(table);
@@ -1801,7 +1791,7 @@ public class CubeMetastoreClient {
       }
       cubeName = cube.getName();
     }
-    List<CubeFactTable> cubeFacts = new ArrayList<CubeFactTable>();
+    List<CubeFactTable> cubeFacts = new ArrayList<>();
     try {
       for (CubeFactTable fact : getAllFacts()) {
         if (cubeName == null || fact.getCubeName().equalsIgnoreCase(cubeName)) 
{
@@ -1815,27 +1805,6 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Get all derived cubes of the cube.
-   *
-   * @param cube Cube object
-   * @return List of DerivedCube objects
-   * @throws HiveException
-   */
-  public List<DerivedCube> getAllDerivedCubes(CubeInterface cube) throws 
HiveException {
-    List<DerivedCube> dcubes = new ArrayList<DerivedCube>();
-    try {
-      for (CubeInterface cb : getAllCubes()) {
-        if (cb.isDerivedCube() && ((DerivedCube) 
cb).getParent().getName().equalsIgnoreCase(cube.getName())) {
-          dcubes.add((DerivedCube) cb);
-        }
-      }
-    } catch (HiveException e) {
-      throw new HiveException("Could not get all derived cubes of " + cube, e);
-    }
-    return dcubes;
-  }
-
-  /**
    * Get all derived cubes of the cube, that have all fields queryable together
    *
    * @param cube Cube object
@@ -1843,7 +1812,7 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public List<DerivedCube> getAllDerivedQueryableCubes(CubeInterface cube) 
throws HiveException {
-    List<DerivedCube> dcubes = new ArrayList<DerivedCube>();
+    List<DerivedCube> dcubes = new ArrayList<>();
     try {
       for (CubeInterface cb : getAllCubes()) {
         if (cb.isDerivedCube() && ((DerivedCube) 
cb).getParent().getName().equalsIgnoreCase(cube.getName())
@@ -1865,7 +1834,7 @@ public class CubeMetastoreClient {
    * @throws HiveException
    */
   public List<CubeDimensionTable> getAllDimensionTables(Dimension dim) throws 
HiveException {
-    List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
+    List<CubeDimensionTable> dimTables = new ArrayList<>();
     try {
       for (CubeDimensionTable dimTbl : getAllDimensionTables()) {
         if (dim == null || 
dimTbl.getDimName().equalsIgnoreCase(dim.getName().toLowerCase())) {
@@ -1878,15 +1847,6 @@ public class CubeMetastoreClient {
     return dimTables;
   }
 
-  public List<String> getPartColNames(String tableName) throws HiveException {
-    List<String> partColNames = new ArrayList<String>();
-    Table tbl = getTable(tableName);
-    for (FieldSchema f : tbl.getPartCols()) {
-      partColNames.add(f.getName().toLowerCase());
-    }
-    return partColNames;
-  }
-
   public boolean partColExists(String tableName, String partCol) throws 
HiveException {
     Table tbl = getTable(tableName);
     for (FieldSchema f : tbl.getPartCols()) {
@@ -1905,11 +1865,11 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Returns true if columns changed
    *
-   * @param table
-   * @param hiveTable
-   * @param cubeTable
+   * @param table     table name
+   * @param hiveTable hive table
+   * @param cubeTable lens cube table
+   * @return true if columns changed in alter
    * @throws HiveException
    */
   private boolean alterCubeTable(String table, Table hiveTable, 
AbstractCubeTable cubeTable) throws HiveException {
@@ -1943,11 +1903,6 @@ public class CubeMetastoreClient {
     }
   }
 
-  private void alterHiveTable(String table, Table hiveTable, List<FieldSchema> 
columns) throws HiveException {
-    hiveTable.getTTable().getSd().setCols(columns);
-    alterHiveTable(table, hiveTable);
-  }
-
   /**
    * Alter cube specified by the name to new definition
    *
@@ -1977,7 +1932,7 @@ public class CubeMetastoreClient {
   public void alterDimension(String dimName, Dimension newDim) throws 
HiveException {
     Table tbl = getTable(dimName);
     if (isDimension(tbl)) {
-      alterCubeTable(dimName, tbl, (AbstractCubeTable) newDim);
+      alterCubeTable(dimName, tbl, newDim);
       if (enableCaching) {
         allDims.put(dimName.trim().toLowerCase(), 
getDimension(refreshTable(dimName)));
       }
@@ -2008,7 +1963,7 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage
    *
-   * @param storageName
+   * @param storageName  storage name
    * @throws HiveException
    */
   public void dropStorage(String storageName) throws HiveException {
@@ -2023,7 +1978,7 @@ public class CubeMetastoreClient {
   /**
    * Drop a cube
    *
-   * @param cubeName
+   * @param cubeName cube name
    * @throws HiveException
    */
   public void dropCube(String cubeName) throws HiveException {
@@ -2053,9 +2008,9 @@ public class CubeMetastoreClient {
   }
 
   /**
-   * Drop a fact with cascade flag
+   * Drop a fact with cascade  flag
    *
-   * @param factName
+   * @param factName fact name
    * @param cascade  If true, will drop all the storages of the fact
    * @throws HiveException
    */
@@ -2077,8 +2032,8 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage from fact
    *
-   * @param factName
-   * @param storage
+   * @param factName fact name
+   * @param storage  storage name
    * @throws HiveException
    */
   public void dropStorageFromFact(String factName, String storage) throws 
HiveException {
@@ -2103,8 +2058,8 @@ public class CubeMetastoreClient {
   /**
    * Drop a storage from dimension
    *
-   * @param dimTblName
-   * @param storage
+   * @param dimTblName dim table name
+   * @param storage    storage
    * @throws HiveException
    */
   public void dropStorageFromDim(String dimTblName, String storage) throws 
HiveException {
@@ -2127,7 +2082,7 @@ public class CubeMetastoreClient {
   /**
    * Drop the dimension table
    *
-   * @param dimTblName
+   * @param dimTblName dim table name
    * @param cascade    If true, will drop all the dimension storages
    * @throws HiveException
    */
@@ -2149,9 +2104,9 @@ public class CubeMetastoreClient {
   /**
    * Alter a cubefact with new definition and alter underlying storage tables 
as well.
    *
-   * @param factTableName
-   * @param cubeFactTable
-   * @param storageTableDescs
+   * @param factTableName     fact table name
+   * @param cubeFactTable     cube fact table
+   * @param storageTableDescs storage table desc objects
    *
    * @throws HiveException
    */
@@ -2187,8 +2142,8 @@ public class CubeMetastoreClient {
   /**
    * Alter dimension table with new dimension definition and underlying 
storage tables as well
    *
-   * @param dimTableName
-   * @param cubeDimensionTable
+   * @param dimTableName         dim table name
+   * @param cubeDimensionTable   cube dimention table
    * @throws HiveException
    */
   public void alterCubeDimensionTable(String dimTableName, CubeDimensionTable 
cubeDimensionTable,

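The core of this fix is the reordered check in dropPartition above: the
latest-timestamp comparison now happens before any cache recomputation, so
dropping a non-latest partition leaves the latest cache untouched. Below is a
simplified, self-contained restatement of that decision; the class, method
name and parameters are illustrative, not the actual Lens signature.

import java.util.Date;
import java.util.List;

public class DropLatestCheckSketch {
  // Returns true only when the partition being dropped is the current latest
  // one for the time column; only then does the caller need to look up the
  // next-latest partition and rewrite the latest cache.
  static boolean isDroppingLatest(Date latestTimestamp, Date dropTimestamp,
      List<String> latestPartVals, List<String> dropPartVals, int timeColIndex) {
    // Cheap timestamp check first: a non-latest drop bails out here, which is
    // what keeps the latest cache from being cleared unnecessarily.
    if (latestTimestamp == null || !dropTimestamp.equals(latestTimestamp)) {
      return false;
    }
    // Then make sure the non-time partition values also match the latest part.
    for (int i = 0; i < dropPartVals.size(); i++) {
      if (i != timeColIndex && !latestPartVals.get(i).equals(dropPartVals.get(i))) {
        return false;
      }
    }
    return true;
  }
}
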
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index f934ad3..1e5ef93 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -87,7 +87,7 @@ public class FactPartition implements 
Comparable<FactPartition> {
   }
 
   public String getPartString() {
-    return period.format().format(partSpec);
+    return period.format(partSpec);
   }
 
   public String getFormattedFilter(String tableName) {

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index e5cf468..4b57d95 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -25,7 +25,6 @@ import java.text.ParseException;
 import java.util.*;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 
@@ -34,11 +33,11 @@ public class MetastoreUtil {
 
   }
 
-  public static final String getFactOrDimtableStorageTableName(String 
factName, String storageName) {
+  public static String getFactOrDimtableStorageTableName(String factName, 
String storageName) {
     return getStorageTableName(factName, Storage.getPrefix(storageName));
   }
 
-  public static final String getStorageTableName(String cubeTableName, String 
storagePrefix) {
+  public static String getStorageTableName(String cubeTableName, String 
storagePrefix) {
     return (storagePrefix + cubeTableName).toLowerCase();
   }
 
@@ -46,26 +45,26 @@ public class MetastoreUtil {
     return getStorageEntityPrefix(name) + CLASS_SFX;
   }
 
-  public static final String getStorageEntityPrefix(String storageName) {
+  public static String getStorageEntityPrefix(String storageName) {
     return STORAGE_ENTITY_PFX + storageName.toLowerCase();
   }
 
   // //////////////////////////
   // Dimension properties ///
   // /////////////////////////
-  public static final String getDimPrefix(String dimName) {
+  public static String getDimPrefix(String dimName) {
     return DIMENSION_PFX + dimName.toLowerCase();
   }
 
-  public static final String getDimAttributeListKey(String dimName) {
+  public static String getDimAttributeListKey(String dimName) {
     return getDimPrefix(dimName) + ATTRIBUTES_LIST_SFX;
   }
 
-  public static final String getDimTablePartsKey(String dimtableName) {
+  public static String getDimTablePartsKey(String dimtableName) {
     return DIM_TABLE_PFX + dimtableName + PARTCOLS_SFX;
   }
 
-  public static final String getDimTimedDimensionKey(String dimName) {
+  public static String getDimTimedDimensionKey(String dimName) {
     return getDimPrefix(dimName) + TIMED_DIMENSION_SFX;
   }
 
@@ -76,7 +75,7 @@ public class MetastoreUtil {
     return DIM_KEY_PFX + dimName.toLowerCase();
   }
 
-  public static final String getDimensionClassPropertyKey(String dimName) {
+  public static String getDimensionClassPropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CLASS_SFX;
   }
 
@@ -84,11 +83,11 @@ public class MetastoreUtil {
     return getDimensionKeyPrefix(name) + INLINE_VALUES_SFX;
   }
 
-  public static final String getDimTypePropertyKey(String dimName) {
+  public static String getDimTypePropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + TYPE_SFX;
   }
 
-  public static final String getDimNumOfDistinctValuesPropertyKey(String 
dimName) {
+  public static String getDimNumOfDistinctValuesPropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + NUM_DISTINCT_VALUES;
   }
 
@@ -104,15 +103,15 @@ public class MetastoreUtil {
     return 
Integer.parseInt(param.substring(getHierachyElementKeyPFX(dimName).length()));
   }
 
-  public static final String getDimensionSrcReferenceKey(String dimName) {
+  public static String getDimensionSrcReferenceKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + DIM_REFERS_SFX;
   }
 
-  public static final String getDimRefChainNameKey(String dimName) {
+  public static String getDimRefChainNameKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CHAIN_NAME_SFX;
   }
 
-  public static final String getDimRefChainColumnKey(String dimName) {
+  public static String getDimRefChainColumnKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CHAIN_REF_COLUMN_SFX;
   }
 
@@ -120,11 +119,7 @@ public class MetastoreUtil {
     return getDimensionKeyPrefix(dimName) + IS_JOIN_KEY_SFX;
   }
 
-  public static final String getDimensionDestReference(String tableName, 
String columnName) {
-    return tableName.toLowerCase() + TABLE_COLUMN_SEPERATOR + 
columnName.toLowerCase();
-  }
-
-  public static final String getReferencesString(List<TableReference> 
references) {
+  public static String getReferencesString(List<TableReference> references) {
     String[] toks = new String[references.size()];
 
     for (int i = 0; i < references.size(); i++) {
@@ -150,10 +145,12 @@ public class MetastoreUtil {
   public static String getCubeColEndTimePropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + END_TIME_SFX;
   }
-  public static String getStoragetableStartTimesKey(){
+
+  public static String getStoragetableStartTimesKey() {
     return STORAGE_PFX + "start.times";
   }
-  public static String getStoragetableEndTimesKey(){
+
+  public static String getStoragetableEndTimesKey() {
     return STORAGE_PFX + "end.times";
   }
 
@@ -169,15 +166,15 @@ public class MetastoreUtil {
     return getColumnKeyPrefix(colName) + DISPLAY_SFX;
   }
 
-  public static final String getExprColumnKey(String colName) {
+  public static String getExprColumnKey(String colName) {
     return getColumnKeyPrefix(colName) + EXPR_SFX;
   }
 
-  public static final String getExprTypePropertyKey(String colName) {
+  public static String getExprTypePropertyKey(String colName) {
     return getColumnKeyPrefix(colName) + TYPE_SFX;
   }
 
-  public static final String getExprEncodingPropertyKey(String colName) {
+  public static String getExprEncodingPropertyKey(String colName) {
     return getExprColumnKey(colName) + BASE64_SFX;
   }
 
@@ -247,78 +244,78 @@ public class MetastoreUtil {
   // //////////////////////////
   // Measure properties ///
   // /////////////////////////
-  public static final String getMeasurePrefix(String measureName) {
+  public static String getMeasurePrefix(String measureName) {
     return MEASURE_KEY_PFX + measureName.toLowerCase();
   }
 
-  public static final String getMeasureClassPropertyKey(String measureName) {
+  public static String getMeasureClassPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + CLASS_SFX;
   }
 
-  public static final String getMeasureUnitPropertyKey(String measureName) {
+  public static String getMeasureUnitPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + UNIT_SFX;
   }
 
-  public static final String getMeasureTypePropertyKey(String measureName) {
+  public static String getMeasureTypePropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + TYPE_SFX;
   }
 
-  public static final String getMeasureFormatPropertyKey(String measureName) {
+  public static String getMeasureFormatPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + FORMATSTRING_SFX;
   }
 
-  public static final String getMeasureAggrPropertyKey(String measureName) {
+  public static String getMeasureAggrPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + AGGR_SFX;
   }
 
-  public static final String getMeasureMinPropertyKey(String measureName) {
+  public static String getMeasureMinPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + MIN_SFX;
   }
 
-  public static final String getMeasureMaxPropertyKey(String measureName) {
+  public static String getMeasureMaxPropertyKey(String measureName) {
     return getMeasurePrefix(measureName) + MAX_SFX;
   }
 
-  public static final String getExpressionListKey(String name) {
+  public static String getExpressionListKey(String name) {
     return getBasePrefix(name) + EXPRESSIONS_LIST_SFX;
   }
 
   // //////////////////////////
   // Cube properties ///
   // /////////////////////////
-  public static final String getBasePrefix(String base) {
+  public static String getBasePrefix(String base) {
     return BASE_KEY_PFX + base.toLowerCase();
   }
 
-  public static final String getCubePrefix(String cubeName) {
+  public static String getCubePrefix(String cubeName) {
     return CUBE_KEY_PFX + cubeName.toLowerCase();
   }
 
-  public static final String getCubeMeasureListKey(String cubeName) {
+  public static String getCubeMeasureListKey(String cubeName) {
     return getCubePrefix(cubeName) + MEASURES_LIST_SFX;
   }
 
-  public static final String getCubeDimensionListKey(String cubeName) {
+  public static String getCubeDimensionListKey(String cubeName) {
     return getCubePrefix(cubeName) + DIMENSIONS_LIST_SFX;
   }
 
-  public static final String getCubeTimedDimensionListKey(String cubeName) {
+  public static String getCubeTimedDimensionListKey(String cubeName) {
     return getCubePrefix(cubeName) + TIMED_DIMENSIONS_LIST_SFX;
   }
 
-  public static final String getCubeJoinChainListKey(String cubeName) {
+  public static String getCubeJoinChainListKey(String cubeName) {
     return getCubePrefix(cubeName) + JOIN_CHAIN_LIST_SFX;
   }
 
-  public static final String getDimensionJoinChainListKey(String dimName) {
+  public static String getDimensionJoinChainListKey(String dimName) {
     return getDimPrefix(dimName) + JOIN_CHAIN_LIST_SFX;
   }
 
-  public static final String getParentCubeNameKey(String cubeName) {
+  public static String getParentCubeNameKey(String cubeName) {
     return getCubePrefix(cubeName) + PARENT_CUBE_SFX;
   }
 
-  public static final String getCubeTableKeyPrefix(String tableName) {
+  public static String getCubeTableKeyPrefix(String tableName) {
     return CUBE_TABLE_PFX + tableName.toLowerCase();
   }
 
@@ -350,7 +347,7 @@ public class MetastoreUtil {
   }
 
   public static String getLatestPartTimestampKey(String partCol) {
-    return MetastoreConstants.STORAGE_PFX + partCol + 
MetastoreConstants.LATEST_PART_TIMESTAMP_SFX;
+    return STORAGE_PFX + partCol + LATEST_PART_TIMESTAMP_SFX;
   }
 
   // //////////////////////////
@@ -362,16 +359,15 @@ public class MetastoreUtil {
     }
     String sep = "";
     StringBuilder valueStr = new StringBuilder();
-    Iterator<E> it = set.iterator();
-    while (it.hasNext()) {
-      valueStr.append(sep).append(it.next().getName());
+    for (E aSet : set) {
+      valueStr.append(sep).append(aSet.getName());
       sep = ",";
     }
     return valueStr.toString();
   }
 
   static <E extends Named> List<String> getNamedStrs(Collection<E> set, int 
maxLength) {
-    List<String> namedStrings = new ArrayList<String>();
+    List<String> namedStrings = new ArrayList<>();
     if (set == null || set.isEmpty()) {
       return namedStrings;
     }
@@ -396,10 +392,10 @@ public class MetastoreUtil {
     return namedStrings;
   }
 
-  private static int maxParamLength = 3999;
+  private static final int MAX_PARAM_LENGTH = 3999;
 
   public static <E extends Named> void addNameStrings(Map<String, String> 
props, String key, Collection<E> set) {
-    addNameStrings(props, key, set, maxParamLength);
+    addNameStrings(props, key, set, MAX_PARAM_LENGTH);
   }
 
   static <E extends Named> void addNameStrings(Map<String, String> props, 
String key,
@@ -454,15 +450,6 @@ public class MetastoreUtil {
     return valueStr.toString();
   }
 
-  public static Set<String> getColumnNames(AbstractCubeTable table) {
-    List<FieldSchema> fields = table.getColumns();
-    Set<String> columns = new HashSet<String>(fields.size());
-    for (FieldSchema f : fields) {
-      columns.add(f.getName().toLowerCase());
-    }
-    return columns;
-  }
-
   public static void addColumnNames(CubeDimAttribute dim, Set<String> cols) {
     if (dim instanceof HierarchicalDimAttribute) {
       HierarchicalDimAttribute h = (HierarchicalDimAttribute) dim;
@@ -486,6 +473,16 @@ public class MetastoreUtil {
     return STORAGE_PFX + PARTITION_TIMELINE_CACHE + "present";
   }
 
+  public static void filterPartitionsByUpdatePeriod(List<Partition> 
partitions, UpdatePeriod updatePeriod) {
+    Iterator<Partition> iter = partitions.iterator();
+    while (iter.hasNext()) {
+      Partition part = iter.next();
+      if 
(!UpdatePeriod.valueOf(part.getParameters().get(PARTITION_UPDATE_PERIOD)).equals(updatePeriod))
 {
+        iter.remove();
+      }
+    }
+  }
+
   public static List<Partition> filterPartitionsByNonTimeParts(List<Partition> 
partitions,
     Map<String, String> nonTimePartSpec,
     String latestPartCol) {
@@ -497,7 +494,7 @@ public class MetastoreUtil {
         if ((nonTimePartSpec == null || 
!nonTimePartSpec.containsKey(entry1.getKey()))
           && !entry1.getKey().equals(latestPartCol)) {
           try {
-            
UpdatePeriod.valueOf(part.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD))
+            
UpdatePeriod.valueOf(part.getParameters().get(PARTITION_UPDATE_PERIOD))
               .format()
               .parse(entry1.getValue());
           } catch (ParseException e) {
@@ -505,7 +502,6 @@ public class MetastoreUtil {
           }
         }
       }
-
       if (ignore) {
         iter.remove();
       }
@@ -513,13 +509,13 @@ public class MetastoreUtil {
     return partitions;
   }
 
-  public static Date getLatestTimeStampOfDimtable(Partition part, String 
partCol) throws HiveException {
+  public static Date getLatestTimeStampFromPartition(Partition part, String 
partCol) throws HiveException {
     if (part != null) {
       String latestTimeStampStr = 
part.getParameters().get(MetastoreUtil.getLatestPartTimestampKey(partCol));
-      String latestPartUpdatePeriod = 
part.getParameters().get(MetastoreConstants.PARTITION_UPDATE_PERIOD);
+      String latestPartUpdatePeriod = 
part.getParameters().get(PARTITION_UPDATE_PERIOD);
       UpdatePeriod latestUpdatePeriod = 
UpdatePeriod.valueOf(latestPartUpdatePeriod.toUpperCase());
       try {
-        return latestUpdatePeriod.format().parse(latestTimeStampStr);
+        return latestUpdatePeriod.parse(latestTimeStampStr);
       } catch (ParseException e) {
         throw new HiveException(e);
       }

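The new filterPartitionsByUpdatePeriod above filters the partition list in
place with an explicit Iterator so that removal during traversal is safe. The
same pattern in a generic, self-contained form (a sketch only; the String-based
shape is chosen for illustration and is not Lens API):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class InPlaceFilterSketch {
  // Keep only elements equal to the wanted value, removing the rest in place.
  // Iterator.remove() allows modifying the list mid-iteration without
  // ConcurrentModificationException.
  static void retainEqual(List<String> items, String wanted) {
    Iterator<String> it = items.iterator();
    while (it.hasNext()) {
      if (!wanted.equals(it.next())) {
        it.remove();
      }
    }
  }

  public static void main(String[] args) {
    List<String> periods = new ArrayList<String>(Arrays.asList("DAILY", "HOURLY", "DAILY"));
    retainEqual(periods, "DAILY");
    System.out.println(periods); // prints [DAILY, DAILY]
  }
}
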
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
index 437227c..9318603 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/Storage.java
@@ -300,7 +300,7 @@ public abstract class Storage extends AbstractCubeTable 
implements PartitionMeta
             
.get(addPartitionDesc.getNonTimePartSpec()).latestParts.entrySet()) {
             if (addPartitionDesc.getTimePartSpec().containsKey(entry.getKey())
               && 
entry.getValue().get(MetastoreUtil.getLatestPartTimestampKey(entry.getKey())).equals(
-                
updatePeriod.format().format(addPartitionDesc.getTimePartSpec().get(entry.getKey()))))
 {
+                
updatePeriod.format(addPartitionDesc.getTimePartSpec().get(entry.getKey())))) {
               if 
(latestPartIndexForPartCols.get(addPartitionDesc.getNonTimePartSpec()) == null) 
{
                 
latestPartIndexForPartCols.put(addPartitionDesc.getNonTimePartSpec(),
                   Maps.<String, Integer>newHashMap());

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
index 044425b..b99fef2 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StoragePartitionDesc.java
@@ -73,7 +73,7 @@ public class StoragePartitionDesc extends 
AddPartitionDesc.OnePartitionDesc {
     if (fullPartSpec == null) {
       fullPartSpec = new HashMap<String, String>();
       for (Map.Entry<String, Date> entry : timePartSpec.entrySet()) {
-        fullPartSpec.put(entry.getKey(), 
updatePeriod.format().format(entry.getValue()));
+        fullPartSpec.put(entry.getKey(), 
updatePeriod.format(entry.getValue()));
       }
       if (nonTimePartSpec != null) {
         fullPartSpec.putAll(nonTimePartSpec);

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
index 0a2c5df..1e276df 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/StorageTableDesc.java
@@ -19,10 +19,12 @@
 
 package org.apache.lens.cube.metadata;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
 public class StorageTableDesc extends CreateTableDesc {
@@ -45,6 +47,25 @@ public class StorageTableDesc extends CreateTableDesc {
     super.getTblProps().put(MetastoreConstants.TIME_PART_COLUMNS, 
StringUtils.join(this.timePartCols, ','));
   }
 
+  public StorageTableDesc() {
+  }
+
+  public StorageTableDesc(Class<?> inputFormatClass, Class<?> 
outputFormatClass,
+    ArrayList<FieldSchema> partCols, List<String> timePartCols) {
+    if (inputFormatClass != null) {
+      setInputFormat(inputFormatClass.getCanonicalName());
+    }
+    if (outputFormatClass != null) {
+      setOutputFormat(outputFormatClass.getCanonicalName());
+    }
+    if (partCols != null) {
+      setPartCols(partCols);
+    }
+    if (timePartCols != null) {
+      setTimePartCols(timePartCols);
+    }
+  }
+
   /**
    * @deprecated
    */

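The new StorageTableDesc constructor above bundles the usual
setInputFormat/setOutputFormat/setPartCols/setTimePartCols calls into one
statement. A hedged usage sketch follows; the Hive format classes and the "dt"
time-partition column are placeholders chosen for illustration, not values
mandated by this commit.

import java.util.ArrayList;
import java.util.Collections;

import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.lens.cube.metadata.StorageTableDesc;

public class StorageTableDescUsageSketch {
  public static void main(String[] args) {
    // Assumed single time-partition column named "dt".
    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
    partCols.add(new FieldSchema("dt", "string", "time partition"));

    StorageTableDesc desc = new StorageTableDesc(
      TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class,
      partCols, Collections.singletonList("dt"));

    // The constructor populates table properties such as the time part
    // columns; print them to see the result of the convenience setup.
    System.out.println(desc.getTblProps());
  }
}
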
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
index 0026262..80295b1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
@@ -38,7 +38,7 @@ public class TimePartition implements 
Comparable<TimePartition>, Named {
   private TimePartition(@NonNull UpdatePeriod updatePeriod, @NonNull Date 
date) {
     this.updatePeriod = updatePeriod;
     this.date = updatePeriod.truncate(date);
-    this.dateString = updatePeriod.format().format(this.date);
+    this.dateString = updatePeriod.format(this.date);
   }
 
   public static TimePartition of(UpdatePeriod updatePeriod, Date date) throws 
LensException {
@@ -56,7 +56,7 @@ public class TimePartition implements 
Comparable<TimePartition>, Named {
         throw new LensException(getWrongUpdatePeriodMessage(updatePeriod, 
dateString));
       }
       try {
-        return TimePartition.of(updatePeriod, 
updatePeriod.format().parse(dateString));
+        return TimePartition.of(updatePeriod, updatePeriod.parse(dateString));
       } catch (ParseException e) {
         throw new LensException(getWrongUpdatePeriodMessage(updatePeriod, 
dateString), e);
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
index f192463..4c76a69 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/UpdatePeriod.java
@@ -22,17 +22,22 @@ package org.apache.lens.cube.metadata;
 import static java.util.Calendar.*;
 
 import java.text.DateFormat;
+import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Comparator;
 import java.util.Date;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.parse.DateUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang3.time.DateUtils;
 
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import lombok.Getter;
 
 public enum UpdatePeriod implements Named {
@@ -181,10 +186,6 @@ public enum UpdatePeriod implements Named {
     return this.weight;
   }
 
-  public long monthWeight(Date date) {
-    return DateUtil.getNumberofDaysInMonth(date) * DAILY.weight();
-  }
-
   public static UpdatePeriod fromUnitName(String unitName) throws 
LensException {
     for (UpdatePeriod up : values()) {
       if (up.getUnitName().equals(unitName)) {
@@ -219,6 +220,37 @@ public enum UpdatePeriod implements Named {
     }
   }
 
+  Cache<Date, String> dateToStringCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+  Cache<String, Date> stringToDateCache = CacheBuilder.newBuilder()
+    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();
+
+  public String format(final Date date) {
+    try {
+      return dateToStringCache.get(date, new Callable<String>() {
+        @Override
+        public String call() {
+          return format().format(date);
+        }
+      });
+    } catch (ExecutionException e) {
+      return format().format(date);
+    }
+  }
+
+  public Date parse(final String dateString) throws ParseException {
+    try {
+      return stringToDateCache.get(dateString, new Callable<Date>() {
+        @Override
+        public Date call() throws Exception {
+          return format().parse(dateString);
+        }
+      });
+    } catch (ExecutionException e) {
+      return format().parse(dateString);
+    }
+  }
+
   public String formatStr() {
     return this.format;
   }

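The format(Date) and parse(String) methods added above memoize conversions in
small Guava caches and fall back to direct SimpleDateFormat work when the cache
lookup fails. A standalone sketch of the same pattern for the format direction;
the yyyy-MM-dd pattern is an assumption here, while the cache sizing mirrors
the values in the hunk above.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class CachedDateFormatterSketch {
  // Small bounded cache: repeated formatting of the same Date hits the cache
  // instead of re-running SimpleDateFormat each time.
  private final Cache<Date, String> dateToString = CacheBuilder.newBuilder()
    .expireAfterWrite(2, TimeUnit.HOURS).maximumSize(100).build();

  public String format(final Date date) {
    try {
      return dateToString.get(date, new Callable<String>() {
        @Override
        public String call() {
          return new SimpleDateFormat("yyyy-MM-dd").format(date);
        }
      });
    } catch (ExecutionException e) {
      // Fall back to direct formatting if the cache loader fails.
      return new SimpleDateFormat("yyyy-MM-dd").format(date);
    }
  }
}
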
http://git-wip-us.apache.org/repos/asf/lens/blob/87049563/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 4690d1d..5e17eac 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -45,7 +45,6 @@ public final class DateUtil {
   private DateUtil() {
 
   }
-
   /*
    * NOW -> new java.util.Date() NOW-7DAY -> a date one week earlier NOW (+-)
    * <NUM>UNIT or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
@@ -68,7 +67,6 @@ public final class DateUtil {
 
   public static final String WSPACE = "\\s+";
   public static final String OPTIONAL_WSPACE = "\\s*";
-  public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
 
   public static final String SIGNAGE = "\\+|\\-";
   public static final Pattern P_SIGNAGE = Pattern.compile(SIGNAGE);
@@ -108,10 +106,6 @@ public final class DateUtil {
       }
     };
 
-  public static String formatDate(Date dt) {
-    return ABSDATE_PARSER.get().format(dt);
-  }
-
   public static String getAbsDateFormatString(String str) {
     if (str.matches(YEAR_FMT)) {
       return str + "-01-01-00:00:00,000";
@@ -178,7 +172,6 @@ public final class DateUtil {
 
       Matcher granularityMatcher = P_UNIT.matcher(nowWithGranularity);
       if (granularityMatcher.find()) {
-        String unit = granularityMatcher.group().toLowerCase();
         calendar = 
UpdatePeriod.fromUnitName(granularityMatcher.group().toLowerCase()).truncate(calendar);
       }
     }
@@ -269,12 +262,6 @@ public final class DateUtil {
     return cal.getTime();
   }
 
-  public static int getNumberofDaysInMonth(Date date) {
-    Calendar calendar = Calendar.getInstance();
-    calendar.setTime(date);
-    return calendar.getActualMaximum(DAY_OF_MONTH);
-  }
-
   public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
     // Move 'from' to end of month, unless its the first day of month
     boolean coverable = true;
