Author: amareshwari
Date: Fri Jun  7 12:33:02 2013
New Revision: 1490612

URL: http://svn.apache.org/r1490612
Log:
Add javadoc to cube metastore api

Modified:
    
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
    
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
    
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
    
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
    
hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java

Modified: 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
URL: 
http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java?rev=1490612&r1=1490611&r2=1490612&view=diff
==============================================================================
--- 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
 (original)
+++ 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
 Fri Jun  7 12:33:02 2013
@@ -35,6 +35,14 @@ public class CubeMetastoreClient {
   private static final Map<HiveConf, CubeMetastoreClient> clientMapping =
       new HashMap<HiveConf, CubeMetastoreClient>();
 
+  /**
+   * Get the instance of {@link CubeMetastoreClient} corresponding
+   * to {@link HiveConf}
+   *
+   * @param conf
+   * @return CubeMetastoreClient
+   * @throws HiveException
+   */
   public static CubeMetastoreClient getInstance(HiveConf conf)
       throws HiveException {
     if (clientMapping.get(conf) == null) {
@@ -47,7 +55,10 @@ public class CubeMetastoreClient {
     return metastore;
   }
 
-  public void close() {
+  /**
+   * Close the current metastore client
+   */
+  public static void close() {
     Hive.closeCurrent();
   }
 
@@ -143,16 +154,42 @@ public class CubeMetastoreClient {
     }
   }
 
+  /**
+   * Create cube in metastore defined by {@link Cube} object
+   *
+   * @param cube the {@link Cube} object.
+   * @throws HiveException
+   */
   public void createCube(Cube cube) throws HiveException {
     createCubeHiveTable(cube);
   }
 
+  /**
+   * Create cube defined by measures and dimensions
+   *
+   * @param name Name of the cube
+   * @param measures Measures of the cube
+   * @param dimensions Dimensions of the cube
+   *
+   * @throws HiveException
+   */
   public void createCube(String name, Set<CubeMeasure> measures,
       Set<CubeDimension> dimensions) throws HiveException {
     Cube cube = new Cube(name, measures, dimensions);
     createCube(cube);
   }
 
+  /**
+   * Create a cube fact table
+   *
+   * @param cubeName The cube name to which fact belongs to.
+   * @param factName The fact name
+   * @param columns The columns of fact table
+   * @param storageAggregatePeriods Aggregate periods for the storages
+   * @param weight Weight of the fact table
+   *
+   * @throws HiveException
+   */
   public void createCubeFactTable(String cubeName, String factName,
       List<FieldSchema> columns,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods, double weight)
@@ -162,6 +199,16 @@ public class CubeMetastoreClient {
     createCubeTable(factTable, storageAggregatePeriods);
   }
 
+  /**
+   * Create a cube dimension table
+   *
+   * @param dimName dimension table name
+   * @param columns Columns of the dimension table
+   * @param weight Weight of the dimension table
+   * @param dimensionReferences References to other dimensions
+   * @param storages Storages on which dimension is available
+   * @throws HiveException
+   */
   public void createCubeDimensionTable(String dimName,
       List<FieldSchema> columns, double weight,
       Map<String, TableReference> dimensionReferences, Set<Storage> storages)
@@ -194,6 +241,17 @@ public class CubeMetastoreClient {
     }
   }
 
+  /**
+   * Create a cube dimension table
+   *
+   * @param dimName dimension table name
+   * @param columns Columns of the dimension table
+   * @param weight Weight of the dimension table
+   * @param dimensionReferences References to other dimensions
+   * @param dumpPeriods Storages and their dump periods on which dimension
+   *  is available
+   * @throws HiveException
+   */
   public void createCubeDimensionTable(String dimName,
       List<FieldSchema> columns, double weight,
       Map<String, TableReference> dimensionReferences,
@@ -214,6 +272,14 @@ public class CubeMetastoreClient {
     }
   }
 
+  /**
+   * Create cube fact table defined by {@link CubeFactTable} object
+   *
+   * @param factTable The {@link CubeFactTable} object
+   * @param storageAggregatePeriods Storages and their aggregate periods on
+   *  which fact is available
+   * @throws HiveException
+   */
   public void createCubeTable(CubeFactTable factTable,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
           throws HiveException {
@@ -231,6 +297,13 @@ public class CubeMetastoreClient {
     }
   }
 
+  /**
+   * Create cube dimension table defined by {@link CubeDimensionTable} object
+   *
+   * @param dimTable The {@link CubeDimensionTable} object
+   * @param storages Storages on which dimension is available
+   * @throws HiveException
+   */
   public void createCubeTable(CubeDimensionTable dimTable,
       Set<Storage> storages) throws HiveException {
     // create virtual cube table in metastore
@@ -244,6 +317,14 @@ public class CubeMetastoreClient {
     }
   }
 
+  /**
+   * Add storage to fact
+   *
+   * @param table The CubeFactTable
+   * @param storage The storage
+   * @param updatePeriods Update periods of the fact on the storage
+   * @throws HiveException
+   */
   public void addStorage(CubeFactTable table, Storage storage,
       List<UpdatePeriod> updatePeriods) throws HiveException {
     // TODO add the update periods to cube table properties
@@ -251,6 +332,14 @@ public class CubeMetastoreClient {
         updatePeriods);
   }
 
+  /**
+   * Add an update period to a fact storage
+   *
+   * @param table The CubeFactTable
+   * @param storage The storage
+   * @param updatePeriod The Update period of the fact on the storage
+   * @throws HiveException
+   */
   public void addStorageUpdatePeriod(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod) throws HiveException {
     // TODO add the update periods to cube table properties
@@ -258,24 +347,7 @@ public class CubeMetastoreClient {
         getStorageSD(storage, getCubeTableSd(table)), updatePeriod);
   }
 
-  public void addColumn(AbstractCubeTable table, FieldSchema column) {
-    // TODO
-  }
-
-  public void addDimensionReference(AbstractCubeTable srcTable, String srcCol,
-      TableReference reference) {
-    // TODO
-  }
-
-  // public void addMeasure(CubeFactTable table, Measure measure) {
-  // TODO
-  // }
-
-  public void addUpdatePeriod(CubeFactTable table, UpdatePeriod updatePeriod) {
-    // TODO
-  }
-
-  public static List<String> getPartitionValues(Table tbl,
+  static List<String> getPartitionValues(Table tbl,
       Map<String, String> partSpec) throws HiveException {
     List<String> pvals = new ArrayList<String>();
     for (FieldSchema field : tbl.getPartitionKeys()) {
@@ -289,6 +361,15 @@ public class CubeMetastoreClient {
     return pvals;
   }
 
+  /**
+   * Add time partition to the fact on given storage for an updateperiod
+   *
+   * @param table The {@link CubeFactTable} object
+   * @param storage The {@link Storage} object
+   * @param updatePeriod The updatePeriod
+   * @param partitionTimestamp partition timestamp
+   * @throws HiveException
+   */
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp)
           throws HiveException {
@@ -298,6 +379,17 @@ public class CubeMetastoreClient {
         partitionTimestamp), false);
   }
 
+  /**
+   * Add a partition to the fact on given storage for an updateperiod, with
+   *  custom partition spec
+   *
+   * @param table The {@link CubeFactTable} object
+   * @param storage The {@link Storage} object
+   * @param updatePeriod The updatePeriod
+   * @param partitionTimestamp partition timestamp
+   * @param partSpec The partition spec
+   * @throws HiveException
+   */
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp,
       Map<String, String> partSpec)
@@ -309,6 +401,14 @@ public class CubeMetastoreClient {
     addPartition(storageTableName, storage, partSpec, false);
   }
 
+  /**
+   * Add a partition to dimension table on a given storage
+   *
+   * @param table The {@link CubeDimensionTable} object
+   * @param storage The {@link Storage} object
+   * @param partitionTimestamp partition timestamp
+   * @throws HiveException
+   */
   public void addPartition(CubeDimensionTable table, Storage storage,
       Date partitionTimestamp) throws HiveException {
     String storageTableName = MetastoreUtil.getDimStorageTableName(
@@ -401,14 +501,17 @@ public class CubeMetastoreClient {
     return partitionExists(storageTableName, Storage.getLatestPartSpec());
   }
 
+  /**
+   * Get the hive {@link Table} corresponding to the name
+   *
+   * @param tableName
+   * @return {@link Table} object
+   * @throws HiveException
+   */
   public Table getHiveTable(String tableName) throws HiveException {
     return getTable(tableName);
   }
 
-  public Table getStorageTable(String tableName) throws HiveException {
-    return getHiveTable(tableName);
-  }
-
   private Table getTable(String tableName) throws HiveException {
     Table tbl;
     try {
@@ -420,6 +523,13 @@ public class CubeMetastoreClient {
     return tbl;
   }
 
+  /**
+   * Is the table name passed a fact table?
+   *
+   * @param tableName table name
+   * @return true if it is cube fact, false otherwise
+   * @throws HiveException
+   */
   public boolean isFactTable(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
     return isFactTable(tbl);
@@ -440,6 +550,13 @@ public class CubeMetastoreClient {
     return false;
   }
 
+  /**
+   * Is the table name passed a dimension table?
+   *
+   * @param tableName table name
+   * @return true if it is cube dimension, false otherwise
+   * @throws HiveException
+   */
   public boolean isDimensionTable(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
     return isDimensionTable(tbl);
@@ -451,17 +568,40 @@ public class CubeMetastoreClient {
     return CubeTableType.DIMENSION.name().equals(tableType);
   }
 
+  /**
+   * Is the table name passed a cube?
+   *
+   * @param tableName table name
+   * @return true if it is cube, false otherwise
+   * @throws HiveException
+   */
   public boolean isCube(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
     return isCube(tbl);
   }
 
+  /**
+   * Is the hive table a cube table?
+   *
+   * @param tbl the hive table
+   * @return true if the table is a cube table, false otherwise
+   * @throws HiveException
+   */
   boolean isCube(Table tbl) throws HiveException {
     String tableType = tbl.getParameters().get(
         MetastoreConstants.TABLE_TYPE_KEY);
     return CubeTableType.CUBE.name().equals(tableType);
   }
 
+  /**
+   * Get {@link CubeFactTable} object corresponding to the name
+   *
+   * @param tableName The cube fact name
+   * @return Returns CubeFactTable if table name passed is a fact table,
+   *  null otherwise
+   * @throws HiveException
+   */
+
   public CubeFactTable getFactTable(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
     return getFactTable(tbl);
@@ -474,6 +614,14 @@ public class CubeMetastoreClient {
     return null;
   }
 
+  /**
+   * Get {@link CubeDimensionTable} object corresponding to the name
+   *
+   * @param tableName The cube dimension name
+   * @return Returns CubeDimensionTable if table name passed is a dimension
+   *  table, null otherwise
+   * @throws HiveException
+   */
   public CubeDimensionTable getDimensionTable(String tableName)
       throws HiveException {
     Table tbl = getTable(tableName);
@@ -483,6 +631,13 @@ public class CubeMetastoreClient {
     return null;
   }
 
+  /**
+   * Get {@link Cube} object corresponding to the name
+   *
+   * @param tableName The cube name
+   * @return Returns cube if table name passed is a cube, null otherwise
+   * @throws HiveException
+   */
   public Cube getCube(String tableName) throws HiveException {
     Table tbl = getTable(tableName);
     if (isCube(tableName)) {
@@ -491,6 +646,13 @@ public class CubeMetastoreClient {
     return null;
   }
 
+  /**
+   * Get all dimension tables in metastore
+   *
+   * @return List of dimension tables
+   *
+   * @throws HiveException
+   */
   public List<CubeDimensionTable> getAllDimensionTables()
       throws HiveException {
     List<CubeDimensionTable> dimTables = new ArrayList<CubeDimensionTable>();
@@ -506,6 +668,14 @@ public class CubeMetastoreClient {
     return dimTables;
   }
 
+  /**
+   * Get all fact tables in the cube.
+   *
+   * @param cube Cube object
+   *
+   * @return List of fact tables
+   * @throws HiveException
+   */
   public List<CubeFactTable> getAllFactTables(Cube cube) throws HiveException {
     List<CubeFactTable> factTables = new ArrayList<CubeFactTable>();
     try {
@@ -520,27 +690,4 @@ public class CubeMetastoreClient {
     }
     return factTables;
   }
-
-  public boolean isColumnInTable(String column, String table) {
-    try {
-      List<String> columns = getColumnNames(table);
-      if (columns == null) {
-        return false;
-      } else {
-        return columns.contains(column);
-      }
-    } catch (HiveException e) {
-      e.printStackTrace();
-      return false;
-    }
-  }
-
-  private List<String> getColumnNames(String table) throws HiveException {
-    List<FieldSchema> fields = getTable(table).getCols();
-    List<String> columns = new ArrayList<String>(fields.size());
-    for (FieldSchema f : fields) {
-      columns.add(f.getName());
-    }
-    return columns;
-  }
 }

Modified: 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
URL: 
http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java?rev=1490612&r1=1490611&r2=1490612&view=diff
==============================================================================
--- 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
 (original)
+++ 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
 Fri Jun  7 12:33:02 2013
@@ -7,6 +7,7 @@ import static org.apache.hadoop.hive.ser
 import static org.apache.hadoop.hive.serde.serdeConstants.MAPKEY_DELIM;
 import static org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT;
 
+import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
@@ -186,4 +187,10 @@ public class HDFSStorage extends Storage
     }
   }
 
+  @Override
+  public void dropPartition(String storageTableName,
+      List<String> partVals, HiveConf conf) throws HiveException {
+    Hive.get(conf).dropPartition(storageTableName, partVals, false);
+  }
+
 }

Modified: 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
URL: 
http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java?rev=1490612&r1=1490611&r2=1490612&view=diff
==============================================================================
--- 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
 (original)
+++ 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
 Fri Jun  7 12:33:02 2013
@@ -12,13 +12,20 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.serdeConstants;
 
+/**
+ *
+ * Storage is a named interface which represents the underlying storage of
+ * the data.
+ *
+ */
 public abstract class Storage implements Named {
 
   private final TableType tableType;
   private final Map<String, String> tableParameters =
       new HashMap<String, String>();
   private final List<FieldSchema> partCols = new ArrayList<FieldSchema>();
-  protected Map<String, String> serdeParameters = new HashMap<String, 
String>();
+  protected final Map<String, String> serdeParameters =
+      new HashMap<String, String>();
   private final String name;
 
   protected Storage(String name, TableType type) {
@@ -26,26 +33,57 @@ public abstract class Storage implements
     this.name = name;
   }
 
+  /**
+   * Get all the partition columns of the storage.
+   *
+   * @return List of {@link FieldSchema}
+   */
   public List<FieldSchema> getPartCols() {
     return partCols;
   }
 
+  /**
+   * Get the table type. It can be MANAGED or EXTERNAL.
+   *
+   * @return TableType enum
+   */
   public TableType getTableType() {
     return tableType;
   }
 
+  /**
+   * Get table properties
+   *
+   * @return Map<String, String>
+   */
   public Map<String, String> getTableParameters() {
     return tableParameters;
   }
 
+  /**
+   * Add a partition column
+   *
+   * @param column the partition column, specified by its name and type
+   */
   public void addToPartCols(FieldSchema column) {
     partCols.add(column);
   }
 
+  /**
+   * Add more table parameters
+   *
+   * @param parameters table parameters to add
+   */
   protected void addToTableParameters(Map<String, String> parameters) {
     tableParameters.putAll(tableParameters);
   }
 
+  /**
+   * Add a table property
+   *
+   * @param key property key
+   * @param value property value
+   */
   protected void addTableProperty(String key, String value) {
     tableParameters.put(key, value);
   }
@@ -54,21 +92,65 @@ public abstract class Storage implements
     return name;
   }
 
+  /**
+   * Get the name prefix of the storage
+   *
+   * @return Name followed by storage separator
+   */
   public String getPrefix() {
     return getPrefix(getName());
   }
 
+  /**
+   * Get the name prefix of the storage
+   *
+   * @param name Name of the storage
+   * @return Name followed by storage separator
+   */
   public static String getPrefix(String name) {
-    return name + StorageConstants.STORGAE_SEPERATOR;
+    return name + StorageConstants.STORGAE_SEPARATOR;
   }
 
+  /**
+   * Set storage descriptor for the underlying hive table
+   *
+   * @param physicalSd {@link StorageDescriptor}
+   *
+   * @throws HiveException
+   */
   public abstract void setSD(StorageDescriptor physicalSd) throws 
HiveException;
 
+  /**
+   * Add a partition in the underlying hive table
+   *
+   * @param storageTableName TableName
+   * @param partSpec Partition specification
+   * @param conf {@link HiveConf} object
+   * @param makeLatest boolean saying whether this is the latest partition
+   *
+   * @throws HiveException
+   */
   public abstract void addPartition(String storageTableName,
       Map<String, String> partSpec, HiveConf conf, boolean makeLatest)
       throws HiveException;
 
-
+  /**
+   * Drop the partition in the underlying hive table
+   *
+   * @param storageTableName TableName
+   * @param partSpec Partition specification
+   * @param conf {@link HiveConf} object
+   *
+   * @throws HiveException
+   */
+  public abstract void dropPartition(String storageTableName,
+      List<String> partVals, HiveConf conf) throws HiveException;
+
+  /**
+   * Get the date partition key
+   *
+   * @return String
+   */
   public static String getDatePartitionKey() {
     return StorageConstants.DATE_PARTITION_KEY;
   }
@@ -79,10 +161,20 @@ public abstract class Storage implements
         StorageConstants.LATEST_PARTITION_VALUE);
   }
 
+  /**
+   * Get the partition spec for latest partition
+   *
+   * @return latest partition spec as Map from String to String
+   */
   public static Map<String, String> getLatestPartSpec() {
     return latestSpec;
   }
 
+  /**
+   * Get the latest partition value as List
+   *
+   * @return List
+   */
   public static List<String> getPartitionsForLatest() {
     List<String> parts = new ArrayList<String>();
     parts.add(StorageConstants.LATEST_PARTITION_VALUE);
@@ -93,6 +185,11 @@ public abstract class Storage implements
       serdeConstants.STRING_TYPE_NAME,
       "date partition");
 
+  /**
+   * Get the date partition as fieldschema
+   *
+   * @return FieldSchema
+   */
   public static FieldSchema getDatePartition() {
     return dtPart;
   }

Modified: 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
URL: 
http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java?rev=1490612&r1=1490611&r2=1490612&view=diff
==============================================================================
--- 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
 (original)
+++ 
hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/StorageConstants.java
 Fri Jun  7 12:33:02 2013
@@ -1,9 +1,7 @@
 package org.apache.hadoop.hive.ql.cube.metadata;
 
 public interface StorageConstants {
-  public static final String ES_STORAGE_NAME = "ES";
-  public static final String HDFS_STORAGE_NAME = "HDFS";
   public static final String DATE_PARTITION_KEY = "dt";
-  public static final String STORGAE_SEPERATOR = "_";
+  public static final String STORGAE_SEPARATOR = "_";
   public static final String LATEST_PARTITION_VALUE = "latest";
 }

Modified: 
hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
URL: 
http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java?rev=1490612&r1=1490611&r2=1490612&view=diff
==============================================================================
--- 
hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
 (original)
+++ 
hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/metadata/TestCubeMetastoreClient.java
 Fri Jun  7 12:33:02 2013
@@ -478,7 +478,7 @@ public class TestCubeMetastoreClient {
       String storageTableName = MetastoreUtil.getDimStorageTableName(dimName,
           storage.getPrefix());
       Assert.assertTrue(client.tableExists(storageTableName));
-      
Assert.assertTrue(!client.getStorageTable(storageTableName).isPartitioned());
+      
Assert.assertTrue(!client.getHiveTable(storageTableName).isPartitioned());
     }
   }
 
@@ -525,7 +525,7 @@ public class TestCubeMetastoreClient {
     String storageTableName2 = MetastoreUtil.getDimStorageTableName(dimName,
         hdfsStorage2.getPrefix());
     Assert.assertTrue(client.tableExists(storageTableName2));
-    
Assert.assertTrue(!client.getStorageTable(storageTableName2).isPartitioned());
+    Assert.assertTrue(!client.getHiveTable(storageTableName2).isPartitioned());
   }
 
 }


Reply via email to