svn commit: r1635577 - in /hive/trunk/hcatalog: core/src/main/java/org/apache/hive/hcatalog/data/ hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/ hcatalog-pig-adapter/src/test/java/or

Author: daijy
Date: Thu Oct 30 18:27:59 2014
New Revision: 1635577

URL: http://svn.apache.org/r1635577
Log:
HIVE-7282: HCatLoader fail to load Orc map with null key (Daniel Dai)

Modified:

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java

Modified: 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java?rev=1635577&r1=1635576&r2=1635577&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
 (original)
+++ 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
 Thu Oct 30 18:27:59 2014
@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.TreeMap;
+import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -212,7 +212,7 @@ public class HCatRecordSerDe implements 
   private static Map<?, ?> serializeMap(Object f, MapObjectInspector moi) throws SerDeException {
     ObjectInspector koi = moi.getMapKeyObjectInspector();
     ObjectInspector voi = moi.getMapValueObjectInspector();
-    Map<Object, Object> m = new TreeMap<Object, Object>();
+    Map<Object, Object> m = new HashMap<Object, Object>();
 
     Map<?, ?> readMap = moi.getMap(f);
     if (readMap == null) {

Modified: 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java?rev=1635577&r1=1635576&r2=1635577&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
 (original)
+++ 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
 Thu Oct 30 18:27:59 2014
@@ -480,7 +480,9 @@ class PigHCatUtil {
     Map<String, Object> result = new HashMap<String, Object>();
     for (Entry<?, ?> entry : map.entrySet()) {
       // since map key for Pig has to be Strings
-      result.put(entry.getKey().toString(), extractPigObject(entry.getValue(), hfs.getMapValueSchema().get(0)));
+      if (entry.getKey()!=null) {
+        result.put(entry.getKey().toString(), extractPigObject(entry.getValue(), hfs.getMapValueSchema().get(0)));
+      }
     }
     return result;
   }

Modified: 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1635577&r1=1635576&r2=1635577&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
 (original)
+++ 
hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
 Thu Oct 30 18:27:59 2014
@@ -18,8 +18,6 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import com.google.common.collect.ImmutableSet;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -52,7 +50,6 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
 
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -79,11 +76,13 @@ public class TestHCatLoaderComplexSchema
   add("testSyntheticComplexSchema");
   add("testTupleInBagInTupleInBag");
   add("testMapWithComplexData");
+  add("testMapNullKey");
 }});
 put(IOConstants.PARQUETFILE, new HashSet<String>() {{
   add("testSyntheticComplexSchema");
   add("testTupleInBagInTupleInBag");
   add("testMapWithComplexData");
+  add("testMapNullKey");
 }});
   }};
 
@@ -223,6 +222,10 @@ public class TestHCatLoaderComplexSchema
 
   private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
     throws IOException, CommandNeedRetryException, ExecException, FrontendException {
+    verifyWriteRead(tablename, pigSchema, tableSchema, data, data, provideSchemaToStorer

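For reference, a minimal standalone sketch (not part of the commit) of why swapping TreeMap for HashMap fixes the failure: TreeMap orders keys with compareTo, so the first null key deserialized from an ORC map throws NullPointerException, while HashMap accepts a single null key.

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class NullKeyDemo {
  public static void main(String[] args) {
    Map<Object, Object> hash = new HashMap<Object, Object>();
    hash.put(null, "value");   // fine: HashMap keeps one null key in a dedicated bucket
    System.out.println(hash);  // {null=value}

    Map<Object, Object> tree = new TreeMap<Object, Object>();
    tree.put(null, "value");   // throws NullPointerException: natural ordering cannot compare null
  }
}

The PigHCatUtil half of the patch then drops any null keys before the map reaches Pig, since Pig map keys must be strings and a null key cannot be converted with toString().
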
svn commit: r1635579 - in /hive/branches/branch-0.14/hcatalog: core/src/main/java/org/apache/hive/hcatalog/data/ hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/ hcatalog-pig-adapter/s

Author: daijy
Date: Thu Oct 30 18:29:35 2014
New Revision: 1635579

URL: http://svn.apache.org/r1635579
Log:
HIVE-7282: HCatLoader fail to load Orc map with null key (Daniel Dai)

Modified:

hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java

hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java

hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java

Modified: 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java?rev=1635579&r1=1635578&r2=1635579&view=diff
==============================================================================
--- 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
 Thu Oct 30 18:29:35 2014
@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
-import java.util.TreeMap;
+import java.util.HashMap;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -212,7 +212,7 @@ public class HCatRecordSerDe implements 
   private static Map<?, ?> serializeMap(Object f, MapObjectInspector moi) throws SerDeException {
     ObjectInspector koi = moi.getMapKeyObjectInspector();
     ObjectInspector voi = moi.getMapValueObjectInspector();
-    Map<Object, Object> m = new TreeMap<Object, Object>();
+    Map<Object, Object> m = new HashMap<Object, Object>();
 
     Map<?, ?> readMap = moi.getMap(f);
     if (readMap == null) {

Modified: 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java?rev=1635579&r1=1635578&r2=1635579&view=diff
==============================================================================
--- 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
 Thu Oct 30 18:29:35 2014
@@ -480,7 +480,9 @@ class PigHCatUtil {
     Map<String, Object> result = new HashMap<String, Object>();
     for (Entry<?, ?> entry : map.entrySet()) {
       // since map key for Pig has to be Strings
-      result.put(entry.getKey().toString(), extractPigObject(entry.getValue(), hfs.getMapValueSchema().get(0)));
+      if (entry.getKey()!=null) {
+        result.put(entry.getKey().toString(), extractPigObject(entry.getValue(), hfs.getMapValueSchema().get(0)));
+      }
     }
     return result;
   }

Modified: 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1635579&r1=1635578&r2=1635579&view=diff
==============================================================================
--- 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderComplexSchema.java
 Thu Oct 30 18:29:35 2014
@@ -18,8 +18,6 @@
  */
 package org.apache.hive.hcatalog.pig;
 
-import com.google.common.collect.ImmutableSet;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -52,7 +50,6 @@ import org.apache.pig.impl.logicalLayer.
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
 
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -79,11 +76,13 @@ public class TestHCatLoaderComplexSchema
   add("testSyntheticComplexSchema");
   add("testTupleInBagInTupleInBag");
   add("testMapWithComplexData");
+  add("testMapNullKey");
 }});
 put(IOConstants.PARQUETFILE, new HashSet<String>() {{
   add("testSyntheticComplexSchema");
   add("testTupleInBagInTupleInBag");
   add("testMapWithComplexData");
+  add("testMapNullKey");
 }});
   }};
 
@@ -223,6 +222,10 @@ public class TestHCatLoaderComplexSchema
 
   private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple>

svn commit: r1637478 - /hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java

Author: daijy
Date: Fri Nov  7 23:00:05 2014
New Revision: 1637478

URL: http://svn.apache.org/r1637478
Log:
HIVE-8484: HCatalog throws an exception if Pig job is of type 'fetch' (Lorand 
Bendig via Daniel Dai)

Modified:

hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java

Modified: 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1637478&r1=1637477&r2=1637478&view=diff
==============================================================================
--- 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 Fri Nov  7 23:00:05 2014
@@ -83,8 +83,9 @@ public class HCatUtil {
   private static volatile HiveClientCache hiveClientCache;
 
   public static boolean checkJobContextIfRunningFromBackend(JobContext j) {
-    if (j.getConfiguration().get("mapred.task.id", "").equals("") &&
-        !("true".equals(j.getConfiguration().get("pig.illustrating")))) {
+    if (j.getConfiguration().get("pig.job.converted.fetch", "").equals("") &&
+      j.getConfiguration().get("mapred.task.id", "").equals("") &&
+      !("true".equals(j.getConfiguration().get("pig.illustrating")))) {
   return false;
 }
 return true;




svn commit: r1637479 - /hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java

Author: daijy
Date: Fri Nov  7 23:00:41 2014
New Revision: 1637479

URL: http://svn.apache.org/r1637479
Log:
HIVE-8484: HCatalog throws an exception if Pig job is of type 'fetch' (Lorand 
Bendig via Daniel Dai)

Modified:

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java

Modified: 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1637479&r1=1637478&r2=1637479&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 (original)
+++ 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 Fri Nov  7 23:00:41 2014
@@ -84,8 +84,9 @@ public class HCatUtil {
   private static volatile HiveClientCache hiveClientCache;
 
   public static boolean checkJobContextIfRunningFromBackend(JobContext j) {
-    if (j.getConfiguration().get("mapred.task.id", "").equals("") &&
-        !("true".equals(j.getConfiguration().get("pig.illustrating")))) {
+    if (j.getConfiguration().get("pig.job.converted.fetch", "").equals("") &&
+      j.getConfiguration().get("mapred.task.id", "").equals("") &&
+      !("true".equals(j.getConfiguration().get("pig.illustrating")))) {
   return false;
 }
 return true;

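For reference, a minimal sketch (not part of the commit; assumes only hadoop-common on the classpath, with the property names taken from the patch) of the repaired decision: a Pig job converted to fetch mode runs on the backend but never receives a "mapred.task.id", so that case has to be recognized before the task-id test.

import org.apache.hadoop.conf.Configuration;

public class BackendCheckDemo {
  static boolean runningFromBackend(Configuration conf) {
    if (conf.get("pig.job.converted.fetch", "").equals("") &&
        conf.get("mapred.task.id", "").equals("") &&
        !("true".equals(conf.get("pig.illustrating")))) {
      return false; // no task id, not fetch-converted, not illustrating: frontend
    }
    return true;
  }

  public static void main(String[] args) {
    Configuration fetchJob = new Configuration(false);
    fetchJob.set("pig.job.converted.fetch", "true"); // set by Pig when it converts a job to fetch
    System.out.println(runningFromBackend(fetchJob));                  // true
    System.out.println(runningFromBackend(new Configuration(false)));  // false
  }
}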



svn commit: r1606277 - /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java

Author: daijy
Date: Sat Jun 28 01:07:11 2014
New Revision: 1606277

URL: http://svn.apache.org/r1606277
Log:
HIVE-7301 : Restore constants moved to HiveConf by HIVE-7211 (Navis review by 
Daniel Dai)

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java?rev=1606277&r1=1606276&r2=1606277&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java Sat Jun 28 
01:07:11 2014
@@ -59,6 +59,8 @@ import org.apache.hadoop.io.compress.Dec
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*;
+
 /**
 * <code>RCFile</code>s, short of Record Columnar File, are flat files
  * consisting of binary key/value pairs, which shares much similarity with
@@ -340,8 +342,15 @@ public class RCFile {
 
   private static final Log LOG = LogFactory.getLog(RCFile.class);
 
+  // internal variable
   public static final String COLUMN_NUMBER_METADATA_STR = "hive.io.rcfile.column.number";
 
+  public static final String RECORD_INTERVAL_CONF_STR = HIVE_RCFILE_RECORD_INTERVAL.varname;
+
+  public static final String COLUMN_NUMBER_CONF_STR = HIVE_RCFILE_COLUMN_NUMBER_CONF.varname;
+
+  public static final String TOLERATE_CORRUPTIONS_CONF_STR = HIVE_RCFILE_TOLERATE_CORRUPTIONS.varname;
+
   // HACK: We actually need BlockMissingException, but that is not available
   // in all hadoop versions.
   public static final String BLOCK_MISSING_MESSAGE =
@@ -978,8 +987,8 @@ public class RCFile {
     public Writer(FileSystem fs, Configuration conf, Path name, int bufferSize,
         short replication, long blockSize, Progressable progress,
         Metadata metadata, CompressionCodec codec) throws IOException {
-      RECORD_INTERVAL = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_RCFILE_RECORD_INTERVAL);
-      columnNumber = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_RCFILE_COLUMN_NUMBER_CONF);
+      RECORD_INTERVAL = HiveConf.getIntVar(conf, HIVE_RCFILE_RECORD_INTERVAL);
+      columnNumber = HiveConf.getIntVar(conf, HIVE_RCFILE_COLUMN_NUMBER_CONF);
 
       if (metadata == null) {
         metadata = new Metadata();
@@ -1051,8 +1060,7 @@ public class RCFile {
       this.out = out;
       this.codec = codec;
       this.metadata = metadata;
-      this.useNewMagic =
-          conf.getBoolean(HiveConf.ConfVars.HIVEUSEEXPLICITRCFILEHEADER.varname, true);
+      this.useNewMagic = conf.getBoolean(HIVEUSEEXPLICITRCFILEHEADER.varname, true);
     }
 
     /** Returns the compression codec of data in this file. */
@@ -1339,8 +1347,7 @@ public class RCFile {
     /** Create a new RCFile reader. */
     public Reader(FileSystem fs, Path file, int bufferSize, Configuration conf,
         long start, long length) throws IOException {
-      tolerateCorruptions = HiveConf.getBoolVar(
-          conf, HiveConf.ConfVars.HIVE_RCFILE_TOLERATE_CORRUPTIONS);
+      tolerateCorruptions = HiveConf.getBoolVar(conf, HIVE_RCFILE_TOLERATE_CORRUPTIONS);
       conf.setInt("io.file.buffer.size", bufferSize);
       this.file = file;
       in = openFile(fs, file, bufferSize, length);

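The point of restoring these constants is backward compatibility: code compiled against the old RCFile API keeps working because each constant now merely aliases the corresponding HiveConf.ConfVars name. A hypothetical downstream caller (not from this commit):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.io.RCFile;

public class RCFileConfDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Same key string as HIVE_RCFILE_RECORD_INTERVAL.varname
    conf.setInt(RCFile.RECORD_INTERVAL_CONF_STR, 1000);
    conf.setBoolean(RCFile.TOLERATE_CORRUPTIONS_CONF_STR, true);
    System.out.println(conf.get(RCFile.RECORD_INTERVAL_CONF_STR)); // 1000
  }
}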



svn commit: r1532003 - /hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf

Author: daijy
Date: Mon Oct 14 17:51:25 2013
New Revision: 1532003

URL: http://svn.apache.org/r1532003
Log:
HIVE-5453 : jobsubmission2.conf should use 'timeout' property (Eugene Koifman 
via Daniel Dai)

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf?rev=1532003&r1=1532002&r2=1532003&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission2.conf Mon 
Oct 14 17:51:25 2013
@@ -42,11 +42,12 @@ $cfg = 
 # see HIVE-4808
 # this is a long running test, takes 11 minutes
 # -mt must be greater than mapred.task.timeout (60ms)
- 'num' => 2,
+ 'num' => 1,
  'method' => 'POST',
  'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/jar',
  'post_options' => ['user.name=:UNAME:','arg=-mt', 'arg=642000',
- 'jar=:INPDIR_HDFS:/hclient.jar', 'class=sleep', 'statusdir=/tmp' ],
+ 'jar=:INPDIR_HDFS:/hclient.jar', 'class=sleep', 
+ 'statusdir=/tmp/TestHeartbeat_1' ],
  'json_field_substr_match' => { 'id' => '\d+'},
  'status_code' => 200,
  'check_job_created' => 1,
@@ -55,7 +56,7 @@ $cfg = 
 # it contains the value.
 # 'check_job_exit_value' => 0,
 'check_call_back' => 1,
-'timeout_seconds' => 800,
+'timeout' => 800,
 },
]
   },




svn commit: r1532018 - /hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

Author: daijy
Date: Mon Oct 14 18:38:29 2013
New Revision: 1532018

URL: http://svn.apache.org/r1532018
Log:
HIVE-5448: webhcat duplicate test TestMapReduce_2 should be removed (Thejas M 
Nair via Daniel Dai)

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1532018&r1=1532017&r2=1532018&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Mon Oct 
14 18:38:29 2013
@@ -77,24 +77,8 @@ $cfg = 
  'check_call_back' => 1,
 },
 {
- 
- 'num' => 2,
- 'method' => 'POST',
- 'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/jar',
- 'post_options' => ['user.name=:UNAME:','arg=-mt', 'arg=66', 
-'jar=:INPDIR_HDFS:/hexamples.jar', 'class=sleep', ],
- 'json_field_substr_match' => { 'id' => '\d+'},
-#results
- 'status_code' => 200,
- 'check_job_created' => 1,
- 'check_job_complete' => 'SUCCESS',
- 'check_job_exit_value' => 0,
- 'check_call_back' => 1,
- 'timeout' => 840, #increase timeout as this test takes long
-},
-{
  # with log enabled 
- 'num' => 3,
+ 'num' => 2,
  'method' => 'POST',
  'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/jar',
  'post_options' => ['user.name=:UNAME:','arg=:INPDIR_HDFS:/nums.txt', 'arg= :OUTDIR:/wc.txt', 




svn commit: r1533658 - in /hive/trunk/hcatalog: src/test/e2e/templeton/inpdir/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ webhcat/svr/src/main/java/org

Author: daijy
Date: Fri Oct 18 22:42:59 2013
New Revision: 1533658

URL: http://svn.apache.org/r1533658
Log:
HIVE-5133: webhcat jobs that need to access metastore fails in secure mode 
(Eugene Koifman via Daniel Dai)

Added:
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/DelegationTokenCache.java
Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/PigDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StreamingDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java

Added: hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig?rev=1533658&view=auto
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig (added)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/hcatloadstore.pig Fri Oct 
18 22:42:59 2013
@@ -0,0 +1,21 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements.  See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership.  The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License.  You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied.  See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+l = load '$INPDIR/nums.txt' as (i:int, j:int);
+store l into 'hcattest_pig' using org.apache.hive.hcatalog.pig.HCatStorer();
+s = load 'hcattest_pig' using org.apache.hive.hcatalog.pig.HCatLoader();
+store s into '$OUTDIR/loadstore.out';

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1533658&r1=1533657&r2=1533658&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Fri Oct 
18 22:42:59 2013
@@ -234,7 +234,7 @@ $cfg = 
 
 {
 #a simple load store script with log enabled
- 'num' => 9,
+ 'num' => 10,
  'method' => 'POST',
  'url' => ':TEMPLETON_URL:/templeton/v1/pig',
  'post_options' => ['user.name=:UNAME:', 'arg=-p', 'arg=INPDIR=:INPDIR_HDFS:','arg=-p', 'arg=OUTDIR=:OUTDIR:', 'file=:INPDIR_HDFS:/loadstore.pig',
@@ -249,7 +249,31 @@ $cfg = 
  'check_call_back' => 1,
 },
 
-#test 10
+{
+#note: this test will fail unless Hive is installed in the default location Pig expects it in
+#HIVE-5547 will address this limitation
+ 'num' => 11,
+ 'setup' => [
+ {
+  'method' => 'POST',
+  'url' => ':TEMPLETON_URL:/templeton/v1/ddl',
+  'status_code' => 200,
+  'post_options' => ['user.name=:UNAME:','exec=drop table if exists hcattest_pig; create table hcattest_pig(i int, j int) STORED AS textfile;'],
+  'json_field_substr_match' => {'stderr' => 'OK'}
+ }
+],
+ 'method' => 'POST',
+ 'url' => ':TEMPLETON_URL:/templeton/v1/pig',
+ 'post_options' => ['user.name=:UNAME:', 'arg=-useHCatalog', 'arg=-p', 'arg=INPDIR=:INPDIR_HDFS:', 'arg=-p', 'arg= OUTDIR=:OUTDIR:', 'file=:INPDIR_HDFS:/hcatloadstore.pig'],
+ 
+ 'json_field_substr_match' => { 'id' => '\d+'},
+ 'status_code' => 200,
+ 'check_job_created' => 1,
+ 'check_job_complete' => 'SUCCESS',
+ 'check_job_exit_value' => 0

svn commit: r1537184 - /hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

Author: daijy
Date: Wed Oct 30 17:15:02 2013
New Revision: 1537184

URL: http://svn.apache.org/r1537184
Log:
HIVE-5696: WebHCat e2e tests/jobsubmission.conf file is malformed and loosing 
tests ( Eugene Koifman via Daniel Dai )

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1537184&r1=1537183&r2=1537184&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Wed Oct 
30 17:15:02 2013
@@ -441,10 +441,6 @@ $cfg = 
  'check_job_exit_value' => 1,
  'check_call_back' => 1,
 },
-
-   ]
-  },
-
 {
 #test add jar
  'num' => 11,
@@ -490,11 +486,8 @@ $cfg = 
  'check_job_exit_value' => 0,
  'check_call_back' => 1,
 },
-
-
-
-
-
+   ]
+  },
  ]
 },
   ;




svn commit: r1537610 - in /hive/trunk/hcatalog: src/test/e2e/templeton/drivers/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ webhcat/svr/src/main/java/or

Author: daijy
Date: Thu Oct 31 20:02:30 2013
New Revision: 1537610

URL: http://svn.apache.org/r1537610
Log:
HIVE-5510: [WebHCat] GET job/queue return wrong job information

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf

hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1537610&r1=1537609&r2=1537610&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Thu 
Oct 31 20:02:30 2013
@@ -647,19 +647,16 @@ sub compare
   foreach my $key (keys %$json_matches) {
     my $regex_expected_value = $json_matches->{$key};
     my $path = JSON::Path->new($key);
-    my $value; 
-    # when filter_job_status is defined 
-    if (defined $testCmd->{'filter_job_status'}) {
-       # decode $testResult->{'body'} to an array of hash
-       my $body = decode_json $testResult->{'body'};
-       # in the tests, we run this case with jobName = "PigLatin:loadstore.pig"
-       # filter $body to leave only records with this jobName
-       my @filtered_body = grep {($_->{detail}{profile}{jobName} eq "PigLatin:loadstore.pig")}  @$body;
-       my @sorted_filtered_body = sort { $a->{id} cmp $b->{id} } @filtered_body;
-       $value = $path->value(\@sorted_filtered_body);
+
+    # decode $testResult->{'body'} to an array of hash
+    my $body = decode_json $testResult->{'body'};
+    my @sorted_body;
+    if (ref @$body[0] eq 'HASH') {
+      @sorted_body = sort { $a->{id} cmp $b->{id} } @$body;
     } else {
-       $value = $path->value($testResult->{'body'});
+      @sorted_body = sort { $a cmp $b } @$body;
     }
+    my $value = $path->value(\@sorted_body);
 
     if ($value !~ /$regex_expected_value/s) {
       print $log "$0::$subName INFO check failed:

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf?rev=1537610&r1=1537609&r2=1537610&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf Thu Oct 31 
20:02:30 2013
@@ -89,7 +89,6 @@ $cfg = 
  'json_path' => {'$[-1:].detail.status.username' => ':UNAME_OTHER:', '$[-2:].detail.status.username' => ':UNAME_OTHER:', '$[-3:].detail.status.username' => ':UNAME:',
  '$[-4:].detail.status.username' => ':UNAME:', '$[-5:].detail.status.username' => ':UNAME_OTHER:', '$[-6:].detail.status.username' => ':UNAME_OTHER:'},
  'status_code' => 200,
- 'filter_job_status' => 1, 
 },
 {
  # GET jobs?user.name=UNAME_OTHER&fields=*, should get only jobs launched as UNAME_OTHER
@@ -134,11 +133,11 @@ $cfg = 
  '$[-1:].detail.profile.jobId' => 'job_.*',
  '$[-1:].detail.id' => 'job_.*',
  '$[-1:].detail.parentId' => 'job_.*',
- '$[-1:].detail.percentComplete' => '100%',
- '$[-1:].detail.exitValue' => '0',
- '$[-1:].detail.user' => ':UNAME_OTHER:',
- '$[-1:].detail.callback' => '^.+$',
- '$[-1:].detail.completed' => 'done',
+ '$[-2:].detail.exitValue' => '0',
+ '$[-2:].detail.user' => ':UNAME_OTHER:',
+ '$[-2:].detail.callback' => '^.+$',
+ '$[-2:].detail.completed' => 'done',
+ '$[-2:].detail.percentComplete' => '100%',
 },
  'status_code' => 200,
 },

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests

svn commit: r1552393 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Author: daijy
Date: Thu Dec 19 18:38:03 2013
New Revision: 1552393

URL: http://svn.apache.org/r1552393
Log:
HIVE-5540: webhcat e2e test failures: Expect 1 jobs in logs, but get 1

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1552393&r1=1552392&r2=1552393&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 Thu Dec 19 18:38:03 2013
@@ -241,6 +241,7 @@ public class LaunchMapper extends Mapper
 private OutputStream out;
 private final JobID jobid;
 private final Configuration conf;
+boolean needCloseOutput = false;
 
     public Watcher(Configuration conf, JobID jobid, InputStream in, String statusdir, String name)
       throws IOException {
@@ -258,16 +259,18 @@ public class LaunchMapper extends Mapper
 Path p = new Path(statusdir, name);
 FileSystem fs = p.getFileSystem(conf);
 out = fs.create(p);
+needCloseOutput = true;
     LOG.info("templeton: Writing status to " + p);
   }
 }
 
 @Override
 public void run() {
+  PrintWriter writer = null;
   try {
 InputStreamReader isr = new InputStreamReader(in);
 BufferedReader reader = new BufferedReader(isr);
-PrintWriter writer = new PrintWriter(out);
+writer = new PrintWriter(out);
 
 String line;
 while ((line = reader.readLine()) != null) {
@@ -308,6 +311,15 @@ public class LaunchMapper extends Mapper
 }
   } catch (IOException e) {
     LOG.error("templeton: execute error: ", e);
+  } finally {
+    // Need to close() because in some FileSystem
+    // implementations flush() is no-op.
+    // Close the file handle if it is a hdfs file.
+    // But if it is stderr/stdout, skip it since
+    // WebHCat is not supposed to close it
+    if (needCloseOutput && writer != null) {
+      writer.close();
+    }
   }
 }
   }

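The ownership rule behind this change, as a minimal standalone sketch (not part of the commit): close the writer only when the Watcher created the underlying stream itself (an HDFS status file); stdout and stderr belong to the caller and must stay open.

import java.io.OutputStream;
import java.io.PrintWriter;

public class WatcherCloseDemo {
  static void pump(OutputStream out, boolean needCloseOutput, String line) {
    PrintWriter writer = null;
    try {
      writer = new PrintWriter(out);
      writer.println(line);
    } finally {
      if (needCloseOutput && writer != null) {
        writer.close(); // flush() alone can be a no-op on some FileSystem implementations
      } else if (writer != null) {
        writer.flush(); // caller-owned stream: flush but never close
      }
    }
  }

  public static void main(String[] args) {
    pump(System.out, false, "stdout stays open");
  }
}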



svn commit: r1552815 - in /hive/trunk: hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/ hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/ hcatalog/core/src/test/java/org/apache/hi

Author: daijy
Date: Fri Dec 20 21:56:10 2013
New Revision: 1552815

URL: http://svn.apache.org/r1552815
Log:
HIVE-5957: Fix HCatalog Unit tests on Windows

Modified:

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestMultiOutputFormat.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/HcatTestUtils.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatBaseTest.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestMultiOutputFormat.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoader.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatLoaderStorer.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerMulti.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/TestHCatStorerWrapper.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java

hive/trunk/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatStorerMulti.java

hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/SkeletonHBaseTest.java

hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hcatalog/hbase/TestHCatHBaseInputFormat.java

hive/trunk/hcatalog/storage-handlers/hbase/src/test/org/apache/hive/hcatalog/hbase/SkeletonHBaseTest.java

hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hcatalog/api/TestHCatClient.java

hive/trunk/hcatalog/webhcat/java-client/src/test/java/org/apache/hive/hcatalog/api/TestHCatClient.java

hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hcatalog/mapreduce/TestSequenceFileReadWrite.java

hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/TestPigHBaseStorageHandler.java

hive/trunk/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java

Modified: 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java?rev=1552815&r1=1552814&r2=1552815&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 (original)
+++ 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
 Fri Dec 20 21:56:10 2013
@@ -21,6 +21,7 @@ package org.apache.hive.hcatalog.common;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
@@ -645,4 +646,10 @@ public class HCatUtil {
   return true;
 return false;
   }
+  /**
+   * Used by various tests to make sure the path is safe for Windows
+   */
+  public static String makePathASafeFileName(String filePath) {
+    return new File(filePath).getPath().replaceAll("\\\\", "/");
+  }
 }

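A quick usage sketch of the helper added above (the Windows-style input path is a made-up example, not from the commit):

import org.apache.hive.hcatalog.common.HCatUtil;

public class SafePathDemo {
  public static void main(String[] args) {
    // The regex "\\\\" matches a single backslash, so every '\' in the
    // File-normalized path becomes '/', safe to embed in HiveQL and URIs.
    String p = HCatUtil.makePathASafeFileName("C:\\build\\test\\data\\warehouse");
    System.out.println(p); // C:/build/test/data/warehouse
  }
}
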
Modified: 
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java?rev=1552815&r1=1552814&r2=1552815&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java
 (original)
+++ 
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/HCatBaseTest.java
 Fri Dec 20 21:56:10 2013
@@ -41,8 +41,7 @@ import java.io.IOException;
  */
 public class HCatBaseTest {
   protected static final Logger LOG = LoggerFactory.getLogger(HCatBaseTest.class);
-  protected static final String TEST_DATA_DIR = System.getProperty("user.dir") +
-      "/build/test/data/" + HCatBaseTest.class.getCanonicalName();
+  protected static final String TEST_DATA_DIR = org.apache.hive.hcatalog.mapreduce.HCatBaseTest.TEST_DATA_DIR;
   protected static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
 
   protected HiveConf hiveConf = null;

Modified: 
hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hcatalog/mapreduce/TestHCatPartitionPublish.java?rev=1552815&r1=1552814&r2

svn commit: r1553607 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

Author: daijy
Date: Fri Dec 27 00:07:52 2013
New Revision: 1553607

URL: http://svn.apache.org/r1553607
Log:
HIVE-5702: add logging on WebHCat startup to print all env var and 
Configuration (Eugene Koifman via Daniel Dai)

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1553607&r1=1553606&r2=1553607&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 Fri Dec 27 00:07:52 2013
@@ -19,7 +19,13 @@
 package org.apache.hive.hcatalog.templeton;
 
 import java.io.File;
+import java.io.StringBufferInputStream;
 import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
@@ -141,16 +147,52 @@ public class AppConfig extends Configura
       set("env." + e.getKey(), e.getValue());
 
     String templetonDir = getTempletonDir();
-    for (String fname : TEMPLETON_CONF_FILENAMES)
+    for (String fname : TEMPLETON_CONF_FILENAMES) {
+      logConfigLoadAttempt(templetonDir + File.separator + fname);
       if (! loadOneClasspathConfig(fname))
         loadOneFileConfig(templetonDir, fname);
-
+    }
     String hadoopConfDir = getHadoopConfDir();
-    for (String fname : HADOOP_CONF_FILENAMES)
+    for (String fname : HADOOP_CONF_FILENAMES) {
+      logConfigLoadAttempt(hadoopConfDir + File.separator + fname);
       loadOneFileConfig(hadoopConfDir, fname);
+    }
     ProxyUserSupport.processProxyuserConfig(this);
+    LOG.info(dumpEnvironent());
+  }
+  private static void logConfigLoadAttempt(String path) {
+    LOG.info("Attempting to load config file: " + path);
   }
 
+  /**
+   * Dumps all env and config state.  Should be called once on WebHCat start up to facilitate 
+   * support/debugging.  Later it may be worth adding a REST call which will return this data.
+   */
+  private String dumpEnvironent() {
+    StringBuilder sb = new StringBuilder("WebHCat environment:\n");
+    Map<String, String> env = System.getenv();
+    List<String> propKeys = new ArrayList<String>(env.keySet());
+    Collections.sort(propKeys);
+    for(String propKey : propKeys) {
+      sb.append(propKey).append('=').append(env.get(propKey)).append('\n');
+    }
+    sb.append("Configration properties: \n");
+    Iterator<Map.Entry<String, String>> configIter = this.iterator();
+    List<Map.Entry<String, String>> configVals = new ArrayList<Map.Entry<String, String>>();
+    while(configIter.hasNext()) {
+      configVals.add(configIter.next());
+    }
+    Collections.sort(configVals, new Comparator<Map.Entry<String, String>>() {
+      @Override
+      public int compare(Map.Entry<String, String> ent, Map.Entry<String, String> ent2) {
+        return ent.getKey().compareTo(ent2.getKey());
+      }
+    });
+    for(Map.Entry<String, String> entry : configVals) {
+      sb.append(entry.getKey()).append('=').append(entry.getValue()).append('\n');
+    }
+    return sb.toString();
+  }
   public void startCleanup() {
     JobState.getStorageInstance(this).startCleanup(this);
   }
@@ -182,7 +224,7 @@ public class AppConfig extends Configura
     URL x = getResource(fname);
     if (x != null) {
       addResource(x);
-      LOG.debug("loaded config from classpath " + x);
+      LOG.info("loaded config from classpath " + x);
       return true;
     }
 




svn commit: r1558700 - in /hive/trunk/hcatalog/src/test/e2e/templeton: drivers/TestDriverCurl.pm tests/jobstatus.conf

Author: daijy
Date: Thu Jan 16 05:58:23 2014
New Revision: 1558700

URL: http://svn.apache.org/r1558700
Log:
HIVE-6211: WebHCat job status E2E tests fail in presence of other jobs

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1558700&r1=1558699&r2=1558700&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Thu 
Jan 16 05:58:23 2014
@@ -650,13 +650,22 @@ sub compare
 
     # decode $testResult->{'body'} to an array of hash
     my $body = decode_json $testResult->{'body'};
-    my @sorted_body;
+    my @filtered_body;
+    if (defined $testCmd->{'filter_job_names'}) {
+      foreach my $filter (@{$testCmd->{'filter_job_names'}}) {
+        my @filtered_body_tmp = grep { $_->{detail}{profile}{jobName} eq $filter } @$body;
+        @filtered_body = (@filtered_body, @filtered_body_tmp);
+      }
+    } else {
+      @filtered_body = @$body;
+    }
+    my @sorted_filtered_body;
     if (ref @$body[0] eq 'HASH') {
-      @sorted_body = sort { $a->{id} cmp $b->{id} } @$body;
+      @sorted_filtered_body = sort { $a->{id} cmp $b->{id} } @filtered_body;
     } else {
-      @sorted_body = sort { $a cmp $b } @$body;
+      @sorted_filtered_body = sort { $a cmp $b } @filtered_body;
     }
-    my $value = $path->value(\@sorted_body);
+    my $value = $path->value(\@sorted_filtered_body);
 
     if ($value !~ /$regex_expected_value/s) {
       print $log "$0::$subName INFO check failed:

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf?rev=1558700&r1=1558699&r2=1558700&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf Thu Jan 16 
05:58:23 2014
@@ -86,6 +86,7 @@ $cfg = 
  'method' => 'GET',
  'url' => ':TEMPLETON_URL:/templeton/v1/jobs?user.name=:UNAME_OTHER:&showall=true&fields=*',
  'format_header' => 'Content-Type: application/json',
+ 'filter_job_names' => ['TempletonControllerJob', 'PigLatin:loadstore.pig'],
  'json_path' => {'$[-1:].detail.status.username' => ':UNAME_OTHER:', '$[-2:].detail.status.username' => ':UNAME_OTHER:', '$[-3:].detail.status.username' => ':UNAME:',
  '$[-4:].detail.status.username' => ':UNAME:', '$[-5:].detail.status.username' => ':UNAME_OTHER:', '$[-6:].detail.status.username' => ':UNAME_OTHER:'},
  'status_code' => 200,
@@ -97,6 +98,7 @@ $cfg = 
  'method' => 'GET',
  'url' => ':TEMPLETON_URL:/templeton/v1/jobs?user.name=:UNAME_OTHER:&fields=*',
  'format_header' => 'Content-Type: application/json',
+ 'filter_job_names' => ['TempletonControllerJob', 'PigLatin:loadstore.pig'],
  'json_path' => {'$[-1:].detail.status.username' => ':UNAME_OTHER:', '$[-2:].detail.status.username' => ':UNAME_OTHER:', '$[-3:].detail.status.username' => ':UNAME_OTHER:',
  '$[-4:].detail.status.username' => ':UNAME_OTHER:'},
  'status_code' => 200,
@@ -118,6 +120,7 @@ $cfg = 
  'method' => 'GET',
  'url' => ':TEMPLETON_URL:/templeton/v1/jobs?user.name=:UNAME_OTHER:&fields=*',
  'format_header' => 'Content-Type: application/json',
+ 'filter_job_names' => ['PigLatin:loadstore.pig'],
  'json_path' => {'$[-1:].id' => 'job_.*', 
  '$[-1:].detail.status.jobId' => 'job_.*',
  '$[-1:].detail.status.runState' => '\\d+',




svn commit: r1559632 - /hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf

Author: daijy
Date: Mon Jan 20 07:03:13 2014
New Revision: 1559632

URL: http://svn.apache.org/r1559632
Log:
HIVE-6227: WebHCat E2E test JOBS_7 fails (Deepesh Khandelwal via Daniel Dai)

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf?rev=1559632&r1=1559631&r2=1559632&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf Mon Jan 20 
07:03:13 2014
@@ -120,7 +120,7 @@ $cfg = 
  'method' => 'GET',
  'url' => ':TEMPLETON_URL:/templeton/v1/jobs?user.name=:UNAME_OTHER:&fields=*',
  'format_header' => 'Content-Type: application/json',
- 'filter_job_names' => ['PigLatin:loadstore.pig'],
+ 'filter_job_names' => ['TempletonControllerJob', 'PigLatin:loadstore.pig'],
  'json_path' => {'$[-1:].id' => 'job_.*', 
  '$[-1:].detail.status.jobId' => 'job_.*',
  '$[-1:].detail.status.runState' => '\\d+',




svn commit: r1497858 - in /hive/trunk: RELEASE_NOTES.txt hcatalog/build-support/ant/test.xml hcatalog/build.properties hcatalog/build.xml hcatalog/storage-handlers/hbase/build.xml

Author: daijy
Date: Fri Jun 28 17:42:50 2013
New Revision: 1497858

URL: http://svn.apache.org/r1497858
Log:
HIVE-4784:ant testreport doesn't include any HCatalog tests

Modified:
hive/trunk/RELEASE_NOTES.txt
hive/trunk/hcatalog/build-support/ant/test.xml
hive/trunk/hcatalog/build.properties
hive/trunk/hcatalog/build.xml
hive/trunk/hcatalog/storage-handlers/hbase/build.xml

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1497858&r1=1497857&r2=1497858&view=diff
==============================================================================
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Fri Jun 28 17:42:50 2013
@@ -268,6 +268,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4500] - HS2 holding too many file handles of 
hive_job_log_hive_*.txt files
 * [HIVE-4505] - Hive can't load transforms added using 'ADD FILE'
 * [HIVE-4527] - Fix eclipse project template
+* [HIVE-4784] - ant testreport doesn't include any HCatalog tests
 
 ** Improvement
 * [HIVE-581] - improve group by syntax

Modified: hive/trunk/hcatalog/build-support/ant/test.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support/ant/test.xml?rev=1497858&r1=1497857&r2=1497858&view=diff
==============================================================================
--- hive/trunk/hcatalog/build-support/ant/test.xml (original)
+++ hive/trunk/hcatalog/build-support/ant/test.xml Fri Jun 28 17:42:50 2013
@@ -62,6 +62,12 @@
   <enable/>
 </assertions>
   </junit>
+  <copy todir="${test.result.dir}">
+    <!--make sure hive's 'ant testreport' includes them-->
+    <fileset dir="${test.logs}">
+      <include name="**/TEST-*.xml"/>
+    </fileset>
+  </copy>
   <fail if="tests.failed">Tests failed!</fail>
 </sequential>
   </macrodef>

Modified: hive/trunk/hcatalog/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/build.properties?rev=1497858&r1=1497857&r2=1497858&view=diff
==============================================================================
--- hive/trunk/hcatalog/build.properties (original)
+++ hive/trunk/hcatalog/build.properties Fri Jun 28 17:42:50 2013
@@ -39,7 +39,7 @@ test.timeout=270
 test.warehouse.dir=${test.dir}/hcat_junit_warehouse
 mvnrepo=http://repo2.maven.org/maven2
 test.src.dir=${basedir}/src/test
-test.junit.output.format=plain
+test.junit.output.format=xml
 test.output=no
 test.excludes=e2e/**
 clover.jar=${clover.home}/lib/clover.jar

Modified: hive/trunk/hcatalog/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/build.xml?rev=1497858&r1=1497857&r2=1497858&view=diff
==============================================================================
--- hive/trunk/hcatalog/build.xml (original)
+++ hive/trunk/hcatalog/build.xml Fri Jun 28 17:42:50 2013
@@ -26,7 +26,8 @@
   <!-- <import file="../build-common.xml"/> -->
 
 <property name="path.to.basedir" location="${basedir}"/>
-
+<property name="test.result.dir" location="${build.dir.hive}/hcatalog/test"
+  description="location to place TEST-*.xml files so that hive's testreport includes them"/>
 <loadproperties srcfile="${basedir}/build.properties"/>
 
 <!--
@@ -91,16 +92,29 @@
 <target name="gen-test" description="Generate tests, a no-op for hcat"/>
 
 <target name="test" depends="jar" description="run unit tests">
+    <mkdir dir="${test.result.dir}"/>
     <!-- Placed in a parallel structure so that the tests keep going
      even if some fail.  Otherwise a failure in one of the earlier ant
      call terminates the target and the rest do not run.  -->
     <parallel threadCount="1">
-        <ant target="test" dir="core" inheritAll="false"/>
-        <ant target="test" dir="hcatalog-pig-adapter" inheritAll="false"/>
-        <ant target="test" dir="server-extensions" inheritAll="false"/>
-        <ant target="test" dir="webhcat/svr" inheritAll="false"/>
-        <ant target="test" dir="webhcat/java-client" inheritAll="false"/>
-        <ant target="test" dir="storage-handlers/hbase" inheritAll="false"/>
+        <ant target="test" dir="core" inheritAll="false">
+            <property name="test.result.dir" location="${test.result.dir}"/>
+        </ant>
+        <ant target="test" dir="hcatalog-pig-adapter" inheritAll="false">
+            <property name="test.result.dir" location="${test.result.dir}"/>
+        </ant>
+        <ant target="test" dir="server-extensions" inheritAll="false">
+            <property name="test.result.dir" location="${test.result.dir}"/>
+        </ant>
+        <ant target="test" dir="webhcat/svr" inheritAll="false">
+            <property name="test.result.dir" location="${test.result.dir}"/>
+        </ant>
+        <ant target="test" dir="webhcat/java-client" inheritAll="false">
+            <property name="test.result.dir" location="${test.result.dir}"/>
+        </ant>
+        <ant target="test" dir="storage-handlers/hbase" inheritAll="false">
+            <property name

svn commit: r1497859 - in /hive/trunk: RELEASE_NOTES.txt hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh

Author: daijy
Date: Fri Jun 28 17:45:44 2013
New Revision: 1497859

URL: http://svn.apache.org/r1497859
Log:
HIVE-4591: Making changes to webhcat-site.xml have no effect

Modified:
hive/trunk/RELEASE_NOTES.txt
hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1497859&r1=1497858&r2=1497859&view=diff
==============================================================================
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Fri Jun 28 17:45:44 2013
@@ -269,6 +269,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4505] - Hive can't load transforms added using 'ADD FILE'
 * [HIVE-4527] - Fix eclipse project template
 * [HIVE-4784] - ant testreport doesn't include any HCatalog tests
+* [HIVE-4591] - Making changes to webhcat-site.xml have no effect
 
 ** Improvement
 * [HIVE-581] - improve group by syntax

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh?rev=1497859&r1=1497858&r2=1497859&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh Fri Jun 28 
17:45:44 2013
@@ -46,7 +46,7 @@ SLEEP_TIME_AFTER_START=10
 #
 
 #These parameters can be overriden by webhcat-env.sh
-# the root of the WEBHCAT installation
+# the root of the WEBHCAT installation  ('this' is defined in webhcat_server.sh)
 export WEBHCAT_PREFIX=`dirname "$this"`/..
 
 #check to see if the conf dir is given as an optional argument
@@ -62,9 +62,9 @@ then
 fi
 
 # Allow alternate conf dir location.
-if [ -e "${WEBHCAT_PREFIX}/etc/webhcat/webhcat-env.sh" ]; then
+if [ -e "${WEBHCAT_PREFIX}/etc/webhcat/webhcat-env.sh" -o -e "${WEBHCAT_PREFIX}/etc/webhcat/webhcat-site.xml" ]; then
   DEFAULT_CONF_DIR=${WEBHCAT_PREFIX}/etc/webhcat
-elif [ -e "${WEBHCAT_PREFIX}/conf/webhcat-env.sh" ]; then
+elif [ -e "${WEBHCAT_PREFIX}/conf/webhcat-env.sh" -o -e "${WEBHCAT_PREFIX}/etc/webhcat/webhcat-site.xml" ]; then
   DEFAULT_CONF_DIR=${WEBHCAT_PREFIX}/conf
 else
   DEFAULT_CONF_DIR=/etc/webhcat




svn commit: r1503940 - in /hive/trunk: ./ hcatalog/ hcatalog/bin/ hcatalog/build-support/ant/ hcatalog/webhcat/svr/src/main/bin/ hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/

Author: daijy
Date: Tue Jul 16 23:03:19 2013
New Revision: 1503940

URL: http://svn.apache.org/r1503940
Log:
HIVE-4820 : webhcat_config.sh should set default values for HIVE_HOME and 
HCAT_PREFIX that work with default build tree structure (Eugene Koifman via 
Jianyong Dai)

Modified:
hive/trunk/RELEASE_NOTES.txt
hive/trunk/hcatalog/bin/hcat
hive/trunk/hcatalog/build-support/ant/deploy.xml
hive/trunk/hcatalog/build-support/ant/test.xml
hive/trunk/hcatalog/build.xml
hive/trunk/hcatalog/webhcat/svr/src/main/bin/webhcat_config.sh

hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1503940&r1=1503939&r2=1503940&view=diff
==============================================================================
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Tue Jul 16 23:03:19 2013
@@ -15,6 +15,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4326] - Clean up remaining items in hive/hcatalog/historical/trunk
 
 ** Bug
+* [HIVE-4820] - webhcat_config.sh should set default values for HIVE_HOME 
and HCAT_PREFIX that work with default build tree structure
 * [HIVE-2264] - Hive server is SHUTTING DOWN when invalid queries beeing 
executed.
 * [HIVE-2332] - If all of the parameters of distinct functions are exists 
in group by columns, query fails in runtime
 * [HIVE-2689] - ObjectInspectorConverters cannot convert Void types to 
Array/Map/Struct types.

Modified: hive/trunk/hcatalog/bin/hcat
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/bin/hcat?rev=1503940&r1=1503939&r2=1503940&view=diff
==============================================================================
--- hive/trunk/hcatalog/bin/hcat (original)
+++ hive/trunk/hcatalog/bin/hcat Tue Jul 16 23:03:19 2013
@@ -34,8 +34,14 @@ done
 bin=`dirname "$this"`
 script=`basename "$this"`
 bin=`unset CDPATH; cd "$bin"; pwd`
-this="$bin/$script"
-
+this="$bin/$script"
+#to preserve value of 'this' since any other file that defines 'this' and is sourced
+#here (e.g. hcat-config.sh) will overwrite it
+this_hcat=$this
+
+function echoerr() {
+    echo "${this_hcat}: $@" 1>&2
+}
 
 if [ -e "$bin/../libexec/hcat-config.sh" ]; then
   . "$bin/../libexec/hcat-config.sh"
@@ -60,7 +66,8 @@ done
 
 # check for hive in the path
 HIVE_IN_PATH=`which hive 2>/dev/null`
-if [ -f ${HIVE_IN_PATH} ]; then
+# looks like [ -f '' ] is true...
+if [ -n ${HIVE_IN_PATH} ]; then
   #dir of hive scrip
   HIVE_DIR=`dirname "$HIVE_IN_PATH"`
   #one level up for base dir
@@ -70,8 +77,14 @@ fi
 # HIVE_HOME env variable overrides hive in the path
 HIVE_HOME=${HIVE_HOME:-$HIVE_DIR}
 
+#if hive is not in path and not set by env, set it to default in build tree
+if [ -n ${HIVE_HOME} ]; then
+  HIVE_HOME=${bin}/../..
+  echoerr "HIVE_HOME is not defined; assuming ${HIVE_HOME}";
+fi
+
 if [ "$HIVE_HOME" == "" ]; then
-  echo "Cannot find hive installation: \$HIVE_HOME must be set or hive must be in the path";
+  echo "${this_hcat}: Cannot find hive installation: \$HIVE_HOME must be set or hive must be in the path";
   exit 4;
 fi
 
@@ -87,13 +100,13 @@ fi
 
 HIVE_LIB_DIR=${HIVE_HOME}/lib
 if [ ! -d $HIVE_LIB_DIR ]; then
-  echo "Cannot find lib dir within HIVE_HOME : $HIVE_LIB_DIR";
+  echo "${this_hcat}: Cannot find lib dir within HIVE_HOME : $HIVE_LIB_DIR";
   exit 4;
 fi
 
 HIVE_CONF_DIR=${HIVE_CONF_DIR:-$HIVE_HOME/conf}
 if [ ! -d $HIVE_CONF_DIR ]; then
-  echo "Cannot find conf dir within HIVE_HOME : $HIVE_CONF_DIR";
+  echo "${this_hcat}: Cannot find conf dir within HIVE_HOME : $HIVE_CONF_DIR";
   exit 4;
 fi
 

Modified: hive/trunk/hcatalog/build-support/ant/deploy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support/ant/deploy.xml?rev=1503940&r1=1503939&r2=1503940&view=diff
==============================================================================
--- hive/trunk/hcatalog/build-support/ant/deploy.xml (original)
+++ hive/trunk/hcatalog/build-support/ant/deploy.xml Tue Jul 16 23:03:19 2013
@@ -69,7 +69,7 @@
   <_mvnpublish module="testutils" />
 </target>
 
-<target name="mvn-init" unless="mvn-init.complete">
+<target name="mvn-init" unless="mvn-init.complete" description="Get Maven Ant Tasts jar and deploy all Hive jars to local Maven repo">
 <echo message="${ant.project.name}"/>
 <get src="${mvnrepo}/org/apache/maven/maven-ant-tasks/${maven-ant-tasks.version}/maven-ant-tasks-${maven-ant-tasks.version}.jar"
      dest="${path.to.basedir}/build/maven-ant-tasks-${maven-ant-tasks.version}.jar"/>

Modified: hive/trunk/hcatalog/build-support/ant/test.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/build-support

svn commit: r1505894 - in /hive/trunk/hcatalog: shims/src/20/java/org/apache/hadoop/mapred/ webhcat/svr/src/main/java/org/apache/hcatalog/templeton/

Author: daijy
Date: Tue Jul 23 04:47:05 2013
New Revision: 1505894

URL: http://svn.apache.org/r1505894
Log:
HIVE-4677 WebHCat e2e tests fail on Hadoop 2 (Fix checkstyle failure)

Modified:

hive/trunk/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/TempletonDelegator.java

Modified: 
hive/trunk/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java?rev=1505894&r1=1505893&r2=1505894&view=diff
==
--- 
hive/trunk/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java
 (original)
+++ 
hive/trunk/hcatalog/shims/src/20/java/org/apache/hadoop/mapred/TempletonJobTracker.java
 Tue Jul 23 04:47:05 2013
@@ -19,7 +19,6 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.net.InetSocketAddress;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java?rev=1505894&r1=1505893&r2=1505894&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
 Tue Jul 23 04:47:05 2013
@@ -22,7 +22,6 @@ import java.io.IOException;
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.hcatalog.templeton.tool.JobState;
 
 /**

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java?rev=1505894&r1=1505893&r2=1505894&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
 Tue Jul 23 04:47:05 2013
@@ -25,7 +25,6 @@ import java.util.ArrayList;
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.hcatalog.templeton.tool.JobState;
 
 /**

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java?rev=1505894&r1=1505893&r2=1505894&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java
 Tue Jul 23 04:47:05 2013
@@ -25,7 +25,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.mapred.JobID;
 import org.apache.hadoop.mapred.JobProfile;
 import org.apache.hadoop.mapred.JobStatus;
-import org.apache.hcatalog.shims.HCatHadoopShims;
 import org.apache.hadoop.mapred.TempletonJobTracker;
 import org.apache.hcatalog.templeton.tool.JobState;
 

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/TempletonDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/TempletonDelegator.java?rev=1505894&r1=1505893&r2=1505894&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/TempletonDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/TempletonDelegator.java
 Tue Jul 23 04:47:05 2013
@@ -18,10 +18,6 @@
  */
 package org.apache.hcatalog.templeton;
 
-import java.net.InetSocketAddress;
-
-import org.apache.hadoop.conf.Configuration

svn commit: r1505895 - /hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java

2013-07-22 Thread daijy
Author: daijy
Date: Tue Jul 23 04:49:50 2013
New Revision: 1505895

URL: http://svn.apache.org/r1505895
Log:
HIVE-4677 WebHCat e2e tests fail on Hadoop 2 (Fix checkstyle failure)

Modified:

hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java

Modified: 
hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java?rev=1505895&r1=1505894&r2=1505895&view=diff
==
--- 
hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java
 (original)
+++ 
hive/trunk/hcatalog/shims/src/23/java/org/apache/hadoop/mapred/TempletonJobTracker.java
 Tue Jul 23 04:49:50 2013
@@ -19,8 +19,6 @@
 package org.apache.hadoop.mapred;
 
 import java.io.IOException;
-import java.net.InetSocketAddress;
-
 import org.apache.hadoop.conf.Configuration;
 
 /*




svn commit: r1519818 - in /hive/trunk: RELEASE_NOTES.txt hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java hcatalog/webhcat/svr/src/test/java/org/apache/hca

2013-09-03 Thread daijy
Author: daijy
Date: Tue Sep  3 19:56:01 2013
New Revision: 1519818

URL: http://svn.apache.org/r1519818
Log:
[HIVE-4586] - WebHCat should return 404 error for undefined resource (Daniel 
Dai)

Modified:
hive/trunk/RELEASE_NOTES.txt

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java

hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1519818&r1=1519817&r2=1519818&view=diff
==
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Tue Sep  3 19:56:01 2013
@@ -15,6 +15,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4326] - Clean up remaining items in hive/hcatalog/historical/trunk
 
 ** Bug
+* [HIVE-4586] - WebHCat should return 404 error for undefined resource
 * [HIVE-4820] - webhcat_config.sh should set default values for HIVE_HOME 
and HCAT_PREFIX that work with default build tree structure
 * [HIVE-2264] - Hive server is SHUTTING DOWN when invalid queries are being 
executed.
 * [HIVE-2332] - If all of the parameters of distinct functions exist 
in group by columns, query fails in runtime

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java?rev=1519818&r1=1519817&r2=1519818&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/CatchallExceptionMapper.java
 Tue Sep  3 19:56:01 2013
@@ -26,6 +26,8 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.eclipse.jetty.http.HttpStatus;
 
+import com.sun.jersey.api.NotFoundException;
+
 /**
  * Map all exceptions to the Jersey response.  This lets us have nice
  * results in the error body.
@@ -37,6 +39,9 @@ public class CatchallExceptionMapper
 
 public Response toResponse(Exception e) {
 LOG.error(e.getMessage(), e);
+if (e instanceof NotFoundException) {
+return SimpleWebException.buildMessage(HttpStatus.NOT_FOUND_404, 
null, e.getMessage());
+}
 return 
SimpleWebException.buildMessage(HttpStatus.INTERNAL_SERVER_ERROR_500, null, 
e.getMessage());
 }
 }
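
The mapping above follows the standard JAX-RS ExceptionMapper pattern, and it 
is easy to exercise outside of WebHCat. A minimal self-contained sketch (the 
mapper class name and status-building calls are invented for illustration; 
only the Jersey NotFoundException type is taken from the diff):

import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;

// Sketch only: map Jersey's NotFoundException to 404, everything else to 500.
@Provider
public class NotFoundAwareMapper implements ExceptionMapper<Exception> {
  @Override
  public Response toResponse(Exception e) {
    if (e instanceof com.sun.jersey.api.NotFoundException) {
      return Response.status(404).entity(e.getMessage()).build(); // undefined resource
    }
    return Response.status(500).entity(e.getMessage()).build();   // genuine server error
  }
}

Registering such a @Provider is all Jersey needs to route unmatched paths 
through the 404 branch instead of the catch-all 500 handler.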

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java?rev=1519818&r1=1519817&r2=1519818&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/TestWebHCatE2e.java
 Tue Sep  3 19:56:01 2013
@@ -102,7 +102,7 @@ public class TestWebHCatE2e {
 @Test
 public void invalidPath() throws IOException {
 MethodCallRetVal p = doHttpCall(templetonBaseUrl + "/no_such_mapping/database", HTTP_METHOD_TYPE.GET);
-Assert.assertEquals(p.getAssertMsg(), 
HttpStatus.INTERNAL_SERVER_ERROR_500, p.httpStatusCode);
+Assert.assertEquals(p.getAssertMsg(), HttpStatus.NOT_FOUND_404, 
p.httpStatusCode);
 }
 /**
  * tries to drop table in a DB that doesn't exist




svn commit: r1519881 - in /hive/trunk: ./ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/ hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/

2013-09-03 Thread daijy
Author: daijy
Date: Tue Sep  3 22:56:58 2013
New Revision: 1519881

URL: http://svn.apache.org/r1519881
Log:
WebHCat does not honor user home directory (Daniel Dai)

Modified:
hive/trunk/RELEASE_NOTES.txt

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java

hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1519881&r1=1519880&r2=1519881&view=diff
==
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Tue Sep  3 22:56:58 2013
@@ -15,6 +15,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4326] - Clean up remaining items in hive/hcatalog/historical/trunk
 
 ** Bug
+* [HIVE-4441] - WebHCat does not honor user home directory
 * [HIVE-4442] - WebHCat should not override user.name parameter for Queue 
call
 * [HIVE-4586] - WebHCat should return 404 error for undefined resource
 * [HIVE-4820] - webhcat_config.sh should set default values for HIVE_HOME 
and HCAT_PREFIX that work with default build tree structure

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java?rev=1519881&r1=1519880&r2=1519881&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonControllerJob.java
 Tue Sep  3 22:56:58 2013
@@ -154,6 +154,10 @@ public class TempletonControllerJob exte
 
 String statusdir = conf.get(STATUSDIR_NAME);
 
+if (statusdir != null) {
+statusdir = TempletonUtils.addUserHomeDirectoryIfApplicable(statusdir, 
conf.get("user.name"), conf);
+}
+
 ExecutorService pool = Executors.newCachedThreadPool();
 executeWatcher(pool, conf, context.getJobID(),
 proc.getInputStream(), statusdir, STDOUT_FNAME);

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java?rev=1519881&r1=1519880&r2=1519881&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/tool/TempletonUtils.java
 Tue Sep  3 22:56:58 2013
@@ -36,6 +36,7 @@ import java.util.regex.Pattern;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hcatalog.templeton.UgiFactory;
@@ -210,6 +211,23 @@ public class TempletonUtils {
 return false;
 }
 }
+
+public static String addUserHomeDirectoryIfApplicable(String origPathStr, 
String user, Configuration conf) throws IOException {
+Path path = new Path(origPathStr);
+String result = origPathStr;
+
+// shortcut for s3/asv
+// If the path contains a scheme, the user means an absolute path;
+// however, path.isAbsolute() tells us otherwise,
+// so we skip the conversion for non-HDFS filesystems.
+if (!(path.getFileSystem(conf) instanceof DistributedFileSystem)) {
+return result;
+}
+if (!path.isAbsolute()) {
+result = "/user/" + user + "/" + origPathStr;
+}
+}
+return result;
+}
 
 public static Path hadoopFsPath(final String fname, final Configuration 
conf, String user)
 throws URISyntaxException, IOException,
@@ -227,6 +245,7 @@ public class TempletonUtils {
 }
 });
 
+fname = addUserHomeDirectoryIfApplicable(fname, user, conf);
 URI u = new URI(fname);
 Path p = new Path(u).makeQualified(defaultFs);
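
The behavior added by addUserHomeDirectoryIfApplicable reduces to a small 
rule: on an HDFS-backed filesystem, a relative status path is rebased under 
the caller's home directory, while absolute paths and non-HDFS schemes (s3, 
asv) pass through untouched. A dependency-free sketch of that rule, with 
invented names and the filesystem check reduced to a boolean:

// Sketch: prepend /user/<user>/ only for relative paths on an HDFS-like filesystem.
public class UserHomePathExample {
  static String resolve(String path, String user, boolean onDistributedFs) {
    if (!onDistributedFs) {
      return path;                      // s3/asv and friends are left untouched
    }
    if (path.startsWith("/")) {
      return path;                      // already absolute
    }
    return "/user/" + user + "/" + path;
  }

  public static void main(String[] args) {
    System.out.println(resolve("status", "alice", true));      // /user/alice/status
    System.out.println(resolve("/tmp/status", "alice", true)); // /tmp/status
    System.out.println(resolve("status", "alice", false));     // status
  }
}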
 

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/test/java/org/apache/hcatalog/templeton/tool/TestTempletonUtils.java?rev=1519881&r1=1519880&r2=1519881&view=diff

svn commit: r1519875 - in /hive/trunk: ./ hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ shims/src/0.20/java/org/apache/hadoop/hive/shims/ shims/src/0.20S/java/org/apache/hadoop/hiv

2013-09-03 Thread daijy
Author: daijy
Date: Tue Sep  3 22:42:48 2013
New Revision: 1519875

URL: http://svn.apache.org/r1519875
Log:
HIVE-4442: WebHCat should not override user.name parameter for Queue call 
(Daniel Dai)

Modified:
hive/trunk/RELEASE_NOTES.txt

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/Server.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/StatusDelegator.java

hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java

hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java

hive/trunk/shims/src/0.20S/java/org/apache/hadoop/mapred/WebHCatJTShim20S.java

hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/mapred/WebHCatJTShim23.java

hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/RELEASE_NOTES.txt?rev=1519875&r1=1519874&r2=1519875&view=diff
==
--- hive/trunk/RELEASE_NOTES.txt (original)
+++ hive/trunk/RELEASE_NOTES.txt Tue Sep  3 22:42:48 2013
@@ -15,6 +15,7 @@ Release Notes - Hive - Version 0.11.0
 * [HIVE-4326] - Clean up remaining items in hive/hcatalog/historical/trunk
 
 ** Bug
+* [HIVE-4442] - WebHCat should not override user.name parameter for Queue 
call
 * [HIVE-4586] - WebHCat should return 404 error for undefined resource
 * [HIVE-4820] - webhcat_config.sh should set default values for HIVE_HOME 
and HCAT_PREFIX that work with default build tree structure
 * [HIVE-2264] - Hive server is SHUTTING DOWN when invalid queries are being 
executed.

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java?rev=1519875&r1=1519874&r2=1519875&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/DeleteDelegator.java
 Tue Sep  3 22:42:48 2013
@@ -37,11 +37,11 @@ public class DeleteDelegator extends Tem
 public QueueStatusBean run(String user, String id)
 throws NotAuthorizedException, BadParam, IOException, 
InterruptedException
 {
-UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
+UserGroupInformation ugi = UgiFactory.getUgi(user);
 WebHCatJTShim tracker = null;
 JobState state = null;
 try {
-tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
+tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf, ugi);
 JobID jobid = StatusDelegator.StringToJobID(id);
 if (jobid == null)
 throw new BadParam("Invalid jobid: " + id);

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java?rev=1519875&r1=1519874&r2=1519875&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hcatalog/templeton/ListDelegator.java
 Tue Sep  3 22:42:48 2013
@@ -36,13 +36,13 @@ public class ListDelegator extends Templ
 super(appConf);
 }
 
-public List<String> run(String user)
+public List<String> run(String user, boolean showall)
 throws NotAuthorizedException, BadParam, IOException, 
InterruptedException {
 
-UserGroupInformation ugi = UserGroupInformation.createRemoteUser(user);
+UserGroupInformation ugi = UgiFactory.getUgi(user);
 WebHCatJTShim tracker = null;
 try {
-tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf);
+tracker = ShimLoader.getHadoopShims().getWebHCatShim(appConf, ugi);
 
 ArrayList<String> ids = new ArrayList<String>();
 
@@ -54,7 +54,7 @@ public class ListDelegator extends Templ
 try {
 String id = job.getJobID().toString();
 state = new JobState(id, Main.getAppConfigInstance());
-if (user.equals(state.getUser()))
+if (showall || user.equals
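
The switch from UserGroupInformation.createRemoteUser(user) to 
UgiFactory.getUgi(user) is what lets the job-tracker calls run as the 
requesting user. Assuming the factory returns a proxy-user UGI (an 
assumption; UgiFactory itself is not shown in this commit), the underlying 
Hadoop pattern looks like this sketch:

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

// Hypothetical sketch of the proxy-user pattern behind a UgiFactory-style helper.
public class ProxyUgiExample {
  public static void main(String[] args) throws Exception {
    UserGroupInformation proxy = UserGroupInformation.createProxyUser(
        "alice", UserGroupInformation.getLoginUser());
    String who = proxy.doAs(new PrivilegedExceptionAction<String>() {
      public String run() throws Exception {
        // file-system and job-tracker calls issued here carry alice's identity
        return UserGroupInformation.getCurrentUser().getShortUserName();
      }
    });
    System.out.println(who); // alice
  }
}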

svn commit: r1669422 - /hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/

2015-03-26 Thread daijy
Author: daijy
Date: Thu Mar 26 19:49:57 2015
New Revision: 1669422

URL: http://svn.apache.org/r1669422
Log:
HIVE-9766: Add JavaConstantXXXObjectInspector

Added:

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBinaryObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantByteObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDateObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantDoubleObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantFloatObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantHiveCharObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantHiveDecimalObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantHiveVarcharObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantIntObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantLongObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantShortObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantStringObjectInspector.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantTimestampObjectInspector.java

Added: 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBinaryObjectInspector.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBinaryObjectInspector.java?rev=1669422&view=auto
==
--- 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBinaryObjectInspector.java
 (added)
+++ 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBinaryObjectInspector.java
 Thu Mar 26 19:49:57 2015
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.objectinspector.primitive;
+
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.io.BytesWritable;
+
+public class JavaConstantBinaryObjectInspector extends
+JavaBinaryObjectInspector implements ConstantObjectInspector {
+  private byte[] value;
+
+  public JavaConstantBinaryObjectInspector(byte[] value) {
+super();
+this.value = value;
+  }
+
+  @Override
+  public Object getWritableConstantValue() {
+if (value==null) {
+  return null;
+}
+return new BytesWritable(value);
+  }
+}
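
A short usage sketch for the class above (hypothetical driver class; assumes 
hive-serde and hadoop-common on the classpath):

import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaConstantBinaryObjectInspector;
import org.apache.hadoop.io.BytesWritable;

public class ConstantBinaryExample {
  public static void main(String[] args) {
    JavaConstantBinaryObjectInspector oi =
        new JavaConstantBinaryObjectInspector(new byte[] { 0x01, 0x02 });
    BytesWritable w = (BytesWritable) oi.getWritableConstantValue();
    System.out.println(w.getLength()); // 2
    // A null payload round-trips as a null writable:
    System.out.println(new JavaConstantBinaryObjectInspector(null)
        .getWritableConstantValue()); // null
  }
}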

Added: 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java?rev=1669422&view=auto
==
--- 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java
 (added)
+++ 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/JavaConstantBooleanObjectInspector.java
 Thu Mar 26 19:49:57 2015
@@ -0,0 +1,39 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file

hive git commit: HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel Dai)

2015-06-08 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 82335bf51 -> 77b2c2000


HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel 
Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/77b2c200
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/77b2c200
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/77b2c200

Branch: refs/heads/branch-1.2
Commit: 77b2c20009a3b3ed195f157d00b7dc3e802eafc0
Parents: 82335bf
Author: Daniel Dai da...@hortonworks.com
Authored: Mon Jun 8 13:42:16 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Mon Jun 8 13:42:16 2015 -0700

--
 itests/pom.xml  |   1 +
 itests/qtest-accumulo/pom.xml   | 508 +++
 .../test/java/org/apache/hive/TestDummy.java|  31 ++
 itests/qtest/pom.xml|  41 --
 4 files changed, 540 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/77b2c200/itests/pom.xml
--
diff --git a/itests/pom.xml b/itests/pom.xml
index c505525..69435de 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -38,6 +38,7 @@
   <module>util</module>
   <module>test-serde</module>
   <module>qtest</module>
+   <module>qtest-accumulo</module>
   <module>hive-jmh</module>
  </modules>
 

http://git-wip-us.apache.org/repos/asf/hive/blob/77b2c200/itests/qtest-accumulo/pom.xml
--
diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml
new file mode 100644
index 0000000..3298af3
--- /dev/null
+++ b/itests/qtest-accumulo/pom.xml
@@ -0,0 +1,508 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>1.3.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-qfile-accumulo</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - QFile Accumulo Tests</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+
+    <qfile></qfile>
+    <qfile_regex></qfile_regex>
+    <run_disabled>false</run_disabled>
+    <clustermode></clustermode>
+    <!-- Profile activation clause for accumulo-tests will flip skip.accumulo.tests to false
+         as long as -DskipAccumuloTests is not specified -->
+    <skip.accumulo.tests>true</skip.accumulo.tests>
+    <accumulo-thrift.version>0.9.0</accumulo-thrift.version>
+  </properties>
+
+  <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-ant</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-contrib</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-exec</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-custom-serde</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-util</artifactId>
+      

hive git commit: HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel Dai)

2015-06-08 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master b099e5b9c -> a80210434


HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel 
Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a8021043
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a8021043
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a8021043

Branch: refs/heads/master
Commit: a80210434df182e410a851c7aa7d8cd5f1de6825
Parents: b099e5b
Author: Daniel Dai da...@hortonworks.com
Authored: Mon Jun 8 13:40:56 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Mon Jun 8 13:40:56 2015 -0700

--
 itests/pom.xml  |   1 +
 itests/qtest-accumulo/pom.xml   | 508 +++
 .../test/java/org/apache/hive/TestDummy.java|  31 ++
 itests/qtest/pom.xml|  41 --
 4 files changed, 540 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a8021043/itests/pom.xml
--
diff --git a/itests/pom.xml b/itests/pom.xml
index 8321e00..f156cc4 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -38,6 +38,7 @@
   <module>util</module>
   <module>test-serde</module>
   <module>qtest</module>
+   <module>qtest-accumulo</module>
   <module>hive-jmh</module>
  </modules>
 

http://git-wip-us.apache.org/repos/asf/hive/blob/a8021043/itests/qtest-accumulo/pom.xml
--
diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml
new file mode 100644
index 0000000..3298af3
--- /dev/null
+++ b/itests/qtest-accumulo/pom.xml
@@ -0,0 +1,508 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>1.3.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-qfile-accumulo</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - QFile Accumulo Tests</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+
+    <qfile></qfile>
+    <qfile_regex></qfile_regex>
+    <run_disabled>false</run_disabled>
+    <clustermode></clustermode>
+    <!-- Profile activation clause for accumulo-tests will flip skip.accumulo.tests to false
+         as long as -DskipAccumuloTests is not specified -->
+    <skip.accumulo.tests>true</skip.accumulo.tests>
+    <accumulo-thrift.version>0.9.0</accumulo-thrift.version>
+  </properties>
+
+  <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-ant</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-contrib</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-exec</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-custom-serde</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-util</artifactId>
+      

hive git commit: HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel Dai)

2015-06-09 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 0be14eac8 -> 658491e1f


HIVE-8931: Test TestAccumuloCliDriver is not completing (Josh Elser via Daniel 
Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/658491e1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/658491e1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/658491e1

Branch: refs/heads/branch-1.2
Commit: 658491e1ff03535d8d676b6cf1a22df827233074
Parents: 0be14ea
Author: Daniel Dai da...@hortonworks.com
Authored: Tue Jun 9 14:53:18 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Tue Jun 9 14:53:18 2015 -0700

--
 itests/qtest-accumulo/pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/658491e1/itests/qtest-accumulo/pom.xml
--
diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml
index 3298af3..3477b5f 100644
--- a/itests/qtest-accumulo/pom.xml
+++ b/itests/qtest-accumulo/pom.xml
@@ -20,7 +20,7 @@
   <parent>
     <groupId>org.apache.hive</groupId>
     <artifactId>hive-it</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
+    <version>1.2.0</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 



hive git commit: HIVE-8931: Test TestAccumuloCliDriver is not completing

2015-06-09 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 0e47786f5 -> 82244ce67


HIVE-8931: Test TestAccumuloCliDriver is not completing


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/82244ce6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/82244ce6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/82244ce6

Branch: refs/heads/branch-1
Commit: 82244ce6764ac62b6d2c10863d315a523ab168a7
Parents: 0e47786
Author: Daniel Dai da...@hortonworks.com
Authored: Tue Jun 9 20:20:45 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Tue Jun 9 20:20:45 2015 -0700

--
 itests/pom.xml  |   1 +
 itests/qtest-accumulo/pom.xml   | 508 +++
 .../test/java/org/apache/hive/TestDummy.java|  31 ++
 itests/qtest/pom.xml|  41 --
 4 files changed, 540 insertions(+), 41 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/82244ce6/itests/pom.xml
--
diff --git a/itests/pom.xml b/itests/pom.xml
index bf909eb..867fd87 100644
--- a/itests/pom.xml
+++ b/itests/pom.xml
@@ -38,6 +38,7 @@
   <module>util</module>
   <module>test-serde</module>
   <module>qtest</module>
+   <module>qtest-accumulo</module>
   <module>hive-jmh</module>
  </modules>
 

http://git-wip-us.apache.org/repos/asf/hive/blob/82244ce6/itests/qtest-accumulo/pom.xml
--
diff --git a/itests/qtest-accumulo/pom.xml b/itests/qtest-accumulo/pom.xml
new file mode 100644
index 0000000..3298af3
--- /dev/null
+++ b/itests/qtest-accumulo/pom.xml
@@ -0,0 +1,508 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive-it</artifactId>
+    <version>1.3.0-SNAPSHOT</version>
+    <relativePath>../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-it-qfile-accumulo</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Integration - QFile Accumulo Tests</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+
+    <qfile></qfile>
+    <qfile_regex></qfile_regex>
+    <run_disabled>false</run_disabled>
+    <clustermode></clustermode>
+    <!-- Profile activation clause for accumulo-tests will flip skip.accumulo.tests to false
+         as long as -DskipAccumuloTests is not specified -->
+    <skip.accumulo.tests>true</skip.accumulo.tests>
+    <accumulo-thrift.version>0.9.0</accumulo-thrift.version>
+  </properties>
+
+  <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <!-- test intra-project -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-ant</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-contrib</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-exec</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-custom-serde</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-util</artifactId>
+      <version>${project.version}</version>
+      

[1/3] hive git commit: HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)

2015-08-24 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/hbase-metastore 9d9dd72a0 -> 5e16d53e9


http://git-wip-us.apache.org/repos/asf/hive/blob/5e16d53e/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseFilterPlanUtil.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseFilterPlanUtil.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseFilterPlanUtil.java
index ec99685..9762309 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseFilterPlanUtil.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseFilterPlanUtil.java
@@ -20,15 +20,30 @@ package org.apache.hadoop.hive.metastore.hbase;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
 import java.util.IdentityHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.hbase.filter.CompareFilter;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.RowFilter;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeNode;
 import org.apache.hadoop.hive.metastore.parser.ExpressionTree.TreeVisitor;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import 
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
@@ -147,7 +162,7 @@ class HBaseFilterPlanUtil {
   public static class ScanPlan extends FilterPlan {
 
 public static class ScanMarker {
-  final byte[] bytes;
+  final String value;
   /**
* If inclusive = true, it means that the
* marker includes those bytes.
@@ -155,20 +170,24 @@ class HBaseFilterPlanUtil {
* or ends at the next possible byte array
*/
   final boolean isInclusive;
-  ScanMarker(byte [] b, boolean i){
-this.bytes = b;
+  final String type;
+  ScanMarker(String obj, boolean i, String type){
+this.value = obj;
 this.isInclusive = i;
+this.type = type;
   }
   @Override
   public String toString() {
-return "ScanMarker [bytes=" + Arrays.toString(bytes) + ", isInclusive=" + isInclusive + "]";
+return "ScanMarker [" + "value=" + value.toString() + ", isInclusive=" + isInclusive +
+", type=" + type + "]";
   }
   @Override
   public int hashCode() {
 final int prime = 31;
 int result = 1;
-result = prime * result + Arrays.hashCode(bytes);
+result = prime * result + value.hashCode();
 result = prime * result + (isInclusive ? 1231 : 1237);
+result = prime * result + type.hashCode();
 return result;
   }
   @Override
@@ -180,48 +199,118 @@ class HBaseFilterPlanUtil {
 if (getClass() != obj.getClass())
   return false;
 ScanMarker other = (ScanMarker) obj;
-if (!Arrays.equals(bytes, other.bytes))
+if (!value.equals(other.value))
   return false;
 if (isInclusive != other.isInclusive)
   return false;
+if (type != other.type)
+  return false;
 return true;
   }
 }
-// represent Scan start
-private ScanMarker startMarker = new ScanMarker(null, false);
-// represent Scan end
-private ScanMarker endMarker = new ScanMarker(null, false);
-
-private ScanFilter filter;
-
-public ScanFilter getFilter() {
-  return filter;
+public static class ScanMarkerPair {
+  public ScanMarkerPair(ScanMarker startMarker, ScanMarker endMarker) {
+this.startMarker = startMarker;
+this.endMarker = endMarker;
+  }
+  ScanMarker startMarker;
+  ScanMarker endMarker;
+}
+// represent Scan start, partition key name -> scanMarkerPair
+Map<String, ScanMarkerPair> markers = new HashMap<String, ScanMarkerPair>();
+List<Operator> ops = new ArrayList<Operator>();
+
+// Get the number of partition key prefixes which can be used in the scan 
range.
+// For example, if partition key is (year, month, state)
+// 1. year = 2015 and month >= 1 and month < 5
+//year + month can 
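
The comment above (cut off in this archive) is describing which leading 
partition keys can tighten the HBase scan range: a key contributes only while 
every key before it is bounded. An illustrative, dependency-free sketch of 
that prefix rule; all names here are invented and this is not the planner's 
actual code:

public class ScanPrefixExample {
  // The usable scan prefix is the run of leading partition keys whose
  // predicates bound both ends of a range.
  static int usablePrefixLength(boolean[] keyIsRangeBound) {
    int n = 0;
    while (n < keyIsRangeBound.length && keyIsRangeBound[n]) {
      n++;
    }
    return n;
  }

  public static void main(String[] args) {
    // (year, month, state) with year = 2015, 1 <= month < 5, state unconstrained:
    // year and month narrow the scan range, state must be filtered afterwards.
    System.out.println(usablePrefixLength(new boolean[] { true, true, false })); // 2
  }
}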

[3/3] hive git commit: HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)

2015-08-24 Thread daijy
HIVE-10289: Support filter on non-first partition key and non-string partition 
key (Daniel Dai reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e16d53e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e16d53e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e16d53e

Branch: refs/heads/hbase-metastore
Commit: 5e16d53e98e44567bbfa1b291f8a927a3e3e4b9b
Parents: 9d9dd72
Author: Daniel Dai da...@hortonworks.com
Authored: Mon Aug 24 11:20:55 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Mon Aug 24 11:20:55 2015 -0700

--
 .../metastore/hbase/HbaseMetastoreProto.java| 3732 +-
 .../metastore/hbase/HBaseFilterPlanUtil.java|  341 +-
 .../hive/metastore/hbase/HBaseReadWrite.java|  114 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   11 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |  129 +-
 .../metastore/hbase/PartitionKeyComparator.java |  292 ++
 .../metastore/hbase/hbase_metastore_proto.proto |   25 +
 .../hbase/TestHBaseFilterPlanUtil.java  |  278 +-
 .../BinarySortableSerDeWithEndPrefix.java   |   41 +
 9 files changed, 4702 insertions(+), 261 deletions(-)
--




[2/3] hive git commit: HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)

2015-08-24 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/5e16d53e/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
--
diff --git 
a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
 
b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 5c5818a..39a7278 100644
--- 
a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ 
b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -30617,6 +30617,3675 @@ public final class HbaseMetastoreProto {
 // 
@@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Table)
   }
 
+  public interface PartitionKeyComparatorOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string names = 1;
+    /**
+     * <code>required string names = 1;</code>
+     */
+    boolean hasNames();
+    /**
+     * <code>required string names = 1;</code>
+     */
+    java.lang.String getNames();
+    /**
+     * <code>required string names = 1;</code>
+     */
+    com.google.protobuf.ByteString getNamesBytes();
+
+    // required string types = 2;
+    /**
+     * <code>required string types = 2;</code>
+     */
+    boolean hasTypes();
+    /**
+     * <code>required string types = 2;</code>
+     */
+    java.lang.String getTypes();
+    /**
+     * <code>required string types = 2;</code>
+     */
+    com.google.protobuf.ByteString getTypesBytes();
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator>
+        getOpList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getOp(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    int getOpCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder>
+        getOpOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder getOpOrBuilder(
+        int index);
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range>
+        getRangeList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getRange(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    int getRangeCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder>
+        getRangeOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder getRangeOrBuilder(
+        int index);
+  }
+  /**
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator}
+   */
+  public static final class PartitionKeyComparator extends
+      com.google.protobuf.GeneratedMessage
+      implements PartitionKeyComparatorOrBuilder {
+    // Use PartitionKeyComparator.newBuilder() to construct.
+    private PartitionKeyComparator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private PartitionKeyComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+    private static final PartitionKeyComparator defaultInstance;
+
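
Callers would drive this generated message through the usual protoc builder 
API. A hypothetical usage sketch (the setNames/setTypes setter names are 
inferred from standard protobuf codegen conventions for the fields declared 
above, not copied from the Hive source):

import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto;

public class ComparatorProtoExample {
  public static void main(String[] args) {
    HbaseMetastoreProto.PartitionKeyComparator cmp =
        HbaseMetastoreProto.PartitionKeyComparator.newBuilder()
            .setNames("year,month,state")   // required string names = 1
            .setTypes("string,int,string")  // required string types = 2
            .build();
    System.out.println(cmp.toByteArray().length); // serialized size on the wire
  }
}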

hive git commit: HIVE-11456: HCatStorer should honor mapreduce.output.basename (Mithun Radhakrishnan, reviewed by Daniel Dai)

2015-08-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 fb736ab78 -> 7330d9113


HIVE-11456: HCatStorer should honor mapreduce.output.basename (Mithun 
Radhakrishnan, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7330d911
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7330d911
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7330d911

Branch: refs/heads/branch-1
Commit: 7330d9113f14b98a5aedf9eb36e6063fae1631e7
Parents: fb736ab
Author: Daniel Dai da...@hortonworks.com
Authored: Wed Aug 5 13:50:19 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Wed Aug 5 13:51:06 2015 -0700

--
 .../hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java | 7 ---
 .../mapreduce/DynamicPartitionFileRecordWriterContainer.java  | 3 ++-
 .../hive/hcatalog/mapreduce/FileOutputFormatContainer.java| 3 ++-
 3 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7330d911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
index 3a07b0c..f620b83 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
@@ -51,8 +51,9 @@ class DefaultOutputFormatContainer extends 
OutputFormatContainer {
 super(of);
   }
 
-  static synchronized String getOutputName(int partition) {
-return "part-" + NUMBER_FORMAT.format(partition);
+  static synchronized String getOutputName(TaskAttemptContext context) {
+return context.getConfiguration().get("mapreduce.output.basename", "part")
++ "-" + NUMBER_FORMAT.format(context.getTaskAttemptID().getTaskID().getId());
   }
 
   /**
@@ -65,7 +66,7 @@ class DefaultOutputFormatContainer extends 
OutputFormatContainer {
   @Override
   public RecordWriterWritableComparable?, HCatRecord
   getRecordWriter(TaskAttemptContext context) throws IOException, 
InterruptedException {
-String name = 
getOutputName(context.getTaskAttemptID().getTaskID().getId());
+String name = getOutputName(context);
 return new DefaultRecordWriterContainer(context,
   getBaseOutputFormat().getRecordWriter(null, new 
JobConf(context.getConfiguration()), name, 
InternalUtil.createReporter(context)));
   }
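
The effect of honoring mapreduce.output.basename is easiest to see by 
evaluating the naming expression directly. A sketch, assuming Hadoop's 
customary five-digit zero-padded task-id formatting (an assumption about 
NUMBER_FORMAT, which is defined outside this hunk):

import java.text.NumberFormat;

public class OutputNameExample {
  public static void main(String[] args) {
    NumberFormat fmt = NumberFormat.getInstance();
    fmt.setMinimumIntegerDigits(5);   // mirrors FileOutputFormat-style padding
    fmt.setGroupingUsed(false);
    // The default keeps the historical name; overriding mapreduce.output.basename
    // lets two HCatStorer invocations in one job avoid file-name collisions.
    System.out.println("part" + "-" + fmt.format(3));     // part-00003
    System.out.println("myprefix" + "-" + fmt.format(3)); // myprefix-00003
  }
}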

http://git-wip-us.apache.org/repos/asf/hive/blob/7330d911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
index 60f1b60..320ace4 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
@@ -205,7 +205,8 @@ class DynamicPartitionFileRecordWriterContainer extends 
FileRecordWriterContaine
 
   Path parentDir = new Path(currTaskContext.getConfiguration().get("mapred.work.output.dir"));
   Path childPath =
-  new Path(parentDir, FileOutputFormat.getUniqueFile(currTaskContext, "part", ""));
+  new Path(parentDir, FileOutputFormat.getUniqueFile(currTaskContext,
+  currTaskContext.getConfiguration().get("mapreduce.output.basename", "part"), ""));
 
   RecordWriter baseRecordWriter =
   
baseOF.getRecordWriter(parentDir.getFileSystem(currTaskContext.getConfiguration()),

http://git-wip-us.apache.org/repos/asf/hive/blob/7330d911/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
index 001b59b..95ee3b4 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -97,7 +97,8 @@ class FileOutputFormatContainer extends OutputFormatContainer 
{
   

hive git commit: HIVE-11456: HCatStorer should honor mapreduce.output.basename (Mithun Radhakrishnan, reviewed by Daniel Dai)

2015-08-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 96f591446 -> 0f934d29b


HIVE-11456: HCatStorer should honor mapreduce.output.basename (Mithun 
Radhakrishnan, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0f934d29
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0f934d29
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0f934d29

Branch: refs/heads/master
Commit: 0f934d29bb6bfc992eca050337b8f48fb47aaed8
Parents: 96f5914
Author: Daniel Dai da...@hortonworks.com
Authored: Wed Aug 5 13:50:19 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Wed Aug 5 13:50:19 2015 -0700

--
 .../hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java | 7 ---
 .../mapreduce/DynamicPartitionFileRecordWriterContainer.java  | 3 ++-
 .../hive/hcatalog/mapreduce/FileOutputFormatContainer.java| 3 ++-
 3 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0f934d29/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
index 3a07b0c..f620b83 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputFormatContainer.java
@@ -51,8 +51,9 @@ class DefaultOutputFormatContainer extends 
OutputFormatContainer {
 super(of);
   }
 
-  static synchronized String getOutputName(int partition) {
-return "part-" + NUMBER_FORMAT.format(partition);
+  static synchronized String getOutputName(TaskAttemptContext context) {
+return context.getConfiguration().get("mapreduce.output.basename", "part")
++ "-" + NUMBER_FORMAT.format(context.getTaskAttemptID().getTaskID().getId());
   }
 
   /**
@@ -65,7 +66,7 @@ class DefaultOutputFormatContainer extends 
OutputFormatContainer {
   @Override
   public RecordWriterWritableComparable?, HCatRecord
   getRecordWriter(TaskAttemptContext context) throws IOException, 
InterruptedException {
-String name = 
getOutputName(context.getTaskAttemptID().getTaskID().getId());
+String name = getOutputName(context);
 return new DefaultRecordWriterContainer(context,
   getBaseOutputFormat().getRecordWriter(null, new 
JobConf(context.getConfiguration()), name, 
InternalUtil.createReporter(context)));
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0f934d29/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
index 60f1b60..320ace4 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
@@ -205,7 +205,8 @@ class DynamicPartitionFileRecordWriterContainer extends 
FileRecordWriterContaine
 
      Path parentDir = new Path(currTaskContext.getConfiguration().get("mapred.work.output.dir"));
      Path childPath =
-          new Path(parentDir, FileOutputFormat.getUniqueFile(currTaskContext, "part", ""));
+          new Path(parentDir, FileOutputFormat.getUniqueFile(currTaskContext,
+              currTaskContext.getConfiguration().get("mapreduce.output.basename", "part"), ""));
 
   RecordWriter baseRecordWriter =
   
baseOF.getRecordWriter(parentDir.getFileSystem(currTaskContext.getConfiguration()),

http://git-wip-us.apache.org/repos/asf/hive/blob/0f934d29/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
index 001b59b..95ee3b4 100644
--- 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -97,7 +97,8 @@ class FileOutputFormatContainer extends OutputFormatContainer 
{
   

hive git commit: HIVE-11438: Join a ACID table with non-ACID table fail with MR on 1.0.0 (Daniel Dai reviewed by Jason Dere)

2015-08-10 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1.0 06f10fe41 -> b71f6aaa9


HIVE-11438: Join a ACID table with non-ACID table fail with MR on 1.0.0 (Daniel 
Dai reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b71f6aaa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b71f6aaa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b71f6aaa

Branch: refs/heads/branch-1.0
Commit: b71f6aaa9a7d669130618c9adce1b820570b50fb
Parents: 06f10fe
Author: Daniel Dai da...@hortonworks.com
Authored: Mon Aug 10 14:30:56 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Mon Aug 10 14:31:39 2015 -0700

--
 .../test/resources/testconfiguration.properties |  1 +
 .../apache/hadoop/hive/ql/exec/Utilities.java   | 10 +++-
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |  2 +-
 .../queries/clientpositive/join_acid_non_acid.q | 24 
 .../clientpositive/join_acid_non_acid.q.out | 58 
 5 files changed, 93 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b71f6aaa/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index 3690e5c..18289f7 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -25,6 +25,7 @@ minimr.query.files=auto_sortmerge_join_16.q,\
   infer_bucket_sort_reducers_power_two.q,\
   input16_cc.q,\
   join1.q,\
+  join_acid_non_acid.q,\
   leftsemijoin_mr.q,\
   list_bucket_dml_10.q,\
   load_fs2.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/b71f6aaa/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index 913288f..b2db584 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -382,7 +382,15 @@ public final class Utilities {
   in = new ByteArrayInputStream(planBytes);
   in = new InflaterInputStream(in);
 } else {
-  in = new FileInputStream(localPath.toUri().getPath());
+  try {
+in = new FileInputStream(localPath.toUri().getPath());
+  } catch (FileNotFoundException fnf) {
+  }
+  // If we are on the frontend, localPath does not exist; try
+  // to fetch the plan from HDFS instead
+  if (in == null) {
+in = path.getFileSystem(conf).open(path);
+  }
 }
 
 if(MAP_PLAN_NAME.equals(name)){
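
A self-contained restatement of the local-first, HDFS-fallback pattern the hunk above uses (class and parameter names here are illustrative, not from the patch):

import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class PlanStreamDemo {
  // On a task node the plan file has been localized; on the frontend it
  // has not, so the FileNotFoundException is swallowed and the plan is
  // read from the distributed filesystem instead.
  static InputStream openPlan(Path localPath, Path hdfsPath, Configuration conf)
      throws IOException {
    InputStream in = null;
    try {
      in = new FileInputStream(localPath.toUri().getPath());
    } catch (FileNotFoundException fnf) {
      // expected on the frontend; fall through to the HDFS copy
    }
    if (in == null) {
      in = hdfsPath.getFileSystem(conf).open(hdfsPath);
    }
    return in;
  }
}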

http://git-wip-us.apache.org/repos/asf/hive/blob/b71f6aaa/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
index 200daa5..be0c947 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
@@ -132,7 +132,7 @@ public class OrcInputFormat  implements 
InputFormatNullWritable, OrcStruct,
   @Override
   public boolean shouldSkipCombine(Path path,
Configuration conf) throws IOException {
-return (conf.get(AcidUtils.CONF_ACID_KEY) != null) || 
AcidUtils.isAcid(path, conf);
+return (conf.getBoolean(AcidUtils.CONF_ACID_KEY, false)) || 
AcidUtils.isAcid(path, conf);
   }
 
   private static class OrcRecordReader
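
The OrcInputFormat change swaps a presence check for a value check; a small illustration of the difference (the key string below is a stand-in for AcidUtils.CONF_ACID_KEY, not its real name):

import org.apache.hadoop.conf.Configuration;

public class BooleanConfDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("acid.key.stand.in", "false"); // explicitly set to false
    // Old check: any value, even "false", made shouldSkipCombine return true.
    System.out.println(conf.get("acid.key.stand.in") != null);       // true
    // New check: only an explicit "true" does.
    System.out.println(conf.getBoolean("acid.key.stand.in", false)); // false
  }
}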

http://git-wip-us.apache.org/repos/asf/hive/blob/b71f6aaa/ql/src/test/queries/clientpositive/join_acid_non_acid.q
--
diff --git a/ql/src/test/queries/clientpositive/join_acid_non_acid.q 
b/ql/src/test/queries/clientpositive/join_acid_non_acid.q
new file mode 100644
index 000..43d768f
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/join_acid_non_acid.q
@@ -0,0 +1,24 @@
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+
+CREATE TABLE orc_update_table (k1 INT, f1 STRING, op_code STRING)
+CLUSTERED BY (k1) INTO 2 BUCKETS
+STORED AS ORC TBLPROPERTIES("transactional"="true");
+
+INSERT INTO TABLE orc_update_table VALUES (1, 'a', 'I');
+
+CREATE TABLE orc_table (k1 INT, f1 STRING)
+CLUSTERED BY (k1) SORTED BY (k1) INTO 2 BUCKETS
+STORED AS ORC;
+
+INSERT OVERWRITE TABLE orc_table VALUES (1, 'x');
+
+set hive.cbo.enable=true;
+SET hive.execution.engine=mr;

[1/2] hive git commit: HIVE-11441: No DDL allowed on table if user accidentally set table location wrong (Daniel Dai reviewed by Thejas Nair)

2015-08-14 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 b9af10d28 -> 878e75b9e


HIVE-11441: No DDL allowed on table if user accidentally set table location 
wrong (Daniel Dai reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b55b6d60
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b55b6d60
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b55b6d60

Branch: refs/heads/branch-1
Commit: b55b6d6000e6606a18e4108c404db3c8170c111e
Parents: 89f16ab
Author: Daniel Dai da...@hortonworks.com
Authored: Fri Aug 14 15:49:13 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Fri Aug 14 15:49:13 2015 -0700

--
 .../apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java | 11 +++
 .../queries/clientnegative/alter_table_wrong_location.q  |  4 
 .../clientnegative/alter_table_wrong_location.q.out  |  9 +
 3 files changed, 24 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b55b6d60/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 24ca663..ba8cdd3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASELOCATION;
 import static 
org.apache.hadoop.hive.ql.parse.HiveParser.TOK_DATABASEPROPERTIES;
 
+import java.io.FileNotFoundException;
 import java.io.Serializable;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
@@ -44,6 +45,7 @@ import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -1467,6 +1469,15 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
   HashMapString, String partSpec) throws SemanticException {
 
 String newLocation = unescapeSQLString(ast.getChild(0).getText());
+    try {
+      // To make sure the host/port pair is valid; the status of the location
+      // does not matter
+      FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
+    } catch (FileNotFoundException e) {
+      // Only check that the host/port pair is valid; whether the file exists or not does not matter
+    } catch (Exception e) {
+      throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
+    }
 addLocationToOutputs(newLocation);
 AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, newLocation, 
partSpec);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b55b6d60/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
--
diff --git a/ql/src/test/queries/clientnegative/alter_table_wrong_location.q 
b/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
new file mode 100644
index 000..3721867
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
@@ -0,0 +1,4 @@
+create table testwrongloc(id int);
+
+-- Assume port 12345 is not open
+alter table testwrongloc set location "hdfs://localhost:12345/tmp/testwrongloc";

http://git-wip-us.apache.org/repos/asf/hive/blob/b55b6d60/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
--
diff --git 
a/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out 
b/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
new file mode 100644
index 000..d788d55
--- /dev/null
+++ b/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
@@ -0,0 +1,9 @@
+PREHOOK: query: create table testwrongloc(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@testwrongloc
+POSTHOOK: query: create table testwrongloc(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@testwrongloc
+#### A masked pattern was here ####
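
A standalone sketch of the reachability check the patch adds, mirroring the DDLSemanticAnalyzer hunk above (class and method names here are illustrative):

import java.io.FileNotFoundException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LocationCheckDemo {
  // Returns normally when the namenode named in 'location' is reachable;
  // the path itself need not exist yet.
  static void checkReachable(String location, Configuration conf) throws Exception {
    try {
      FileSystem.get(new URI(location), conf).getFileStatus(new Path(location));
    } catch (FileNotFoundException e) {
      // fine: the host/port pair resolved, the path just is not there yet
    }
    // any other exception propagates and signals a bad host/port pair
  }
}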



[2/2] hive git commit: Merge branch 'branch-1' of https://git-wip-us.apache.org/repos/asf/hive into branch-1

2015-08-14 Thread daijy
Merge branch 'branch-1' of https://git-wip-us.apache.org/repos/asf/hive into 
branch-1


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/878e75b9
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/878e75b9
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/878e75b9

Branch: refs/heads/branch-1
Commit: 878e75b9e8f7f4cfee8818483782a1427c952b8d
Parents: b55b6d6 b9af10d
Author: Daniel Dai da...@hortonworks.com
Authored: Fri Aug 14 15:49:34 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Fri Aug 14 15:49:34 2015 -0700

--
 .../hive/metastore/TestHiveMetaStore.java   | 66 
 .../hadoop/hive/metastore/ObjectStore.java  |  7 ++-
 .../hive/spark/client/SparkClientImpl.java  |  4 ++
 3 files changed, 75 insertions(+), 2 deletions(-)
--




hive git commit: HIVE-11441: No DDL allowed on table if user accidentally set table location wrong (Daniel Dai reviewed by Thejas Nair)

2015-08-14 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 17e95c7c7 - 2ccd06169


HIVE-11441: No DDL allowed on table if user accidentally set table location 
wrong (Daniel Dai reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2ccd0616
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2ccd0616
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2ccd0616

Branch: refs/heads/master
Commit: 2ccd061691cd52ed9fa341b61590edb2c022b031
Parents: 17e95c7
Author: Daniel Dai da...@hortonworks.com
Authored: Fri Aug 14 15:28:44 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Fri Aug 14 15:28:44 2015 -0700

--
 .../apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java | 11 +++
 .../queries/clientnegative/alter_table_wrong_location.q  |  4 
 .../clientnegative/alter_table_wrong_location.q.out  |  9 +
 3 files changed, 24 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2ccd0616/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 21625bc..9f8c756 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -23,6 +23,7 @@ import org.antlr.runtime.tree.CommonTree;
 import org.antlr.runtime.tree.Tree;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -137,6 +138,7 @@ import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
 
+import java.io.FileNotFoundException;
 import java.io.Serializable;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
@@ -1464,6 +1466,15 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
   HashMapString, String partSpec) throws SemanticException {
 
 String newLocation = unescapeSQLString(ast.getChild(0).getText());
+    try {
+      // To make sure the host/port pair is valid; the status of the location
+      // does not matter
+      FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
+    } catch (FileNotFoundException e) {
+      // Only check that the host/port pair is valid; whether the file exists or not does not matter
+    } catch (Exception e) {
+      throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
+    }
 addLocationToOutputs(newLocation);
 AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, newLocation, 
partSpec);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/2ccd0616/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
--
diff --git a/ql/src/test/queries/clientnegative/alter_table_wrong_location.q 
b/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
new file mode 100644
index 000..3721867
--- /dev/null
+++ b/ql/src/test/queries/clientnegative/alter_table_wrong_location.q
@@ -0,0 +1,4 @@
+create table testwrongloc(id int);
+
+-- Assume port 12345 is not open
+alter table testwrongloc set location "hdfs://localhost:12345/tmp/testwrongloc";

http://git-wip-us.apache.org/repos/asf/hive/blob/2ccd0616/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
--
diff --git 
a/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out 
b/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
new file mode 100644
index 000..d788d55
--- /dev/null
+++ b/ql/src/test/results/clientnegative/alter_table_wrong_location.q.out
@@ -0,0 +1,9 @@
+PREHOOK: query: create table testwrongloc(id int)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@testwrongloc
+POSTHOOK: query: create table testwrongloc(id int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@testwrongloc
+#### A masked pattern was here ####



hive git commit: HIVE-11442: Remove commons-configuration.jar from Hive distribution

2015-08-12 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 a6d3070ec -> 425273eed


HIVE-11442: Remove commons-configuration.jar from Hive distribution


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/425273ee
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/425273ee
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/425273ee

Branch: refs/heads/branch-1
Commit: 425273eed3213abbd08404d1aa743b38155e202e
Parents: a6d3070
Author: Daniel Dai da...@hortonworks.com
Authored: Wed Aug 12 10:12:02 2015 -0700
Committer: Daniel Dai da...@hortonworks.com
Committed: Wed Aug 12 10:12:02 2015 -0700

--
 jdbc/pom.xml| 1 +
 packaging/src/main/assembly/bin.xml | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/425273ee/jdbc/pom.xml
--
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index 322b672..8cf5210 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -189,6 +189,7 @@
               <artifactSet>
                 <excludes>
                   <exclude>org.apache.commons:commons-compress</exclude>
+                  <exclude>commons-configuration:commons-configuration</exclude>
                   <exclude>org.apache.hadoop:*</exclude>
                   <exclude>org.apache.hive:hive-ant</exclude>
                   <exclude>org.apache.ant:*</exclude>

http://git-wip-us.apache.org/repos/asf/hive/blob/425273ee/packaging/src/main/assembly/bin.xml
--
diff --git a/packaging/src/main/assembly/bin.xml 
b/packaging/src/main/assembly/bin.xml
index a1c176f..63253c5 100644
--- a/packaging/src/main/assembly/bin.xml
+++ b/packaging/src/main/assembly/bin.xml
@@ -41,7 +41,8 @@
      <excludes>
        <exclude>org.apache.hadoop:*</exclude>
        <exclude>org.apache.hive.hcatalog:*</exclude>
-   <exclude>org.slf4j:*</exclude>
+        <exclude>org.slf4j:*</exclude>
+        <exclude>commons-configuration:commons-configuration</exclude>
      </excludes>
    </dependencySet>
    <dependencySet>
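
One hedged way to verify the exclusion took effect in a packaged build; the class name is from commons-configuration 1.x, and the check itself is illustrative rather than part of the patch:

public class ClasspathCheckDemo {
  public static void main(String[] args) {
    try {
      Class.forName("org.apache.commons.configuration.Configuration");
      System.out.println("commons-configuration is still on the classpath");
    } catch (ClassNotFoundException e) {
      System.out.println("commons-configuration excluded as intended");
    }
  }
}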



hive git commit: HIVE-12282: beeline - update command printing in verbose mode (Daniel Dai, reviewed by Thejas Nair, Lefty Leverenz)

2015-10-28 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 99a043a05 -> 63dc1fa61


HIVE-12282: beeline - update command printing in verbose mode (Daniel Dai, 
reviewed by Thejas Nair, Lefty Leverenz)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/63dc1fa6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/63dc1fa6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/63dc1fa6

Branch: refs/heads/master
Commit: 63dc1fa61d071b64664c5b7dfb700b9c18bcca50
Parents: 99a043a
Author: Daniel Dai 
Authored: Wed Oct 28 21:24:42 2015 -0700
Committer: Daniel Dai 
Committed: Wed Oct 28 21:24:42 2015 -0700

--
 .../java/org/apache/hive/beeline/BeeLine.java   | 22 ++--
 .../hive/beeline/TestBeelineArgParsing.java | 18 +++-
 2 files changed, 33 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/63dc1fa6/beeline/src/java/org/apache/hive/beeline/BeeLine.java
--
diff --git a/beeline/src/java/org/apache/hive/beeline/BeeLine.java 
b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
index 4e04997..377703f 100644
--- a/beeline/src/java/org/apache/hive/beeline/BeeLine.java
+++ b/beeline/src/java/org/apache/hive/beeline/BeeLine.java
@@ -151,6 +151,7 @@ public class BeeLine implements Closeable {
 
   private static final String HIVE_VAR_PREFIX = "--hivevar";
   private static final String HIVE_CONF_PREFIX = "--hiveconf";
+  static final String PASSWD_MASK = "[passwd stripped]";
 
  private final Map<Object, Object> formats = map(new Object[] {
   "vertical", new VerticalOutputFormat(this),
@@ -768,12 +769,9 @@ public class BeeLine implements Closeable {
 */
 
 if (url != null) {
-  String com = "!connect "
-  + url + " "
-  + (user == null || user.length() == 0 ? "''" : user) + " "
-  + (pass == null || pass.length() == 0 ? "''" : pass) + " "
-  + (driver == null ? "" : driver);
-  debug("issuing: " + com);
+  String com = constructCmd(url, user, pass, driver, false);
+  String comForDebug = constructCmd(url, user, pass, driver, true);
+  debug("issuing: " + comForDebug);
   dispatch(com);
 }
 
@@ -796,6 +794,18 @@ public class BeeLine implements Closeable {
 return code;
   }
 
+  private String constructCmd(String url, String user, String pass, String 
driver, boolean stripPasswd) {
+String com = "!connect "
++ url + " "
++ (user == null || user.length() == 0 ? "''" : user) + " ";
+if (stripPasswd) {
+  com += PASSWD_MASK + " ";
+} else {
+  com += (pass == null || pass.length() == 0 ? "''" : pass) + " ";
+}
+com += (driver == null ? "" : driver);
+return com;
+  }
   /**
* Obtains a password from the passed file path.
*/

http://git-wip-us.apache.org/repos/asf/hive/blob/63dc1fa6/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
--
diff --git 
a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java 
b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
index 06d6ffe..80c6e06 100644
--- a/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
+++ b/beeline/src/test/org/apache/hive/beeline/TestBeelineArgParsing.java
@@ -23,9 +23,11 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
-
 import java.io.File;
 import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -244,4 +246,18 @@ public class TestBeelineArgParsing {
   
Assert.assertEquals(bl.findLocalDriver(connectionString).getClass().getName(), 
driverClazzName);
 }
   }
+
+  @Test
+  public void testBeelinePasswordMask() throws Exception {
+TestBeeline bl = new TestBeeline();
+File errFile = File.createTempFile("test", "tmp");
+bl.setErrorStream(new PrintStream(new FileOutputStream(errFile)));
+String args[] =
+new String[] { "-u", "url", "-n", "name", "-p", "password", "-d", 
"driver",
+"--autoCommit=true", "--verbose", "--truncateTable" };
+bl.initArgs(args);
+bl.close();
+    String errContents = new String(Files.readAllBytes(Paths.get(errFile.toString())));
+Assert.assertTrue(errContents.contains(BeeLine.PASSWD_MASK));
+  }
 }
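
A standalone restatement of the constructCmd logic shown above, for illustration only; the URL, user, and driver values are made up:

public class PasswdMaskDemo {
  static final String PASSWD_MASK = "[passwd stripped]";

  static String constructCmd(String url, String user, String pass,
      String driver, boolean stripPasswd) {
    String com = "!connect " + url + " "
        + (user == null || user.length() == 0 ? "''" : user) + " ";
    if (stripPasswd) {
      com += PASSWD_MASK + " ";
    } else {
      com += (pass == null || pass.length() == 0 ? "''" : pass) + " ";
    }
    com += (driver == null ? "" : driver);
    return com;
  }

  public static void main(String[] args) {
    System.out.println(constructCmd("jdbc:hive2://host:10000", "hive",
        "secret", "org.apache.hive.jdbc.HiveDriver", true));
    // prints: !connect jdbc:hive2://host:10000 hive [passwd stripped] org.apache.hive.jdbc.HiveDriver
  }
}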



hive git commit: HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair)

2015-10-25 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 e654efeb3 -> af07b4171


HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, 
reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/af07b417
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/af07b417
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/af07b417

Branch: refs/heads/branch-1
Commit: af07b417108743568301e3fd99d54a8361065367
Parents: e654efe
Author: Daniel Dai 
Authored: Sun Oct 25 09:46:27 2015 -0700
Committer: Daniel Dai 
Committed: Sun Oct 25 09:46:27 2015 -0700

--
 .../hive/service/cli/session/HiveSessionImpl.java   | 12 
 1 file changed, 12 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/af07b417/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java 
b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index 8635646..bb302fc 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -231,6 +231,18 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public void setOperationLogSessionDir(File operationLogRootDir) {
+if (!operationLogRootDir.exists()) {
+  LOG.warn("The operation log root directory is removed, recreating:" +
+  operationLogRootDir.getAbsolutePath());
+  if (!operationLogRootDir.mkdirs()) {
+LOG.warn("Unable to create operation log root directory: " +
+operationLogRootDir.getAbsolutePath());
+  }
+}
+if (!operationLogRootDir.canWrite()) {
+  LOG.warn("The operation log root directory is not writable: " +
+  operationLogRootDir.getAbsolutePath());
+}
 sessionLogDir = new File(operationLogRootDir, 
sessionHandle.getHandleIdentifier().toString());
 isOperationLogEnabled = true;
 if (!sessionLogDir.exists()) {
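
A minimal sketch of the defensive directory setup the hunk above adds: recreate the root if an external cleaner removed it, and warn rather than fail when it is unusable. The class name and the tmpdir path below are illustrative:

import java.io.File;

public class LogDirDemo {
  static void ensureUsable(File root) {
    if (!root.exists() && !root.mkdirs()) {
      System.err.println("Unable to create: " + root.getAbsolutePath());
    }
    if (!root.canWrite()) {
      System.err.println("Not writable: " + root.getAbsolutePath());
    }
  }

  public static void main(String[] args) {
    ensureUsable(new File(System.getProperty("java.io.tmpdir"), "operation_logs"));
  }
}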



hive git commit: HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, reviewed by Thejas Nair)

2015-10-25 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master e3ef96f2b -> 48a1e1f7b


HIVE-12262: Session log dir cannot be created in some cases (Daniel Dai, 
reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/48a1e1f7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/48a1e1f7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/48a1e1f7

Branch: refs/heads/master
Commit: 48a1e1f7b051e6ca2a5bbb4a55eda16f5349d369
Parents: e3ef96f
Author: Daniel Dai 
Authored: Sun Oct 25 09:46:27 2015 -0700
Committer: Daniel Dai 
Committed: Sun Oct 25 09:48:00 2015 -0700

--
 .../hive/service/cli/session/HiveSessionImpl.java   | 12 
 1 file changed, 12 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/48a1e1f7/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
--
diff --git 
a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java 
b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
index a600309..3eaab9a 100644
--- a/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
+++ b/service/src/java/org/apache/hive/service/cli/session/HiveSessionImpl.java
@@ -241,6 +241,18 @@ public class HiveSessionImpl implements HiveSession {
 
   @Override
   public void setOperationLogSessionDir(File operationLogRootDir) {
+if (!operationLogRootDir.exists()) {
+  LOG.warn("The operation log root directory is removed, recreating:" +
+  operationLogRootDir.getAbsolutePath());
+  if (!operationLogRootDir.mkdirs()) {
+LOG.warn("Unable to create operation log root directory: " +
+operationLogRootDir.getAbsolutePath());
+  }
+}
+if (!operationLogRootDir.canWrite()) {
+  LOG.warn("The operation log root directory is not writable: " +
+  operationLogRootDir.getAbsolutePath());
+}
 sessionLogDir = new File(operationLogRootDir, 
sessionHandle.getHandleIdentifier().toString());
 isOperationLogEnabled = true;
 if (!sessionLogDir.exists()) {



hive git commit: HIVE-12327: WebHCat e2e tests TestJob_1 and TestJob_2 fail (Daniel Dai, reviewed by Thejas Nair)

2015-11-03 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 13f8cfece -> 16a86b261


HIVE-12327: WebHCat e2e tests TestJob_1 and TestJob_2 fail (Daniel Dai, 
reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/16a86b26
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/16a86b26
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/16a86b26

Branch: refs/heads/master
Commit: 16a86b2612439d6ba1f920deaea822ae4da3dedc
Parents: 13f8cfe
Author: Daniel Dai 
Authored: Tue Nov 3 21:29:46 2015 -0800
Committer: Daniel Dai 
Committed: Tue Nov 3 21:29:46 2015 -0800

--
 .../src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/16a86b26/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
--
diff --git 
a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java 
b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
index 288043f..b020ffe 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
@@ -41,6 +41,7 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.regex.Pattern;
 
 public class WebHCatJTShim23 implements WebHCatJTShim {
   private static final Logger LOG = 
LoggerFactory.getLogger(WebHCatJTShim23.class);
@@ -139,7 +140,8 @@ public class WebHCatJTShim23 implements WebHCatJTShim {
 }
 catch(IOException ex) {
   String msg = ex.getMessage();
-  if(msg != null && msg.contains("ApplicationNotFoundException")) {
+  if(msg != null && (msg.contains("ApplicationNotFoundException") ||
+  Pattern.compile("History file.*not found").matcher(msg).find())) {
 LOG.info("Job(" + jobid + ") not found: " + msg);
 return null;
   }
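
A quick illustration of the widened error matching above; the exception message is made up but has the shape the shim now tolerates:

import java.util.regex.Pattern;

public class HistoryMsgDemo {
  public static void main(String[] args) {
    String msg = "java.io.IOException: History file /mr-history/done/job_1.jhist not found";
    boolean treatAsMissing = msg.contains("ApplicationNotFoundException")
        || Pattern.compile("History file.*not found").matcher(msg).find();
    System.out.println(treatAsMissing); // prints: true
  }
}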



hive git commit: HIVE-12327: WebHCat e2e tests TestJob_1 and TestJob_2 fail (Daniel Dai, reviewed by Thejas Nair)

2015-11-03 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 d794ff9e0 -> e3f72d52b


HIVE-12327: WebHCat e2e tests TestJob_1 and TestJob_2 fail (Daniel Dai, 
reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e3f72d52
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e3f72d52
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e3f72d52

Branch: refs/heads/branch-1
Commit: e3f72d52b5ba2e7448bf46a6d3590642196c8767
Parents: d794ff9
Author: Daniel Dai 
Authored: Tue Nov 3 21:29:46 2015 -0800
Committer: Daniel Dai 
Committed: Tue Nov 3 21:30:28 2015 -0800

--
 .../src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java  | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e3f72d52/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
--
diff --git 
a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java 
b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
index c85a739..f1947bb 100644
--- a/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
+++ b/shims/0.23/src/main/java/org/apache/hadoop/mapred/WebHCatJTShim23.java
@@ -41,6 +41,7 @@ import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
+import java.util.regex.Pattern;
 
 public class WebHCatJTShim23 implements WebHCatJTShim {
   private static final Log LOG = LogFactory.getLog(WebHCatJTShim23.class);
@@ -139,7 +140,8 @@ public class WebHCatJTShim23 implements WebHCatJTShim {
 }
 catch(IOException ex) {
   String msg = ex.getMessage();
-  if(msg != null && msg.contains("ApplicationNotFoundException")) {
+  if(msg != null && (msg.contains("ApplicationNotFoundException") ||
+  Pattern.compile("History file.*not found").matcher(msg).find())) {
 LOG.info("Job(" + jobid + ") not found: " + msg);
 return null;
   }



hive git commit: HIVE-11694: Exclude hbase-metastore for hadoop-1 (Daniel Dai reviewed by Thejas Nair)

2015-09-01 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/hbase-metastore fbbb7cf1f -> 3d170cae5


HIVE-11694: Exclude hbase-metastore for hadoop-1 (Daniel Dai reviewed by Thejas 
Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3d170cae
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3d170cae
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3d170cae

Branch: refs/heads/hbase-metastore
Commit: 3d170cae5f41b0a3928117328aeaac3aefbc7cef
Parents: fbbb7cf
Author: Daniel Dai 
Authored: Tue Sep 1 12:43:42 2015 -0700
Committer: Daniel Dai 
Committed: Tue Sep 1 12:43:42 2015 -0700

--
 metastore/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3d170cae/metastore/pom.xml
--
diff --git a/metastore/pom.xml b/metastore/pom.xml
index ccec9f1..255726c 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -159,6 +159,23 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <version>2.3.2</version>
+            <configuration>
+              <excludes>
+                <exclude>**/hbase/**</exclude>
+              </excludes>
+              <testExcludes>
+                <exclude>**/hbase/**</exclude>
+              </testExcludes>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>



hive git commit: HIVE-11692: Fix UT regressions on hbase-metastore branch (Daniel Dai reviewed by Thejas Nair)

2015-09-01 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/hbase-metastore 3d170cae5 -> 8b0ededf5


HIVE-11692: Fix UT regressions on hbase-metastore branch (Daniel Dai reviewed 
by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8b0ededf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8b0ededf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8b0ededf

Branch: refs/heads/hbase-metastore
Commit: 8b0ededf574ff33c7fe4a952aad42ece1467237d
Parents: 3d170ca
Author: Daniel Dai 
Authored: Tue Sep 1 13:18:35 2015 -0700
Committer: Daniel Dai 
Committed: Tue Sep 1 13:18:35 2015 -0700

--
 data/conf/hbase/hive-site.xml   | 263 ---
 .../hive/metastore/TestHiveMetaStore.java   |   3 +
 .../hive/metastore/hbase/TestHBaseImport.java   |  18 +-
 itests/qtest/pom.xml|  19 --
 .../test/resources/testconfiguration.properties |  46 
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   4 +
 metastore/pom.xml   |   6 +
 .../hadoop/hive/metastore/TestObjectStore.java  |  43 ++-
 .../dynpart_sort_opt_vectorization.q.out|  12 +-
 .../dynpart_sort_optimization.q.out |  12 +-
 10 files changed, 76 insertions(+), 350 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8b0ededf/data/conf/hbase/hive-site.xml
--
diff --git a/data/conf/hbase/hive-site.xml b/data/conf/hbase/hive-site.xml
deleted file mode 100644
index 2cde40f..000
--- a/data/conf/hbase/hive-site.xml
+++ /dev/null
@@ -1,263 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<configuration>
-
-<property>
-  <name>hive.in.test</name>
-  <value>true</value>
-  <description>Internal marker for test. Used for masking env-dependent values</description>
-</property>
-
-
-
-<property>
-  <name>hadoop.tmp.dir</name>
-  <value>${test.tmp.dir}/hadoop-tmp</value>
-  <description>A base for other temporary directories.</description>
-</property>
-
-
-<property>
-  <name>hive.exec.scratchdir</name>
-  <value>${test.tmp.dir}/scratchdir</value>
-  <description>Scratch space for Hive jobs</description>
-</property>
-
-<property>
-  <name>hive.exec.local.scratchdir</name>
-  <value>${test.tmp.dir}/localscratchdir/</value>
-  <description>Local scratch space for Hive jobs</description>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionURL</name>
-  <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
-</property>
-
-<property>
-  <name>hive.stats.dbconnectionstring</name>
-  <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionDriverName</name>
-  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionUserName</name>
-  <value>APP</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionPassword</name>
-  <value>mine</value>
-</property>
-
-<property>
-  <name>hive.metastore.warehouse.dir</name>
-  <value>${test.warehouse.dir}</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.metastore.metadb.dir</name>
-  <value>file://${test.tmp.dir}/metadb/</value>
-  <description>
-  Required by metastore server or if the uris argument below is not supplied
-  </description>
-</property>
-
-<property>
-  <name>test.log.dir</name>
-  <value>${test.tmp.dir}/log/</value>
-  <description></description>
-</property>
-
-<property>
-  <name>test.data.files</name>
-  <value>${hive.root}/data/files</value>
-  <description></description>
-</property>
-
-<property>
-  <name>test.data.scripts</name>
-  <value>${hive.root}/data/scripts</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.jar.path</name>
-  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.querylog.location</name>
-  <value>${test.tmp.dir}/tmp</value>
-  <description>Location of the structured hive logs</description>
-</property>
-
-<property>
-  <name>hive.exec.pre.hooks</name>
-  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
-  <description>Pre Execute Hook for Tests</description>
-</property>
-
-<property>
-  <name>hive.exec.post.hooks</name>
-  <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
-  <description>Post Execute Hook for Tests</description>
-</property>
-
-<property>
-  <name>hive.support.concurrency</name>
-  <value>false</value>
-  <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
-</property>
-
-<property>
-  <name>hive.unlock.numretries</name>
-  <value>2</value>
-  <description>The number of times you want to retry to do one unlock</description>
-</property>
-
-<property>
-  <name>hive.lock.sleep.between.retries</name>
-  <value>2</value>
-  <description>The sleep time (in seconds) between various retries</description>
-</property>
-
-
-<property>
-  <name>fs.pfile.impl</name>
-  <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
-  <description>A proxy for local file system used for cross file system testing</description>
-</property>
-
-<property>
-  <name>hive.exec.mode.local.auto</name>
-  <value>false</value>
-  <description>
-    Let hive determine whether to run in local mode automatically
-    Disabling this for tests so that minimr is not affected
-  </description>
-</property>
-
-<property>
-  <name>hive.auto.convert.join</name>
-  <value>false</value>
-  <description>Whether Hive enable the optimization about converting common join into mapjoin based on the input file size</description>
-</property>
-
-<property>
-  <name>hive.ignore.mapjoin.hint</name>
-  <value>false</value>
-  <description>Whether Hive ignores the mapjoin hint</description>
-</property>
-
-<property>
-  <name>hive.input.format</name>
-  <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
-  <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat. </description>
-</property>
-
-<property>
hive git commit: HIVE-11743: HBase Port conflict for MiniHBaseCluster

2015-09-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/hbase-metastore 757553e64 -> 76828e0ad


HIVE-11743: HBase Port conflict for MiniHBaseCluster


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/76828e0a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/76828e0a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/76828e0a

Branch: refs/heads/hbase-metastore
Commit: 76828e0ade2e55d30b0680c7312737a8212a158f
Parents: 757553e
Author: Daniel Dai 
Authored: Sat Sep 5 11:12:21 2015 -0700
Committer: Daniel Dai 
Committed: Sat Sep 5 11:12:21 2015 -0700

--
 .../hadoop/hive/metastore/hbase/HBaseIntegrationTests.java   | 8 +++-
 .../src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java   | 6 +-
 2 files changed, 12 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/76828e0a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
index 02e481a..5b82579 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
@@ -19,8 +19,11 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 import co.cask.tephra.hbase10.coprocessor.TransactionProcessor;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -32,6 +35,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -60,7 +64,9 @@ public class HBaseIntegrationTests {
 if (testingTephra) {
   LOG.info("Testing with Tephra");
 }
-utility = new HBaseTestingUtility();
+Configuration hbaseConf = HBaseConfiguration.create();
+hbaseConf.setInt("hbase.master.info.port", -1);
+utility = new HBaseTestingUtility(hbaseConf);
 utility.startMiniCluster();
 conf = new HiveConf(utility.getConfiguration(), 
HBaseIntegrationTests.class);
 admin = utility.getHBaseAdmin();

http://git-wip-us.apache.org/repos/asf/hive/blob/76828e0a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 24a85cf..16e73c9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -62,9 +62,11 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
@@ -348,7 +350,9 @@ public class QTestUtil {
   }
 
   private void startMiniHBaseCluster() throws Exception {
-utility = new HBaseTestingUtility();
+Configuration hbaseConf = HBaseConfiguration.create();
+hbaseConf.setInt("hbase.master.info.port", -1);
+utility = new HBaseTestingUtility(hbaseConf);
 utility.startMiniCluster();
 conf = new HiveConf(utility.getConfiguration(), Driver.class);
 HBaseAdmin admin = utility.getHBaseAdmin();
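
A minimal sketch of the pattern both hunks above apply: the property value -1 disables the HBase master's info web server so parallel test runs do not fight over its default port. The try/finally shape here is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;

public class MiniClusterDemo {
  public static void main(String[] args) throws Exception {
    Configuration hbaseConf = HBaseConfiguration.create();
    hbaseConf.setInt("hbase.master.info.port", -1); // -1 disables the info UI
    HBaseTestingUtility utility = new HBaseTestingUtility(hbaseConf);
    utility.startMiniCluster();
    try {
      // ... run tests against utility.getConfiguration() ...
    } finally {
      utility.shutdownMiniCluster();
    }
  }
}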



hive git commit: HIVE-11731: Exclude hbase-metastore in itests for hadoop-1

2015-09-03 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/hbase-metastore 848b9770a -> 757553e64


HIVE-11731: Exclude hbase-metastore in itests for hadoop-1


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/757553e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/757553e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/757553e6

Branch: refs/heads/hbase-metastore
Commit: 757553e64280088bb2fc1546ac1259a519d064a6
Parents: 848b977
Author: Daniel Dai 
Authored: Thu Sep 3 21:57:42 2015 -0700
Committer: Daniel Dai 
Committed: Thu Sep 3 21:57:42 2015 -0700

--
 .../metastore/hbase/HBaseStoreTestUtil.java | 45 
 1 file changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/757553e6/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
--
diff --git 
a/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
 
b/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
new file mode 100644
index 000..1f42007
--- /dev/null
+++ 
b/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class HBaseStoreTestUtil {
+  public static void initHBaseMetastore(HBaseAdmin admin, HiveConf conf) 
throws Exception {
+for (String tableName : HBaseReadWrite.tableNames) {
+  List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
+  HTableDescriptor desc = new 
HTableDescriptor(TableName.valueOf(tableName));
+  for (byte[] family : families) {
+HColumnDescriptor columnDesc = new HColumnDescriptor(family);
+desc.addFamily(columnDesc);
+  }
+  admin.createTable(desc);
+}
+admin.close();
+if (conf != null) {
+  HBaseReadWrite.getInstance(conf);
+}
+  }
+}
\ No newline at end of file
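
An illustrative call site for the helper added above, assuming a mini-cluster is already running (see the HBaseTestingUtility setup elsewhere in this thread); the wrapper class is made up:

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.hbase.HBaseStoreTestUtil;

public class InitMetastoreDemo {
  // Creates one HBase table per metastore table, then primes HBaseReadWrite.
  static void init(HBaseTestingUtility utility, HiveConf conf) throws Exception {
    HBaseStoreTestUtil.initHBaseMetastore(utility.getHBaseAdmin(), conf);
  }
}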



hive git commit: HIVE-10752: Revert HIVE-5193 (Aihua Xu via Chaoyu Tang)

2015-10-01 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 f953fc3cf -> fda7c5175


HIVE-10752: Revert HIVE-5193 (Aihua Xu via Chaoyu Tang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fda7c517
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fda7c517
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fda7c517

Branch: refs/heads/branch-1
Commit: fda7c517586621c0319f86e691cdfc3520f984ed
Parents: f953fc3
Author: Daniel Dai 
Authored: Thu Oct 1 10:31:02 2015 -0700
Committer: Daniel Dai 
Committed: Thu Oct 1 10:31:02 2015 -0700

--
 .../apache/hive/hcatalog/pig/HCatLoader.java|  8 
 .../hive/hcatalog/pig/TestHCatLoader.java   | 44 
 2 files changed, 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fda7c517/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
--
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
 
b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
index c951847..0685790 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/HCatLoader.java
@@ -19,7 +19,6 @@
 package org.apache.hive.hcatalog.pig;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
@@ -32,7 +31,6 @@ import 
org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.security.Credentials;
@@ -163,12 +161,6 @@ public class HCatLoader extends HCatBaseLoader {
 if (requiredFieldsInfo != null) {
   // convert to hcatschema and pass to HCatInputFormat
   try {
-//push down projections to columnar store works for RCFile and ORCFile
-ArrayList<Integer> list = new ArrayList<Integer>(requiredFieldsInfo.getFields().size());
-for (RequiredField rf : requiredFieldsInfo.getFields()) {
-  list.add(rf.getIndex());
-}
-ColumnProjectionUtils.appendReadColumns(job.getConfiguration(), list);
 outputSchema = phutil.getHCatSchema(requiredFieldsInfo.getFields(), 
signature, this.getClass());
 HCatInputFormat.setOutputSchema(job, outputSchema);
   } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/fda7c517/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
--
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
index fc18a3b..45a219c 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoader.java
@@ -19,8 +19,6 @@
 package org.apache.hive.hcatalog.pig;
 
 import java.io.File;
-import java.io.FileWriter;
-import java.io.PrintWriter;
 import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.sql.Date;
@@ -36,10 +34,7 @@ import java.util.Properties;
 import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
-
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
@@ -49,7 +44,6 @@ import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.mapreduce.Job;
 
 import org.apache.hadoop.util.Shell;
@@ -66,10 +60,6 @@ import org.apache.pig.data.DataType;
 import org.apache.pig.data.Tuple;
 import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
-import org.apache.pig.PigRunner;
-import org.apache.pig.tools.pigstats.OutputStats;
-import 

[16/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
index ae39507,a877338..fe60838
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnAbortedException.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class TxnAbortedException extends TException implements org.apache.thrift.TBase<TxnAbortedException, TxnAbortedException._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-14")
+ public class TxnAbortedException extends TException implements org.apache.thrift.TBase<TxnAbortedException, TxnAbortedException._Fields>, java.io.Serializable, Cloneable, Comparable<TxnAbortedException> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("TxnAbortedException");
  
private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = 
new org.apache.thrift.protocol.TField("message", 
org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
index 18cbe53,8b255b9..266fbe1
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnInfo.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class TxnInfo implements org.apache.thrift.TBase<TxnInfo, TxnInfo._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-14")
+ public class TxnInfo implements org.apache.thrift.TBase<TxnInfo, TxnInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TxnInfo> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("TxnInfo");
  
private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new 
org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, 
(short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
index 4f5d02d,05af505..18db1b8
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TxnOpenException.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class TxnOpenException extends TException implements org.apache.thrift.TBase<TxnOpenException, TxnOpenException._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = 
"2015-8-14")
+ public class TxnOpenException extends TException implements org.apache.thrift.TBase<TxnOpenException, TxnOpenException._Fields>, java.io.Serializable, Cloneable, Comparable<TxnOpenException> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new 
org.apache.thrift.protocol.TStruct("TxnOpenException");
  
private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = 
new org.apache.thrift.protocol.TField("message", 
org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java

[01/50] [abbrv] hive git commit: HIVE-11294 Use HBase to cache aggregated stats (gates)

2015-09-21 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 514ab795f -> 523830338


http://git-wip-us.apache.org/repos/asf/hive/blob/c53c6f45/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
new file mode 100644
index 000..89c3e7b
--- /dev/null
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/AggrStatsInvalidatorFilter.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.filter.FilterBase;
+import org.apache.hive.common.util.BloomFilter;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Filter for scanning aggregates stats table
+ */
+public class AggrStatsInvalidatorFilter extends FilterBase {
+  private static final Log LOG =
+  LogFactory.getLog(AggrStatsInvalidatorFilter.class.getName());
+  private final List<HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry> entries;
+  private final long runEvery;
+  private final long maxCacheEntryLife;
+  // This class is not serializable, so I realize transient doesn't mean 
anything.  It's just to
+  // communicate that we don't serialize this and ship it across to the filter on the other end.
+  // We use the time the filter is actually instantiated in HBase.
+  private transient long now;
+
+  public static Filter parseFrom(byte[] serialized) throws 
DeserializationException {
+try {
+  return new AggrStatsInvalidatorFilter(
+  
HbaseMetastoreProto.AggrStatsInvalidatorFilter.parseFrom(serialized));
+} catch (InvalidProtocolBufferException e) {
+  throw new DeserializationException(e);
+}
+  }
+
+  /**
+   * @param proto Protocol buffer representation of this filter.
+   */
+  AggrStatsInvalidatorFilter(HbaseMetastoreProto.AggrStatsInvalidatorFilter 
proto) {
+this.entries = proto.getToInvalidateList();
+this.runEvery = proto.getRunEvery();
+this.maxCacheEntryLife = proto.getMaxCacheEntryLife();
+now = System.currentTimeMillis();
+  }
+
+  @Override
+  public byte[] toByteArray() throws IOException {
+return HbaseMetastoreProto.AggrStatsInvalidatorFilter.newBuilder()
+.addAllToInvalidate(entries)
+.setRunEvery(runEvery)
+.setMaxCacheEntryLife(maxCacheEntryLife)
+.build()
+.toByteArray();
+  }
+
+  @Override
+  public boolean filterAllRemaining() throws IOException {
+return false;
+  }
+
+  @Override
+  public ReturnCode filterKeyValue(Cell cell) throws IOException {
+    // Is this the partition we want?
+    if (Arrays.equals(CellUtil.cloneQualifier(cell), HBaseReadWrite.AGGR_STATS_BLOOM_COL)) {
+      HbaseMetastoreProto.AggrStatsBloomFilter fromCol =
+          HbaseMetastoreProto.AggrStatsBloomFilter.parseFrom(CellUtil.cloneValue(cell));
+      BloomFilter bloom = null;
+      if (now - maxCacheEntryLife > fromCol.getAggregatedAt()) {
+        // It's too old, kill it regardless of whether we were asked to or not.
+        return ReturnCode.INCLUDE;
+      } else if (now - runEvery * 2 <= fromCol.getAggregatedAt()) {
+        // It's too new.  We might be stomping on something that was just created.  Skip it.
+        return ReturnCode.NEXT_ROW;
+      } else {
+        // Look through each of our entries and see if any of them match.
+        for (HbaseMetastoreProto.AggrStatsInvalidatorFilter.Entry entry : entries) {
+          // First check whether the db and table match.
+          if (entry.getDbName().equals(fromCol.getDbName()) &&
+              entry.getTableName().equals(fromCol.getTableName())) {
+            if (bloom == null) {

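For readers following the filter logic above: a filter like this is shipped to the region servers inside a client-side Scan. A minimal sketch of the client half (illustrative names, not part of this patch; only Scan.setFilter is assumed from the HBase API):

    // Sketch: attach a custom FilterBase subclass to a scan. HBase serializes
    // the filter via toByteArray() and recreates it server-side via parseFrom().
    import org.apache.hadoop.hbase.client.Scan;
    import org.apache.hadoop.hbase.filter.Filter;

    public class ScanWithInvalidator {
      public static Scan buildScan(Filter invalidator) {
        Scan scan = new Scan();
        scan.setFilter(invalidator);  // e.g. an AggrStatsInvalidatorFilter instance
        return scan;
      }
    }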
[11/50] [abbrv] hive git commit: HIVE-11379 Bump Tephra version to 0.6.0 (gates)

2015-09-21 Thread daijy
HIVE-11379 Bump Tephra version to 0.6.0 (gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9d3d4ebf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9d3d4ebf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9d3d4ebf

Branch: refs/heads/master
Commit: 9d3d4ebfefa96f442150989c8842a1df8a438dc2
Parents: 5acf458
Author: Alan Gates 
Authored: Thu Jul 30 10:00:37 2015 -0700
Committer: Alan Gates 
Committed: Thu Jul 30 10:00:37 2015 -0700

--
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9d3d4ebf/pom.xml
--
diff --git a/pom.xml b/pom.xml
index bb629b2..d03fb5f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -174,7 +174,7 @@
 2.4.0
 2.6.0
 3.0.0
-    <tephra.version>0.5.1-SNAPSHOT</tephra.version>
+    <tephra.version>0.6.0</tephra.version>
 2.2.4
   
 



[38/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
index fcc4f0b..2872f85 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
@@ -1235,14 +1235,14 @@ uint32_t ThriftHiveMetastore_get_databases_result::read(::apache::thrift::protoc
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size649;
-::apache::thrift::protocol::TType _etype652;
-xfer += iprot->readListBegin(_etype652, _size649);
-this->success.resize(_size649);
-uint32_t _i653;
-for (_i653 = 0; _i653 < _size649; ++_i653)
+uint32_t _size719;
+::apache::thrift::protocol::TType _etype722;
+xfer += iprot->readListBegin(_etype722, _size719);
+this->success.resize(_size719);
+uint32_t _i723;
+for (_i723 = 0; _i723 < _size719; ++_i723)
 {
-  xfer += iprot->readString(this->success[_i653]);
+  xfer += iprot->readString(this->success[_i723]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1281,10 +1281,10 @@ uint32_t ThriftHiveMetastore_get_databases_result::write(::apache::thrift::proto
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter654;
-      for (_iter654 = this->success.begin(); _iter654 != this->success.end(); ++_iter654)
+      std::vector<std::string> ::const_iterator _iter724;
+      for (_iter724 = this->success.begin(); _iter724 != this->success.end(); ++_iter724)
      {
-        xfer += oprot->writeString((*_iter654));
+        xfer += oprot->writeString((*_iter724));
   }
   xfer += oprot->writeListEnd();
 }
@@ -1328,14 +1328,14 @@ uint32_t ThriftHiveMetastore_get_databases_presult::read(::apache::thrift::proto
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 (*(this->success)).clear();
-uint32_t _size655;
-::apache::thrift::protocol::TType _etype658;
-xfer += iprot->readListBegin(_etype658, _size655);
-(*(this->success)).resize(_size655);
-uint32_t _i659;
-for (_i659 = 0; _i659 < _size655; ++_i659)
+uint32_t _size725;
+::apache::thrift::protocol::TType _etype728;
+xfer += iprot->readListBegin(_etype728, _size725);
+(*(this->success)).resize(_size725);
+uint32_t _i729;
+for (_i729 = 0; _i729 < _size725; ++_i729)
 {
-  xfer += iprot->readString((*(this->success))[_i659]);
+  xfer += iprot->readString((*(this->success))[_i729]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1452,14 +1452,14 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::read(::apache::thrift::pr
 if (ftype == ::apache::thrift::protocol::T_LIST) {
   {
 this->success.clear();
-uint32_t _size660;
-::apache::thrift::protocol::TType _etype663;
-xfer += iprot->readListBegin(_etype663, _size660);
-this->success.resize(_size660);
-uint32_t _i664;
-for (_i664 = 0; _i664 < _size660; ++_i664)
+uint32_t _size730;
+::apache::thrift::protocol::TType _etype733;
+xfer += iprot->readListBegin(_etype733, _size730);
+this->success.resize(_size730);
+uint32_t _i734;
+for (_i734 = 0; _i734 < _size730; ++_i734)
 {
-  xfer += iprot->readString(this->success[_i664]);
+  xfer += iprot->readString(this->success[_i734]);
 }
 xfer += iprot->readListEnd();
   }
@@ -1498,10 +1498,10 @@ uint32_t ThriftHiveMetastore_get_all_databases_result::write(::apache::thrift::p
     xfer += oprot->writeFieldBegin("success", ::apache::thrift::protocol::T_LIST, 0);
     {
       xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRING, static_cast<uint32_t>(this->success.size()));
-      std::vector<std::string> ::const_iterator _iter665;
-      for (_iter665 = this->success.begin(); _iter665 != this->success.end(); ++_iter665)
+      std::vector<std::string> ::const_iterator _iter735;
+      for (_iter735 = this->success.begin(); _iter735 != this->success.end(); ++_iter735)
      {
-        xfer += oprot->writeString((*_iter665));
+        xfer += oprot->writeString((*_iter735));
   }
   xfer += oprot->writeListEnd();
 }
@@ 

[44/50] [abbrv] hive git commit: HIVE-11694: Exclude hbase-metastore for hadoop-1 (Daniel Dai reviewed by Thejas Nair)

2015-09-21 Thread daijy
HIVE-11694: Exclude hbase-metastore for hadoop-1 (Daniel Dai reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3d170cae
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3d170cae
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3d170cae

Branch: refs/heads/master
Commit: 3d170cae5f41b0a3928117328aeaac3aefbc7cef
Parents: fbbb7cf
Author: Daniel Dai 
Authored: Tue Sep 1 12:43:42 2015 -0700
Committer: Daniel Dai 
Committed: Tue Sep 1 12:43:42 2015 -0700

--
 metastore/pom.xml | 17 +
 1 file changed, 17 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3d170cae/metastore/pom.xml
--
diff --git a/metastore/pom.xml b/metastore/pom.xml
index ccec9f1..255726c 100644
--- a/metastore/pom.xml
+++ b/metastore/pom.xml
@@ -159,6 +159,23 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <version>2.3.2</version>
+            <configuration>
+              <excludes>
+                <exclude>**/hbase/**</exclude>
+              </excludes>
+              <testExcludes>
+                <testExclude>**/hbase/**</testExclude>
+              </testExcludes>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
       <dependencies>
         <dependency>
           <groupId>org.apache.hadoop</groupId>



[46/50] [abbrv] hive git commit: HIVE-11731: Exclude hbase-metastore in itests for hadoop-1

2015-09-21 Thread daijy
HIVE-11731: Exclude hbase-metastore in itests for hadoop-1


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/848b9770
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/848b9770
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/848b9770

Branch: refs/heads/master
Commit: 848b9770acb5cf47dd1f7307c88a1f80490fcf76
Parents: 8b0eded
Author: Daniel Dai 
Authored: Thu Sep 3 21:56:59 2015 -0700
Committer: Daniel Dai 
Committed: Thu Sep 3 21:56:59 2015 -0700

--
 itests/hive-unit/pom.xml| 14 +++
 .../metastore/hbase/HBaseIntegrationTests.java  | 13 +-
 itests/qtest/pom.xml|  7 ++
 itests/util/pom.xml | 20 
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 25 ++--
 5 files changed, 49 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/848b9770/itests/hive-unit/pom.xml
--
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index 26b5751..5295840 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -173,6 +173,20 @@
   <profiles>
     <profile>
      <id>hadoop-1</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <version>2.3.2</version>
+            <configuration>
+              <excludes>
+                <exclude>**/metastore/hbase/**</exclude>
+              </excludes>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/848b9770/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
index c369058..02e481a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
@@ -64,18 +64,7 @@ public class HBaseIntegrationTests {
 utility.startMiniCluster();
     conf = new HiveConf(utility.getConfiguration(), HBaseIntegrationTests.class);
 admin = utility.getHBaseAdmin();
-    for (String tableName : HBaseReadWrite.tableNames) {
-      List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
-      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
-      for (byte[] family : families) {
-        HColumnDescriptor columnDesc = new HColumnDescriptor(family);
-        if (testingTephra) columnDesc.setMaxVersions(Integer.MAX_VALUE);
-        desc.addFamily(columnDesc);
-      }
-      if (testingTephra) desc.addCoprocessor(TransactionProcessor.class.getName());
-      admin.createTable(desc);
-    }
-    admin.close();
+    HBaseStoreTestUtil.initHBaseMetastore(admin, null);
   }
 
   protected static void shutdownMiniCluster() throws Exception {

http://git-wip-us.apache.org/repos/asf/hive/blob/848b9770/itests/qtest/pom.xml
--
diff --git a/itests/qtest/pom.xml b/itests/qtest/pom.xml
index 122e3f6..739d06a 100644
--- a/itests/qtest/pom.xml
+++ b/itests/qtest/pom.xml
@@ -80,6 +80,13 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-it-unit</artifactId>
+      <version>${project.version}</version>
+      <classifier>tests</classifier>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-it-util</artifactId>
       <version>${project.version}</version>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/hive/blob/848b9770/itests/util/pom.xml
--
diff --git a/itests/util/pom.xml b/itests/util/pom.xml
index b0818d6..fdab72c 100644
--- a/itests/util/pom.xml
+++ b/itests/util/pom.xml
@@ -97,6 +97,20 @@
   <profiles>
     <profile>
       <id>hadoop-1</id>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-compiler-plugin</artifactId>
+            <version>2.3.2</version>
+            <configuration>
+              <excludes>
+                <exclude>**/metastore/hbase/**</exclude>
+              </excludes>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
@@ -120,6 +134,12 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <version>${hbase.hadoop1.version}</version>
+      <classifier>tests</classifier>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-server</artifactId>
       <version>${hbase.hadoop1.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/848b9770/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git 

[28/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
--
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
index dd75b01..f89320f 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
@@ -1024,6 +1024,34 @@ class Iface(fb303.FacebookService.Iface):
   def flushCache(self):
 pass
 
+  def get_file_metadata_by_expr(self, req):
+"""
+Parameters:
+ - req
+"""
+pass
+
+  def get_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+pass
+
+  def put_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+pass
+
+  def clear_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+pass
+
 
 class Client(fb303.FacebookService.Client, Iface):
   """
@@ -5616,6 +5644,130 @@ class Client(fb303.FacebookService.Client, Iface):
 iprot.readMessageEnd()
 return
 
+  def get_file_metadata_by_expr(self, req):
+"""
+Parameters:
+ - req
+"""
+self.send_get_file_metadata_by_expr(req)
+return self.recv_get_file_metadata_by_expr()
+
+  def send_get_file_metadata_by_expr(self, req):
+self._oprot.writeMessageBegin('get_file_metadata_by_expr', TMessageType.CALL, self._seqid)
+args = get_file_metadata_by_expr_args()
+args.req = req
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_get_file_metadata_by_expr(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = get_file_metadata_by_expr_result()
+result.read(iprot)
+iprot.readMessageEnd()
+if result.success is not None:
+  return result.success
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata_by_expr failed: unknown result");
+
+  def get_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+self.send_get_file_metadata(req)
+return self.recv_get_file_metadata()
+
+  def send_get_file_metadata(self, req):
+self._oprot.writeMessageBegin('get_file_metadata', TMessageType.CALL, self._seqid)
+args = get_file_metadata_args()
+args.req = req
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_get_file_metadata(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = get_file_metadata_result()
+result.read(iprot)
+iprot.readMessageEnd()
+if result.success is not None:
+  return result.success
+raise TApplicationException(TApplicationException.MISSING_RESULT, "get_file_metadata failed: unknown result");
+
+  def put_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+self.send_put_file_metadata(req)
+return self.recv_put_file_metadata()
+
+  def send_put_file_metadata(self, req):
+self._oprot.writeMessageBegin('put_file_metadata', TMessageType.CALL, self._seqid)
+args = put_file_metadata_args()
+args.req = req
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_put_file_metadata(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = put_file_metadata_result()
+result.read(iprot)
+iprot.readMessageEnd()
+if result.success is not None:
+  return result.success
+raise TApplicationException(TApplicationException.MISSING_RESULT, "put_file_metadata failed: unknown result");
+
+  def clear_file_metadata(self, req):
+"""
+Parameters:
+ - req
+"""
+self.send_clear_file_metadata(req)
+return self.recv_clear_file_metadata()
+
+  def send_clear_file_metadata(self, req):
+self._oprot.writeMessageBegin('clear_file_metadata', TMessageType.CALL, self._seqid)
+args = clear_file_metadata_args()
+args.req = req
+args.write(self._oprot)
+self._oprot.writeMessageEnd()
+self._oprot.trans.flush()
+
+  def recv_clear_file_metadata(self):
+iprot = self._iprot
+(fname, mtype, rseqid) = iprot.readMessageBegin()
+if mtype == TMessageType.EXCEPTION:
+  x = TApplicationException()
+  x.read(iprot)
+  iprot.readMessageEnd()
+  raise x
+result = 

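The generated Python client above mirrors the generated Java client shown later in this series. For orientation, a minimal Java-side sketch of calling the new API (host, port, and the request's field names are assumptions based on the thrift structs shown elsewhere in this digest, not verbatim from this patch):

    // Hypothetical sketch: call get_file_metadata through the generated client.
    import org.apache.hadoop.hive.metastore.api.GetFileMetadataRequest;
    import org.apache.hadoop.hive.metastore.api.GetFileMetadataResult;
    import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TSocket;
    import org.apache.thrift.transport.TTransport;
    import java.util.Arrays;

    public class FileMetadataClientSketch {
      public static void main(String[] args) throws Exception {
        TTransport transport = new TSocket("localhost", 9083);  // assumed metastore endpoint
        transport.open();
        ThriftHiveMetastore.Client client =
            new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
        GetFileMetadataRequest req = new GetFileMetadataRequest();
        req.setFileIds(Arrays.asList(1L, 2L));  // illustrative file ids
        GetFileMetadataResult result = client.get_file_metadata(req);
        System.out.println(result);
        transport.close();
      }
    }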
[26/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
index 5469108..f821459 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelDelegationTokenResp.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCancelDelegationTokenResp implements org.apache.thrift.TBase<TCancelDelegationTokenResp, TCancelDelegationTokenResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelDelegationTokenResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelDelegationTokenResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
index 83d191e..e63145a 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationReq.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCancelOperationReq implements org.apache.thrift.TBase<TCancelOperationReq, TCancelOperationReq._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelOperationReq> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelOperationReq");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
index b8d96df..56c9e76 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCancelOperationResp.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCancelOperationResp implements org.apache.thrift.TBase<TCancelOperationResp, TCancelOperationResp._Fields>, java.io.Serializable, Cloneable, Comparable<TCancelOperationResp> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCancelOperationResp");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
--
diff --git a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
index ca68866..6ad5446 100644
--- a/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
+++ b/service/src/gen/thrift/gen-javabean/org/apache/hive/service/cli/thrift/TCloseOperationReq.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class TCloseOperationReq implements 

[20/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
HIVE-11568 : merge master into branch (Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c528294b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c528294b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c528294b

Branch: refs/heads/master
Commit: c528294bc99c8ba05dd0a3c3f39cc06fb27b9473
Parents: 0fa45e4 e8b2c60
Author: Sergey Shelukhin 
Authored: Fri Aug 14 15:58:43 2015 -0700
Committer: Sergey Shelukhin 
Committed: Fri Aug 14 15:58:43 2015 -0700

--
 accumulo-handler/pom.xml| 4 -
 .../apache/hadoop/hive/ant/GenVectorCode.java   |   105 +
 .../src/main/resources/beeline-log4j.properties |24 -
 beeline/src/main/resources/beeline-log4j2.xml   |40 +
 bin/ext/beeline.sh  | 2 +-
 bin/hive| 3 +
 .../hadoop/hive/cli/TestOptionsProcessor.java   | 1 -
 common/pom.xml  |27 +-
 .../apache/hadoop/hive/common/JavaUtils.java|11 +-
 .../org/apache/hadoop/hive/common/LogUtils.java |18 +-
 .../hadoop/hive/common/ValidReadTxnList.java| 2 +-
 .../hadoop/hive/common/type/HiveDecimal.java|   306 -
 .../org/apache/hadoop/hive/conf/HiveConf.java   |27 +-
 common/src/main/resources/hive-log4j.properties |88 -
 common/src/main/resources/hive-log4j2.xml   |   111 +
 .../hadoop/hive/conf/TestHiveLogging.java   | 8 +-
 .../resources/hive-exec-log4j-test.properties   |59 -
 .../test/resources/hive-exec-log4j2-test.xml|86 +
 .../test/resources/hive-log4j-test.properties   |71 -
 common/src/test/resources/hive-log4j2-test.xml  |95 +
 data/conf/hive-log4j-old.properties |82 -
 data/conf/hive-log4j.properties |97 -
 data/conf/hive-log4j2.xml   |   148 +
 data/conf/spark/log4j.properties|24 -
 data/conf/spark/log4j2.xml  |74 +
 docs/xdocs/language_manual/cli.xml  | 2 +-
 .../test/results/positive/hbase_timestamp.q.out | 8 +-
 hcatalog/bin/hcat_server.sh | 2 +-
 hcatalog/bin/templeton.cmd  | 4 +-
 .../mapreduce/DefaultOutputFormatContainer.java | 7 +-
 ...namicPartitionFileRecordWriterContainer.java | 3 +-
 .../mapreduce/FileOutputFormatContainer.java| 3 +-
 .../hive/hcatalog/mapreduce/PartInfo.java   |32 +-
 .../hive/hcatalog/mapreduce/SpecialCases.java   | 8 +-
 .../mapreduce/TestHCatMultiOutputFormat.java| 6 +-
 hcatalog/scripts/hcat_server_start.sh   | 2 +-
 .../content/xdocs/configuration.xml | 2 +-
 .../src/documentation/content/xdocs/install.xml | 2 +-
 .../deployers/config/hive/hive-log4j.properties |88 -
 .../deployers/config/hive/hive-log4j2.xml   |   111 +
 .../templeton/deployers/start_hive_services.sh  | 2 +-
 .../webhcat/svr/src/main/bin/webhcat_server.sh  | 4 +-
 .../src/main/config/webhcat-log4j.properties|45 -
 .../svr/src/main/config/webhcat-log4j2.xml  |75 +
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4 |   164 +-
 .../main/java/org/apache/hive/hplsql/Conn.java  | 6 +
 .../java/org/apache/hive/hplsql/Converter.java  |41 +-
 .../main/java/org/apache/hive/hplsql/Exec.java  |65 +-
 .../java/org/apache/hive/hplsql/Expression.java |73 +-
 .../main/java/org/apache/hive/hplsql/Meta.java  |98 +
 .../main/java/org/apache/hive/hplsql/Query.java |55 +
 .../java/org/apache/hive/hplsql/Select.java |47 +-
 .../java/org/apache/hive/hplsql/Signal.java | 2 +-
 .../main/java/org/apache/hive/hplsql/Stmt.java  |97 +-
 .../main/java/org/apache/hive/hplsql/Var.java   |43 +-
 .../apache/hive/hplsql/functions/Function.java  |49 +-
 .../hive/hplsql/functions/FunctionDatetime.java |14 +-
 .../hive/hplsql/functions/FunctionMisc.java |22 +-
 .../hive/hplsql/functions/FunctionOra.java  |31 +-
 .../hive/hplsql/functions/FunctionString.java   |46 +-
 .../org/apache/hive/hplsql/TestHplsqlLocal.java | 5 +
 .../apache/hive/hplsql/TestHplsqlOffline.java   |76 +
 .../test/queries/db/create_procedure_mssql.sql  |52 +
 .../src/test/queries/db/cursor_attributes.sql   |60 +
 hplsql/src/test/queries/db/map_object.sql   | 9 +
 hplsql/src/test/queries/db/select_into.sql  |17 +
 .../src/test/queries/db/set_current_schema.sql  | 6 +
 hplsql/src/test/queries/db/sys_refcursor.sql|65 +
 hplsql/src/test/queries/db/use.sql  | 2 +
 .../queries/local/exception_divide_by_zero.sql  |11 +
 .../test/queries/offline/create_table_mssql.sql |43 +
 .../test/queries/offline/create_table_ora.sql   | 4 +
 .../results/db/create_procedure_mssql.out.txt   |

[48/50] [abbrv] hive git commit: HIVE-11743: HBase Port conflict for MiniHBaseCluster

2015-09-21 Thread daijy
HIVE-11743: HBase Port conflict for MiniHBaseCluster


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/76828e0a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/76828e0a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/76828e0a

Branch: refs/heads/master
Commit: 76828e0ade2e55d30b0680c7312737a8212a158f
Parents: 757553e
Author: Daniel Dai 
Authored: Sat Sep 5 11:12:21 2015 -0700
Committer: Daniel Dai 
Committed: Sat Sep 5 11:12:21 2015 -0700

--
 .../hadoop/hive/metastore/hbase/HBaseIntegrationTests.java   | 8 +++-
 .../src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java   | 6 +-
 2 files changed, 12 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/76828e0a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
index 02e481a..5b82579 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/HBaseIntegrationTests.java
@@ -19,8 +19,11 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 import co.cask.tephra.hbase10.coprocessor.TransactionProcessor;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -32,6 +35,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 import org.apache.hadoop.hive.ql.session.SessionState;
+
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.List;
@@ -60,7 +64,9 @@ public class HBaseIntegrationTests {
 if (testingTephra) {
   LOG.info("Testing with Tephra");
 }
-utility = new HBaseTestingUtility();
+Configuration hbaseConf = HBaseConfiguration.create();
+hbaseConf.setInt("hbase.master.info.port", -1);
+utility = new HBaseTestingUtility(hbaseConf);
 utility.startMiniCluster();
     conf = new HiveConf(utility.getConfiguration(), HBaseIntegrationTests.class);
 admin = utility.getHBaseAdmin();

http://git-wip-us.apache.org/repos/asf/hive/blob/76828e0a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 24a85cf..16e73c9 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -62,9 +62,11 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
@@ -348,7 +350,9 @@ public class QTestUtil {
   }
 
   private void startMiniHBaseCluster() throws Exception {
-utility = new HBaseTestingUtility();
+Configuration hbaseConf = HBaseConfiguration.create();
+hbaseConf.setInt("hbase.master.info.port", -1);
+utility = new HBaseTestingUtility(hbaseConf);
 utility.startMiniCluster();
 conf = new HiveConf(utility.getConfiguration(), Driver.class);
 HBaseAdmin admin = utility.getHBaseAdmin();


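A note on the fix above: setting an HBase info-port property to -1 disables that daemon's embedded web UI entirely, which is what removes the port clash between concurrently running mini clusters. A minimal sketch of the pattern (the regionserver property is an extra assumption; the patch itself only touches hbase.master.info.port):

    // Sketch: start an HBase mini cluster with web UIs disabled.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HBaseTestingUtility;

    public class MiniClusterNoUi {
      public static HBaseTestingUtility start() throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.setInt("hbase.master.info.port", -1);        // disable master web UI
        conf.setInt("hbase.regionserver.info.port", -1);  // assumed: disable RS web UI too
        HBaseTestingUtility util = new HBaseTestingUtility(conf);
        util.startMiniCluster();
        return util;
      }
    }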

[50/50] [abbrv] hive git commit: HIVE-11711: Merge hbase-metastore branch to trunk

2015-09-21 Thread daijy
HIVE-11711: Merge hbase-metastore branch to trunk


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/52383033
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/52383033
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/52383033

Branch: refs/heads/master
Commit: 52383033822b87b02853eaaf15db1b2904617615
Parents: 514ab79 4c17ecf
Author: Daniel Dai 
Authored: Mon Sep 21 22:02:22 2015 -0700
Committer: Daniel Dai 
Committed: Mon Sep 21 22:02:22 2015 -0700

--
 .../apache/hadoop/hive/ant/QTestGenTask.java|11 +
 bin/ext/hbaseimport.cmd |35 +
 bin/ext/hbaseimport.sh  |27 +
 bin/ext/hbaseschematool.sh  |27 +
 .../apache/hadoop/hive/common/ObjectPair.java   | 5 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |98 +-
 .../apache/hive/common/util/BloomFilter.java|20 +-
 data/conf/tez/hive-site.xml |10 +
 itests/hive-unit/pom.xml|35 +
 .../hadoop/hive/metastore/TestAdminUser.java| 4 +-
 .../hive/metastore/TestHiveMetaStore.java   | 3 +
 .../metastore/hbase/HBaseIntegrationTests.java  |   117 +
 .../TestHBaseAggrStatsCacheIntegration.java |   691 +
 .../hive/metastore/hbase/TestHBaseImport.java   |   650 +
 .../metastore/hbase/TestHBaseMetastoreSql.java  |   223 +
 .../hbase/TestHBaseStoreIntegration.java|  1794 +
 .../hbase/TestStorageDescriptorSharing.java |   191 +
 itests/qtest/pom.xml|10 +-
 itests/util/pom.xml |32 +
 .../metastore/hbase/HBaseStoreTestUtil.java |45 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java|41 +-
 metastore/if/hive_metastore.thrift  |54 +
 metastore/pom.xml   |82 +
 .../metastore/hbase/HbaseMetastoreProto.java| 34901 +
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  |  6919 ++--
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h|   664 +
 .../ThriftHiveMetastore_server.skeleton.cpp |25 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp |  1294 +-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |   371 +
 .../hive/metastore/api/AbortTxnRequest.java | 2 +-
 .../metastore/api/AddDynamicPartitions.java | 2 +-
 .../metastore/api/AddPartitionsRequest.java | 2 +-
 .../hive/metastore/api/AddPartitionsResult.java | 2 +-
 .../hadoop/hive/metastore/api/AggrStats.java| 2 +-
 .../metastore/api/AlreadyExistsException.java   | 2 +-
 .../metastore/api/BinaryColumnStatsData.java| 2 +-
 .../metastore/api/BooleanColumnStatsData.java   | 2 +-
 .../hive/metastore/api/CheckLockRequest.java| 2 +-
 .../metastore/api/ClearFileMetadataRequest.java |   438 +
 .../metastore/api/ClearFileMetadataResult.java  |   283 +
 .../hive/metastore/api/ColumnStatistics.java| 2 +-
 .../metastore/api/ColumnStatisticsDesc.java | 2 +-
 .../hive/metastore/api/ColumnStatisticsObj.java | 2 +-
 .../hive/metastore/api/CommitTxnRequest.java| 2 +-
 .../hive/metastore/api/CompactionRequest.java   | 2 +-
 .../api/ConfigValSecurityException.java | 2 +-
 .../api/CurrentNotificationEventId.java | 2 +-
 .../hadoop/hive/metastore/api/Database.java | 2 +-
 .../apache/hadoop/hive/metastore/api/Date.java  | 2 +-
 .../hive/metastore/api/DateColumnStatsData.java | 2 +-
 .../hadoop/hive/metastore/api/Decimal.java  | 2 +-
 .../metastore/api/DecimalColumnStatsData.java   | 2 +-
 .../metastore/api/DoubleColumnStatsData.java| 2 +-
 .../hive/metastore/api/DropPartitionsExpr.java  | 2 +-
 .../metastore/api/DropPartitionsRequest.java| 2 +-
 .../metastore/api/DropPartitionsResult.java | 2 +-
 .../hive/metastore/api/EnvironmentContext.java  | 2 +-
 .../hadoop/hive/metastore/api/FieldSchema.java  | 2 +-
 .../hive/metastore/api/FireEventRequest.java| 2 +-
 .../hive/metastore/api/FireEventResponse.java   | 2 +-
 .../hadoop/hive/metastore/api/Function.java | 2 +-
 .../metastore/api/GetAllFunctionsResponse.java  |38 +-
 .../api/GetFileMetadataByExprRequest.java   |   548 +
 .../api/GetFileMetadataByExprResult.java|   703 +
 .../metastore/api/GetFileMetadataRequest.java   |   438 +
 .../metastore/api/GetFileMetadataResult.java|   540 +
 .../metastore/api/GetOpenTxnsInfoResponse.java  | 2 +-
 .../hive/metastore/api/GetOpenTxnsResponse.java | 2 +-
 .../api/GetPrincipalsInRoleRequest.java | 2 +-
 .../api/GetPrincipalsInRoleResponse.java| 2 +-
 .../api/GetRoleGrantsForPrincipalRequest.java   | 2 +-
 .../api/GetRoleGrantsForPrincipalResponse.java  | 2 +-
 

[33/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
new file mode 100644
index 000..874ea82
--- /dev/null
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PutFileMetadataRequest.java
@@ -0,0 +1,588 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
+public class PutFileMetadataRequest implements org.apache.thrift.TBase<PutFileMetadataRequest, PutFileMetadataRequest._Fields>, java.io.Serializable, Cloneable, Comparable<PutFileMetadataRequest> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("PutFileMetadataRequest");
+
+  private static final org.apache.thrift.protocol.TField FILE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("fileIds", org.apache.thrift.protocol.TType.LIST, (short)1);
+  private static final org.apache.thrift.protocol.TField METADATA_FIELD_DESC = new org.apache.thrift.protocol.TField("metadata", org.apache.thrift.protocol.TType.LIST, (short)2);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new PutFileMetadataRequestStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new PutFileMetadataRequestTupleSchemeFactory());
+  }
+
+  private List<Long> fileIds; // required
+  private List<ByteBuffer> metadata; // required
+
+  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+FILE_IDS((short)1, "fileIds"),
+METADATA((short)2, "metadata");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+static {
+  for (_Fields field : EnumSet.allOf(_Fields.class)) {
+byName.put(field.getFieldName(), field);
+  }
+}
+
+/**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+public static _Fields findByThriftId(int fieldId) {
+  switch(fieldId) {
+case 1: // FILE_IDS
+  return FILE_IDS;
+case 2: // METADATA
+  return METADATA;
+default:
+  return null;
+  }
+}
+
+/**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+public static _Fields findByThriftIdOrThrow(int fieldId) {
+  _Fields fields = findByThriftId(fieldId);
+  if (fields == null) throw new IllegalArgumentException("Field " + 
fieldId + " doesn't exist!");
+  return fields;
+}
+
+/**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+public static _Fields findByName(String name) {
+  return byName.get(name);
+}
+
+private final short _thriftId;
+private final String _fieldName;
+
+_Fields(short thriftId, String fieldName) {
+  _thriftId = thriftId;
+  _fieldName = fieldName;
+}
+
+public short getThriftFieldId() {
+  return _thriftId;
+}
+
+public String getFieldName() {
+  return _fieldName;
+}
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+  static {
+    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+

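Given the fields visible above (fileIds, metadata), a hedged sketch of constructing this request (payload contents are illustrative; real callers would pass serialized file footer metadata):

    // Hypothetical sketch, not verbatim from the patch.
    import org.apache.hadoop.hive.metastore.api.PutFileMetadataRequest;
    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;

    public class PutFileMetadataSketch {
      public static PutFileMetadataRequest build() {
        PutFileMetadataRequest req = new PutFileMetadataRequest();
        req.setFileIds(Arrays.asList(42L));  // ids of files whose metadata is cached
        req.setMetadata(Arrays.asList(
            ByteBuffer.wrap("footer-bytes".getBytes(StandardCharsets.UTF_8))));
        return req;
      }
    }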
[17/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
--
diff --cc metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index e58c146,cdbae95..72b2cc3
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@@ -31,6 -33,8 +33,8 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
  public class ThriftHiveMetastore {
  
/**
@@@ -290,254 -294,254 +296,256 @@@
  
   public interface AsyncIface extends com.facebook.fb303.FacebookService .AsyncIface {
  
- public void getMetaConf(String key, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.getMetaConf_call> resultHandler) throws org.apache.thrift.TException;
+ public void getMetaConf(String key, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+ 
+ public void setMetaConf(String key, String value, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void setMetaConf(String key, String value, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.setMetaConf_call> resultHandler) throws org.apache.thrift.TException;
+ public void create_database(Database database, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void create_database(Database database, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.create_database_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_database(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_database(String name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_database_call> resultHandler) throws org.apache.thrift.TException;
+ public void drop_database(String name, boolean deleteData, boolean cascade, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void drop_database(String name, boolean deleteData, boolean cascade, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_database_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_databases(String pattern, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_databases(String pattern, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_databases_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_all_databases(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_all_databases(org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_all_databases_call> resultHandler) throws org.apache.thrift.TException;
+ public void alter_database(String dbname, Database db, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void alter_database(String dbname, Database db, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.alter_database_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_type(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_type(String name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_type_call> resultHandler) throws org.apache.thrift.TException;
+ public void create_type(Type type, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void create_type(Type type, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.create_type_call> resultHandler) throws org.apache.thrift.TException;
+ public void drop_type(String type, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void drop_type(String type, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.drop_type_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_type_all(String name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_type_all(String name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_type_all_call> resultHandler) throws org.apache.thrift.TException;
+ public void get_fields(String db_name, String table_name, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
  
- public void get_fields(String db_name, String table_name, org.apache.thrift.async.AsyncMethodCallback<AsyncClient.get_fields_call> resultHandler) throws 

[32/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
--
diff --git a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index 72b2cc3..9d72cd0 100644
--- a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class ThriftHiveMetastore {
 
   /**
@@ -292,6 +292,14 @@ public class ThriftHiveMetastore {
 
 public void flushCache() throws org.apache.thrift.TException;
 
+    public GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req) throws org.apache.thrift.TException;
+
+    public GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws org.apache.thrift.TException;
+
+    public PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws org.apache.thrift.TException;
+
+    public ClearFileMetadataResult clear_file_metadata(ClearFileMetadataRequest req) throws org.apache.thrift.TException;
+
   }
 
  public interface AsyncIface extends com.facebook.fb303.FacebookService .AsyncIface {
@@ -546,6 +554,14 @@ public class ThriftHiveMetastore {
 
    public void flushCache(org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
 
+    public void get_file_metadata_by_expr(GetFileMetadataByExprRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void get_file_metadata(GetFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void put_file_metadata(PutFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+    public void clear_file_metadata(ClearFileMetadataRequest req, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
   }
 
  public static class Client extends com.facebook.fb303.FacebookService.Client implements Iface {
@@ -4242,6 +4258,98 @@ public class ThriftHiveMetastore {
   return;
 }
 
+    public GetFileMetadataByExprResult get_file_metadata_by_expr(GetFileMetadataByExprRequest req) throws org.apache.thrift.TException
+    {
+      send_get_file_metadata_by_expr(req);
+      return recv_get_file_metadata_by_expr();
+    }
+
+    public void send_get_file_metadata_by_expr(GetFileMetadataByExprRequest req) throws org.apache.thrift.TException
+    {
+      get_file_metadata_by_expr_args args = new get_file_metadata_by_expr_args();
+      args.setReq(req);
+      sendBase("get_file_metadata_by_expr", args);
+    }
+
+    public GetFileMetadataByExprResult recv_get_file_metadata_by_expr() throws org.apache.thrift.TException
+    {
+      get_file_metadata_by_expr_result result = new get_file_metadata_by_expr_result();
+      receiveBase(result, "get_file_metadata_by_expr");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_file_metadata_by_expr failed: unknown result");
+    }
+
+    public GetFileMetadataResult get_file_metadata(GetFileMetadataRequest req) throws org.apache.thrift.TException
+    {
+      send_get_file_metadata(req);
+      return recv_get_file_metadata();
+    }
+
+    public void send_get_file_metadata(GetFileMetadataRequest req) throws org.apache.thrift.TException
+    {
+      get_file_metadata_args args = new get_file_metadata_args();
+      args.setReq(req);
+      sendBase("get_file_metadata", args);
+    }
+
+    public GetFileMetadataResult recv_get_file_metadata() throws org.apache.thrift.TException
+    {
+      get_file_metadata_result result = new get_file_metadata_result();
+      receiveBase(result, "get_file_metadata");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "get_file_metadata failed: unknown result");
+    }
+
+    public PutFileMetadataResult put_file_metadata(PutFileMetadataRequest req) throws org.apache.thrift.TException
+    {
+      send_put_file_metadata(req);
+      return 

[12/50] [abbrv] hive git commit: HIVE-11382 Invalidate aggregate column stats on alter partition (gates)

2015-09-21 Thread daijy
HIVE-11382 Invalidate aggregate column stats on alter partition (gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7e7f461b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7e7f461b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7e7f461b

Branch: refs/heads/master
Commit: 7e7f461b0ba86e40224564e0ad1e320c4f6d62b3
Parents: 9d3d4eb
Author: Alan Gates 
Authored: Thu Jul 30 10:12:35 2015 -0700
Committer: Alan Gates 
Committed: Thu Jul 30 10:12:35 2015 -0700

--
 .../TestHBaseAggrStatsCacheIntegration.java | 192 +++
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   7 +
 2 files changed, 199 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7e7f461b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
index 7e6a2ef..ad76b2e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
@@ -496,4 +496,196 @@ public class TestHBaseAggrStatsCacheIntegration extends HBaseIntegrationTests {
       store.backdoor().getStatsCache().wakeInvalidator();
     }
   }
+
+  @Test
+  public void alterInvalidation() throws Exception {
+    try {
+      String dbName = "default";
+      String tableName = "ai";
+      List<String> partVals1 = Arrays.asList("today");
+      List<String> partVals2 = Arrays.asList("yesterday");
+      List<String> partVals3 = Arrays.asList("tomorrow");
+      long now = System.currentTimeMillis();
+
+      List<FieldSchema> cols = new ArrayList<>();
+      cols.add(new FieldSchema("col1", "boolean", "nocomment"));
+      SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
+      StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
+          serde, null, null, Collections.<String, String>emptyMap());
+      List<FieldSchema> partCols = new ArrayList<>();
+      partCols.add(new FieldSchema("ds", "string", ""));
+      Table table = new Table(tableName, dbName, "me", (int) now, (int) now, 0, sd, partCols,
+          Collections.<String, String>emptyMap(), null, null, null);
+      store.createTable(table);
+
+      Partition[] partitions = new Partition[3];
+      int partnum = 0;
+      for (List<String> partVals : Arrays.asList(partVals1, partVals2, partVals3)) {
+        StorageDescriptor psd = new StorageDescriptor(sd);
+        psd.setLocation("file:/tmp/default/invalidation/ds=" + partVals.get(0));
+        Partition part = new Partition(partVals, dbName, tableName, (int) now, (int) now, psd,
+            Collections.<String, String>emptyMap());
+        partitions[partnum++] = part;
+        store.addPartition(part);
+
+        ColumnStatistics cs = new ColumnStatistics();
+        ColumnStatisticsDesc desc = new ColumnStatisticsDesc(false, dbName, tableName);
+        desc.setLastAnalyzed(now);
+        desc.setPartName("ds=" + partVals.get(0));
+        cs.setStatsDesc(desc);
+        ColumnStatisticsObj obj = new ColumnStatisticsObj();
+        obj.setColName("col1");
+        obj.setColType("boolean");
+        ColumnStatisticsData data = new ColumnStatisticsData();
+        BooleanColumnStatsData bcsd = new BooleanColumnStatsData();
+        bcsd.setNumFalses(10);
+        bcsd.setNumTrues(20);
+        bcsd.setNumNulls(30);
+        data.setBooleanStats(bcsd);
+        obj.setStatsData(data);
+        cs.addToStatsObj(obj);
+
+        store.updatePartitionColumnStatistics(cs, partVals);
+      }
+
+      AggrStats aggrStats = store.get_aggr_stats_for(dbName, tableName,
+          Arrays.asList("ds=today", "ds=tomorrow"), Arrays.asList("col1"));
+      aggrStats = store.get_aggr_stats_for(dbName, tableName,
+          Arrays.asList("ds=today", "ds=yesterday"), Arrays.asList("col1"));
+
+      // Check that we had to build it from the stats
+      Assert.assertEquals(0, store.backdoor().getStatsCache().hbaseHits.getCnt());
+      Assert.assertEquals(2, store.backdoor().getStatsCache().totalGets.getCnt());
+      Assert.assertEquals(2, store.backdoor().getStatsCache().misses.getCnt());
+
+      // wake the invalidator and check again to make sure it isn't too aggressive about
+      // removing our stuff.
+      store.backdoor().getStatsCache().wakeInvalidator();
+
+      Partition newPart = new Partition(partitions[2]);
+  

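The digest truncates the test here. Based on the commit's stated purpose (invalidate aggregate column stats on alter partition), the continuation presumably alters the copied partition and re-checks the cache counters. A hedged sketch of that pattern, not the patch's verbatim code (method names follow the RawStore/HBaseStore APIs used above; values are illustrative):

    // Hypothetical continuation sketch: altering a partition should cause the
    // cached aggregate stats covering it to be invalidated.
    import org.apache.hadoop.hive.metastore.api.Partition;
    import org.apache.hadoop.hive.metastore.hbase.HBaseStore;
    import java.util.List;

    public class AlterInvalidationSketch {
      static void alterAndInvalidate(HBaseStore store, String dbName, String tableName,
          List<String> partVals, Partition template) throws Exception {
        Partition newPart = new Partition(template);          // copy, then mutate
        newPart.setLastAccessTime((int) (System.currentTimeMillis() / 1000));
        store.alterPartition(dbName, tableName, partVals, newPart);
        store.backdoor().getStatsCache().wakeInvalidator();   // force an invalidation pass
      }
    }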
[03/50] [abbrv] hive git commit: HIVE-11294 Use HBase to cache aggregated stats (gates)

2015-09-21 Thread daijy
HIVE-11294 Use HBase to cache aggregated stats (gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c53c6f45
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c53c6f45
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c53c6f45

Branch: refs/heads/master
Commit: c53c6f45988db869d56abe3b1d831ff775f4fa73
Parents: 1a1c0d8
Author: Alan Gates 
Authored: Wed Jul 22 11:17:01 2015 -0700
Committer: Alan Gates 
Committed: Wed Jul 22 11:17:01 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   51 +-
 .../apache/hive/common/util/BloomFilter.java|   20 +-
 .../TestHBaseAggrStatsCacheIntegration.java |  499 +++
 .../metastore/hbase/HbaseMetastoreProto.java| 4189 +-
 .../hbase/AggrStatsInvalidatorFilter.java   |  121 +
 .../hadoop/hive/metastore/hbase/Counter.java|6 +
 .../hive/metastore/hbase/HBaseReadWrite.java|  316 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   47 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |   81 +-
 .../hadoop/hive/metastore/hbase/StatsCache.java |  326 ++
 .../stats/ColumnStatsAggregatorFactory.java |   51 +
 .../metastore/hbase/hbase_metastore_proto.proto |   30 +
 .../hbase/TestHBaseAggregateStatsCache.java |  316 ++
 .../hive/metastore/hbase/TestHBaseStore.java|2 +-
 14 files changed, 5717 insertions(+), 338 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c53c6f45/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 5eb11c2..c42b030 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -18,25 +18,7 @@
 
 package org.apache.hadoop.hive.conf;
 
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.PrintStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Properties;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import javax.security.auth.login.LoginException;
-
+import com.google.common.base.Joiner;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -54,7 +36,23 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.apache.hive.common.HiveCompat;
 
-import com.google.common.base.Joiner;
+import javax.security.auth.login.LoginException;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.PrintStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 /**
  * Hive Configuration.
@@ -417,6 +415,19 @@ public class HiveConf extends Configuration {
 METASTORE_HBASE_CONNECTION_CLASS("hive.metastore.hbase.connection.class",
 "org.apache.hadoop.hive.metastore.hbase.VanillaHBaseConnection",
 "Class used to connection to HBase"),
+    METASTORE_HBASE_AGGR_STATS_CACHE_ENTRIES("hive.metastore.hbase.aggr.stats.cache.entries",
+        1, "How many in stats objects to cache in memory"),
+    METASTORE_HBASE_AGGR_STATS_MEMORY_TTL("hive.metastore.hbase.aggr.stats.memory.ttl", "60s",
+        new TimeValidator(TimeUnit.SECONDS),
+        "Number of seconds stats objects live in memory after they are read from HBase."),
+    METASTORE_HBASE_AGGR_STATS_INVALIDATOR_FREQUENCY(
+        "hive.metastore.hbase.aggr.stats.invalidator.frequency", "5s",
+        new TimeValidator(TimeUnit.SECONDS),
+        "How often the stats cache scans its HBase entries and looks for expired entries"),
+    METASTORE_HBASE_AGGR_STATS_HBASE_TTL("hive.metastore.hbase.aggr.stats.hbase.ttl", "604800s",
+        new TimeValidator(TimeUnit.SECONDS),
+        "Number of seconds stats entries live in HBase cache after they are created.  They may be" +
+        " invalidated by updates or partition drops before this.  Default is one week."),
 
 METASTORETHRIFTCONNECTIONRETRIES("hive.metastore.connect.retries", 3,
 "Number of retries while opening a connection to 

[18/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
index 305e979,1292a64..e8cb821
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class NoSuchObjectException extends TException implements org.apache.thrift.TBase<NoSuchObjectException, NoSuchObjectException._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class NoSuchObjectException extends TException implements org.apache.thrift.TBase<NoSuchObjectException, NoSuchObjectException._Fields>, java.io.Serializable, Cloneable, Comparable<NoSuchObjectException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NoSuchObjectException");
  
  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
index 92dbb7f,d1c430d..9997b93
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchTxnException.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class NoSuchTxnException extends TException implements org.apache.thrift.TBase<NoSuchTxnException, NoSuchTxnException._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class NoSuchTxnException extends TException implements org.apache.thrift.TBase<NoSuchTxnException, NoSuchTxnException._Fields>, java.io.Serializable, Cloneable, Comparable<NoSuchTxnException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NoSuchTxnException");
  
  private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
--
diff --cc 
metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
index f196c1c,bcf4f51..6f594c5
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class NotificationEvent implements org.apache.thrift.TBase<NotificationEvent, NotificationEvent._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class NotificationEvent implements org.apache.thrift.TBase<NotificationEvent, NotificationEvent._Fields>, java.io.Serializable, Cloneable, Comparable<NotificationEvent> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotificationEvent");
  
  private static final org.apache.thrift.protocol.TField EVENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("eventId", org.apache.thrift.protocol.TType.I64, (short)1);


[29/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-php/metastore/Types.php
--
diff --git a/metastore/src/gen/thrift/gen-php/metastore/Types.php 
b/metastore/src/gen/thrift/gen-php/metastore/Types.php
index 949a6e9..0baeef3 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/Types.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/Types.php
@@ -13861,6 +13861,995 @@ class FireEventResponse {
 
 }
 
+class MetadataPpdResult {
+  static $_TSPEC;
+
+  /**
+   * @var string
+   */
+  public $metadata = null;
+  /**
+   * @var string
+   */
+  public $includeBitset = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'metadata',
+  'type' => TType::STRING,
+  ),
+2 => array(
+  'var' => 'includeBitset',
+  'type' => TType::STRING,
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['metadata'])) {
+$this->metadata = $vals['metadata'];
+  }
+  if (isset($vals['includeBitset'])) {
+$this->includeBitset = $vals['includeBitset'];
+  }
+}
+  }
+
+  public function getName() {
+return 'MetadataPpdResult';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  }
+  switch ($fid)
+  {
+case 1:
+  if ($ftype == TType::STRING) {
+$xfer += $input->readString($this->metadata);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 2:
+  if ($ftype == TType::STRING) {
+$xfer += $input->readString($this->includeBitset);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+default:
+  $xfer += $input->skip($ftype);
+  break;
+  }
+  $xfer += $input->readFieldEnd();
+}
+$xfer += $input->readStructEnd();
+return $xfer;
+  }
+
+  public function write($output) {
+$xfer = 0;
+$xfer += $output->writeStructBegin('MetadataPpdResult');
+if ($this->metadata !== null) {
+  $xfer += $output->writeFieldBegin('metadata', TType::STRING, 1);
+  $xfer += $output->writeString($this->metadata);
+  $xfer += $output->writeFieldEnd();
+}
+if ($this->includeBitset !== null) {
+  $xfer += $output->writeFieldBegin('includeBitset', TType::STRING, 2);
+  $xfer += $output->writeString($this->includeBitset);
+  $xfer += $output->writeFieldEnd();
+}
+$xfer += $output->writeFieldStop();
+$xfer += $output->writeStructEnd();
+return $xfer;
+  }
+
+}
+
+class GetFileMetadataByExprResult {
+  static $_TSPEC;
+
+  /**
+   * @var array
+   */
+  public $metadata = null;
+  /**
+   * @var bool
+   */
+  public $isSupported = null;
+  /**
+   * @var int[]
+   */
+  public $unknownFileIds = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'metadata',
+  'type' => TType::MAP,
+  'ktype' => TType::I64,
+  'vtype' => TType::STRUCT,
+  'key' => array(
+'type' => TType::I64,
+  ),
+  'val' => array(
+'type' => TType::STRUCT,
+'class' => '\metastore\MetadataPpdResult',
+),
+  ),
+2 => array(
+  'var' => 'isSupported',
+  'type' => TType::BOOL,
+  ),
+3 => array(
+  'var' => 'unknownFileIds',
+  'type' => TType::LST,
+  'etype' => TType::I64,
+  'elem' => array(
+'type' => TType::I64,
+),
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['metadata'])) {
+$this->metadata = $vals['metadata'];
+  }
+  if (isset($vals['isSupported'])) {
+$this->isSupported = $vals['isSupported'];
+  }
+  if (isset($vals['unknownFileIds'])) {
+$this->unknownFileIds = $vals['unknownFileIds'];
+  }
+}
+  }
+
+  public function getName() {
+return 'GetFileMetadataByExprResult';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  }
+  switch ($fid)
+  {
+case 1:
+  if ($ftype == TType::MAP) {
+$this->metadata = array();
+$_size465 = 0;
+$_ktype466 = 0;
+$_vtype467 = 0;
+ 

[14/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
--
diff --cc 
ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
index 35aa6cb,e621cfa..0a13175
--- 
a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
+++ 
b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class Graph implements org.apache.thrift.TBase<Graph, Graph._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class Graph implements org.apache.thrift.TBase<Graph, Graph._Fields>, java.io.Serializable, Cloneable, Comparable<Graph> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Graph");
  
  private static final org.apache.thrift.protocol.TField NODE_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("nodeType", org.apache.thrift.protocol.TType.I32, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
--
diff --cc 
ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
index f1c9e2d,1b18aab..991974c
--- 
a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
+++ 
b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fields>, java.io.Serializable, Cloneable, Comparable<Operator> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Operator");
  
  private static final org.apache.thrift.protocol.TField OPERATOR_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("operatorId", org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
--
diff --cc 
ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
index e0d77e8,5c5e0f8..f98a7e1
--- 
a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
+++ 
b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class Query implements org.apache.thrift.TBase<Query, Query._Fields>, java.io.Serializable, Cloneable {
+ @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
 -@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
++@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+ public class Query implements org.apache.thrift.TBase<Query, Query._Fields>, java.io.Serializable, Cloneable, Comparable<Query> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Query");
  
  private static final org.apache.thrift.protocol.TField QUERY_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("queryId", org.apache.thrift.protocol.TType.STRING, (short)1);

http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
--
diff --cc 
ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
index e8566a5,d340d58..0994fda
--- 
a/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
+++ 
b/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java
@@@ -31,7 -33,9 +33,9 @@@ import javax.annotation.Generated
  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;
  
- public class QueryPlan implements org.apache.thrift.TBase, 

[06/50] [abbrv] hive git commit: HIVE-11300 HBase metastore: Support token and master key methods (gates)

2015-09-21 Thread daijy
HIVE-11300 HBase metastore: Support token and master key methods (gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a310524c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a310524c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a310524c

Branch: refs/heads/master
Commit: a310524c48f54cb3071395e11fd97538816261d8
Parents: c53c6f4
Author: Alan Gates 
Authored: Wed Jul 22 11:57:32 2015 -0700
Committer: Alan Gates 
Committed: Wed Jul 22 11:57:32 2015 -0700

--
 .../hbase/TestHBaseStoreIntegration.java|   44 +
 .../metastore/hbase/HbaseMetastoreProto.java| 3754 +++---
 .../hive/metastore/hbase/HBaseReadWrite.java|  221 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |  101 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |  111 +-
 .../metastore/hbase/hbase_metastore_proto.proto |   13 +-
 .../hive/metastore/hbase/TestHBaseStore.java|   51 -
 7 files changed, 2769 insertions(+), 1526 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a310524c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
index 4ff01a4..8b0b431 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
@@ -1747,4 +1747,48 @@ public class TestHBaseStoreIntegration extends 
HBaseIntegrationTests {
   
statsList.get(i).getStatsObj().get(1).getStatsData().getStringStats().getNumDVs());
 }
   }
+
+  @Test
+  public void delegationToken() throws Exception {
+store.addToken("abc", "def");
+store.addToken("ghi", "jkl");
+
+Assert.assertEquals("def", store.getToken("abc"));
+Assert.assertEquals("jkl", store.getToken("ghi"));
+Assert.assertNull(store.getToken("wabawaba"));
+String[] allToks = store.getAllTokenIdentifiers().toArray(new String[2]);
+Arrays.sort(allToks);
+Assert.assertArrayEquals(new String[]{"abc", "ghi"}, allToks);
+
+store.removeToken("abc");
+store.removeToken("wabawaba");
+
+Assert.assertNull(store.getToken("abc"));
+Assert.assertEquals("jkl", store.getToken("ghi"));
+allToks = store.getAllTokenIdentifiers().toArray(new String[1]);
+Assert.assertArrayEquals(new String[]{"ghi"}, allToks);
+  }
+
+  @Test
+  public void masterKey() throws Exception {
+Assert.assertEquals(0, store.addMasterKey("k1"));
+Assert.assertEquals(1, store.addMasterKey("k2"));
+
+String[] keys = store.getMasterKeys();
+Arrays.sort(keys);
+Assert.assertArrayEquals(new String[]{"k1", "k2"}, keys);
+
+store.updateMasterKey(0, "k3");
+keys = store.getMasterKeys();
+Arrays.sort(keys);
+Assert.assertArrayEquals(new String[]{"k2", "k3"}, keys);
+
+store.removeMasterKey(1);
+keys = store.getMasterKeys();
+Assert.assertArrayEquals(new String[]{"k3"}, keys);
+
+thrown.expect(NoSuchObjectException.class);
+store.updateMasterKey(72, "whatever");
+  }
+
 }
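The test above doubles as the reference usage for the new store methods. As a
minimal sketch (the method names are exactly the ones the test calls on the
store; the exception signatures and the RawStore setup are assumptions):

    import org.apache.hadoop.hive.metastore.RawStore;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;

    public class TokenStoreSketch {
      // Round-trips a delegation token and rotates a master key.
      static void demo(RawStore store) throws MetaException, NoSuchObjectException {
        store.addToken("tokenId-1", "delegation-token-material"); // persist a token
        String token = store.getToken("tokenId-1");               // read it back
        store.removeToken("tokenId-1");                           // drop it

        int seq = store.addMasterKey("key-material");        // returns a sequence number
        store.updateMasterKey(seq, "rotated-key-material");  // rotate in place
        store.removeMasterKey(seq);                          // retire the key
      }
    }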



[45/50] [abbrv] hive git commit: HIVE-11692: Fix UT regressions on hbase-metastore branch (Daniel Dai reviewed by Thejas Nair)

2015-09-21 Thread daijy
HIVE-11692: Fix UT regressions on hbase-metastore branch (Daniel Dai reviewed 
by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8b0ededf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8b0ededf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8b0ededf

Branch: refs/heads/master
Commit: 8b0ededf574ff33c7fe4a952aad42ece1467237d
Parents: 3d170ca
Author: Daniel Dai 
Authored: Tue Sep 1 13:18:35 2015 -0700
Committer: Daniel Dai 
Committed: Tue Sep 1 13:18:35 2015 -0700

--
 data/conf/hbase/hive-site.xml   | 263 ---
 .../hive/metastore/TestHiveMetaStore.java   |   3 +
 .../hive/metastore/hbase/TestHBaseImport.java   |  18 +-
 itests/qtest/pom.xml|  19 --
 .../test/resources/testconfiguration.properties |  46 
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   4 +
 metastore/pom.xml   |   6 +
 .../hadoop/hive/metastore/TestObjectStore.java  |  43 ++-
 .../dynpart_sort_opt_vectorization.q.out|  12 +-
 .../dynpart_sort_optimization.q.out |  12 +-
 10 files changed, 76 insertions(+), 350 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8b0ededf/data/conf/hbase/hive-site.xml
--
diff --git a/data/conf/hbase/hive-site.xml b/data/conf/hbase/hive-site.xml
deleted file mode 100644
index 2cde40f..000
--- a/data/conf/hbase/hive-site.xml
+++ /dev/null
@@ -1,263 +0,0 @@
-
-
-
-
-
-
-
-<property>
-  <name>hive.in.test</name>
-  <value>true</value>
-  <description>Internal marker for test. Used for masking env-dependent values</description>
-</property>
-
-
-
-
-
-
-
-<property>
-  <name>hadoop.tmp.dir</name>
-  <value>${test.tmp.dir}/hadoop-tmp</value>
-  <description>A base for other temporary directories.</description>
-</property>
-
-
-<property>
-  <name>hive.exec.scratchdir</name>
-  <value>${test.tmp.dir}/scratchdir</value>
-  <description>Scratch space for Hive jobs</description>
-</property>
-
-<property>
-  <name>hive.exec.local.scratchdir</name>
-  <value>${test.tmp.dir}/localscratchdir/</value>
-  <description>Local scratch space for Hive jobs</description>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionURL</name>
-  <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
-</property>
-
-<property>
-  <name>hive.stats.dbconnectionstring</name>
-  <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionDriverName</name>
-  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionUserName</name>
-  <value>APP</value>
-</property>
-
-<property>
-  <name>javax.jdo.option.ConnectionPassword</name>
-  <value>mine</value>
-</property>
-
-<property>
-  <name>hive.metastore.warehouse.dir</name>
-  <value>${test.warehouse.dir}</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.metastore.metadb.dir</name>
-  <value>file://${test.tmp.dir}/metadb/</value>
-  <description>
-  Required by metastore server or if the uris argument below is not supplied
-  </description>
-</property>
-
-<property>
-  <name>test.log.dir</name>
-  <value>${test.tmp.dir}/log/</value>
-  <description></description>
-</property>
-
-<property>
-  <name>test.data.files</name>
-  <value>${hive.root}/data/files</value>
-  <description></description>
-</property>
-
-<property>
-  <name>test.data.scripts</name>
-  <value>${hive.root}/data/scripts</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.jar.path</name>
-  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
-  <description></description>
-</property>
-
-<property>
-  <name>hive.querylog.location</name>
-  <value>${test.tmp.dir}/tmp</value>
-  <description>Location of the structured hive logs</description>
-</property>
-
-<property>
-  <name>hive.exec.pre.hooks</name>
-  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
-  <description>Pre Execute Hook for Tests</description>
-</property>
-
-<property>
-  <name>hive.exec.post.hooks</name>
-  <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
-  <description>Post Execute Hook for Tests</description>
-</property>
-
-<property>
-  <name>hive.support.concurrency</name>
-  <value>false</value>
-  <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
-</property>
-
-<property>
-  <name>hive.unlock.numretries</name>
-  <value>2</value>
-  <description>The number of times you want to retry to do one unlock</description>
-</property>
-
-<property>
-  <name>hive.lock.sleep.between.retries</name>
-  <value>2</value>
-  <description>The sleep time (in seconds) between various retries</description>
-</property>
-
-<property>
-  <name>fs.pfile.impl</name>
-  <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
-  <description>A proxy for local file system used for cross file system testing</description>
-</property>
-
-<property>
-  <name>hive.exec.mode.local.auto</name>
-  <value>false</value>
-  <description>
-    Let hive determine whether to run in local mode automatically
-    Disabling this for tests so that minimr is not affected
-  </description>
-</property>
-
-<property>
-  <name>hive.auto.convert.join</name>
-  <value>false</value>
-  <description>Whether Hive enable the optimization about converting common join into mapjoin based on the input file size</description>
-</property>
-
-<property>
-  <name>hive.ignore.mapjoin.hint</name>
-  <value>false</value>
-  <description>Whether Hive ignores the mapjoin hint</description>
-</property>
-
-<property>
-  <name>hive.input.format</name>
-  <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
-  <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat.</description>
-</property>
-
-<property>
-  <name>hive.default.rcfile.serde</name>
-  <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
-  <description>The default SerDe hive will use for 

[34/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
index c538b72..8355cee 100644
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleRequest.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class GrantRevokeRoleRequest implements org.apache.thrift.TBase<GrantRevokeRoleRequest, GrantRevokeRoleRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokeRoleRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokeRoleRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
index dd7ee80..f360916 100644
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GrantRevokeRoleResponse.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class GrantRevokeRoleResponse implements org.apache.thrift.TBase<GrantRevokeRoleResponse, GrantRevokeRoleResponse._Fields>, java.io.Serializable, Cloneable, Comparable<GrantRevokeRoleResponse> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GrantRevokeRoleResponse");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
index f0e2a60..44c7958 100644
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatRequest.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
 public class HeartbeatRequest implements org.apache.thrift.TBase<HeartbeatRequest, HeartbeatRequest._Fields>, java.io.Serializable, Cloneable, Comparable<HeartbeatRequest> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("HeartbeatRequest");
 

http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
index de4fe40..bae4cda 100644
--- 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HeartbeatTxnRangeRequest.java
@@ -34,7 +34,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
-@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-14")
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")

[21/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin) ADDENDUM MERGE

2015-09-21 Thread daijy
HIVE-11568 : merge master into branch (Sergey Shelukhin) ADDENDUM MERGE


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2fe60861
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2fe60861
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2fe60861

Branch: refs/heads/master
Commit: 2fe60861db72a4128448245f8031d1839e5e3f8e
Parents: c528294 3071ce9
Author: Sergey Shelukhin 
Authored: Fri Aug 14 16:13:32 2015 -0700
Committer: Sergey Shelukhin 
Committed: Fri Aug 14 16:13:32 2015 -0700

--
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java   | 11 +++
 .../clientnegative/alter_table_wrong_location.q |  4 
 .../clientnegative/alter_table_wrong_location.q.out |  9 +
 .../apache/hive/spark/client/SparkClientImpl.java   |  4 
 testutils/ptest2/pom.xml| 16 +---
 testutils/ptest2/src/main/resources/log4j2.xml  |  1 +
 6 files changed, 30 insertions(+), 15 deletions(-)
--




[08/50] [abbrv] hive git commit: HIVE-11343 Merge branch 'master' into hbase-metastore

2015-09-21 Thread daijy
HIVE-11343 Merge branch 'master' into hbase-metastore

Conflicts:
metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
pom.xml


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/61db7b80
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/61db7b80
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/61db7b80

Branch: refs/heads/master
Commit: 61db7b80cbefa5761afd904dfef2eb25e759d6f4
Parents: a310524 2240dbd
Author: Alan Gates 
Authored: Wed Jul 22 13:45:28 2015 -0700
Committer: Alan Gates 
Committed: Wed Jul 22 13:45:28 2015 -0700

--
 .gitignore  |1 +
 NOTICE  |2 +-
 RELEASE_NOTES.txt   |  132 +
 accumulo-handler/pom.xml|2 +-
 .../accumulo/AccumuloConnectionParameters.java  |  111 +-
 .../hive/accumulo/AccumuloStorageHandler.java   |   50 +
 .../hive/accumulo/HiveAccumuloHelper.java   |  280 +
 .../mr/HiveAccumuloTableInputFormat.java|   74 +-
 .../mr/HiveAccumuloTableOutputFormat.java   |   63 +-
 .../TestAccumuloConnectionParameters.java   |   19 +
 .../hive/accumulo/TestHiveAccumuloHelper.java   |   75 +
 .../mr/TestHiveAccumuloTableInputFormat.java|8 +-
 .../mr/TestHiveAccumuloTableOutputFormat.java   |2 +-
 .../positive/accumulo_predicate_pushdown.q.out  |   76 +-
 .../results/positive/accumulo_queries.q.out |   70 +-
 ant/pom.xml |2 +-
 beeline/pom.xml |2 +-
 .../java/org/apache/hive/beeline/BeeLine.java   |7 +-
 .../org/apache/hive/beeline/BeeLineOpts.java|   13 +-
 .../java/org/apache/hive/beeline/Commands.java  |   23 +-
 .../apache/hive/beeline/DatabaseConnection.java |9 +
 beeline/src/main/resources/BeeLine.properties   |2 +
 bin/ext/hiveserver2.cmd |   51 +-
 bin/ext/hplsql.sh   |   37 +
 bin/hplsql  |   25 +
 bin/hplsql.cmd  |   58 +
 cli/pom.xml |2 +-
 common/pom.xml  |2 +-
 .../apache/hadoop/hive/common/FileUtils.java|9 +-
 .../hadoop/hive/common/JvmPauseMonitor.java |   19 +-
 .../hive/common/jsonexplain/tez/Attr.java   |6 +-
 .../hive/common/jsonexplain/tez/Connection.java |6 +-
 .../hadoop/hive/common/jsonexplain/tez/Op.java  |   64 +-
 .../hive/common/jsonexplain/tez/Printer.java|   41 +
 .../hive/common/jsonexplain/tez/Stage.java  |   95 +-
 .../common/jsonexplain/tez/TezJsonParser.java   |   61 +-
 .../hive/common/jsonexplain/tez/Vertex.java |   75 +-
 .../hive/common/metrics/LegacyMetrics.java  |   81 +-
 .../hive/common/metrics/common/Metrics.java |   35 +-
 .../common/metrics/common/MetricsConstant.java  |   35 +
 .../common/metrics/common/MetricsFactory.java   |   30 +-
 .../common/metrics/common/MetricsVariable.java  |   26 +
 .../metrics/metrics2/CodahaleMetrics.java   |   99 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   40 +-
 .../hive/conf/LoopingByteArrayInputStream.java  |2 +-
 .../hive/common/metrics/TestLegacyMetrics.java  |6 +-
 .../metrics/metrics2/TestCodahaleMetrics.java   |   58 +-
 contrib/pom.xml |2 +-
 .../util/typedbytes/TypedBytesInput.java|2 +-
 .../util/typedbytes/TypedBytesOutput.java   |2 +-
 .../util/typedbytes/TypedBytesRecordInput.java  |2 +-
 .../util/typedbytes/TypedBytesRecordOutput.java |2 +-
 .../typedbytes/TypedBytesWritableInput.java |2 +-
 .../typedbytes/TypedBytesWritableOutput.java|2 +-
 data/conf/spark/standalone/hive-site.xml|6 +
 data/conf/spark/yarn-client/hive-site.xml   |6 +
 data/files/ct_events_clean.txt  |   76 +
 data/files/emp2.txt |   16 +
 data/files/encoding-utf8.txt|   12 +
 data/files/encoding_iso-8859-1.txt  |4 +
 data/files/service_request_clean.txt|   76 +
 data/files/sortdp.txt   |   32 +
 data/scripts/q_test_cleanup.sql |2 +-
 dev-support/jenkins-submit-build.sh |   10 +-
 hbase-handler/pom.xml   |2 +-
 .../results/positive/external_table_ppd.q.out   |1 -
 .../positive/hbase_binary_storage_queries.q.out |2 -
 .../src/test/results/positive/hbase_stats.q.out |7 -
 .../test/results/positive/hbase_stats2.q.out|7 -
 .../test/results/positive/hbase_stats3.q.out|   12 -
 .../positive/hbase_stats_empty_partition.q.out  |2 -
 hcatalog/core/pom.xml   |2 +-
 

[35/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
--
diff --git 
a/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
new file mode 100644
index 000..3d69606
--- /dev/null
+++ 
b/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetFileMetadataByExprRequest.java
@@ -0,0 +1,548 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ *  @generated
+ */
+package org.apache.hadoop.hive.metastore.api;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-17")
+public class GetFileMetadataByExprRequest implements org.apache.thrift.TBase<GetFileMetadataByExprRequest, GetFileMetadataByExprRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetFileMetadataByExprRequest> {
+  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetFileMetadataByExprRequest");
+
+  private static final org.apache.thrift.protocol.TField FILE_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("fileIds", org.apache.thrift.protocol.TType.LIST, (short)1);
+  private static final org.apache.thrift.protocol.TField EXPR_FIELD_DESC = new org.apache.thrift.protocol.TField("expr", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+  static {
+    schemes.put(StandardScheme.class, new GetFileMetadataByExprRequestStandardSchemeFactory());
+    schemes.put(TupleScheme.class, new GetFileMetadataByExprRequestTupleSchemeFactory());
+  }
+
+  private List<Long> fileIds; // required
+  private ByteBuffer expr; // required
+
+  /** The set of fields this struct contains, along with convenience methods 
for finding and manipulating them. */
+  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+FILE_IDS((short)1, "fileIds"),
+EXPR((short)2, "expr");
+
+    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+static {
+  for (_Fields field : EnumSet.allOf(_Fields.class)) {
+byName.put(field.getFieldName(), field);
+  }
+}
+
+/**
+ * Find the _Fields constant that matches fieldId, or null if its not 
found.
+ */
+public static _Fields findByThriftId(int fieldId) {
+  switch(fieldId) {
+case 1: // FILE_IDS
+  return FILE_IDS;
+case 2: // EXPR
+  return EXPR;
+default:
+  return null;
+  }
+}
+
+/**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+public static _Fields findByThriftIdOrThrow(int fieldId) {
+  _Fields fields = findByThriftId(fieldId);
+  if (fields == null) throw new IllegalArgumentException("Field " + 
fieldId + " doesn't exist!");
+  return fields;
+}
+
+/**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+public static _Fields findByName(String name) {
+  return byName.get(name);
+}
+
+private final short _thriftId;
+private final String _fieldName;
+
+_Fields(short thriftId, String fieldName) {
+  _thriftId = thriftId;
+  _fieldName = fieldName;
+}
+
+public short getThriftFieldId() {
+  return _thriftId;
+}
+
+public String getFieldName() {
+  return _fieldName;
+}
+  }
+
+  // isset id assignments
+  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> 
metaDataMap;
+  static {
+Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new 
EnumMap<_Fields, 

[36/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
--
diff --git a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h 
b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
index 36110e6..e072866 100644
--- a/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
+++ b/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h
@@ -321,6 +321,24 @@ class FireEventRequest;
 
 class FireEventResponse;
 
+class MetadataPpdResult;
+
+class GetFileMetadataByExprResult;
+
+class GetFileMetadataByExprRequest;
+
+class GetFileMetadataResult;
+
+class GetFileMetadataRequest;
+
+class PutFileMetadataResult;
+
+class PutFileMetadataRequest;
+
+class ClearFileMetadataResult;
+
+class ClearFileMetadataRequest;
+
 class GetAllFunctionsResponse;
 
 class MetaException;
@@ -5401,6 +5419,359 @@ class FireEventResponse {
 
void swap(FireEventResponse &a, FireEventResponse &b);
 
+
+class MetadataPpdResult {
+ public:
+
+  static const char* ascii_fingerprint; // = 
"07A9615F837F7D0A952B595DD3020972";
+  static const uint8_t binary_fingerprint[16]; // = 
{0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
+
+  MetadataPpdResult(const MetadataPpdResult&);
+  MetadataPpdResult& operator=(const MetadataPpdResult&);
+  MetadataPpdResult() : metadata(), includeBitset() {
+  }
+
+  virtual ~MetadataPpdResult() throw();
+  std::string metadata;
+  std::string includeBitset;
+
+  void __set_metadata(const std::string& val);
+
+  void __set_includeBitset(const std::string& val);
+
+  bool operator == (const MetadataPpdResult & rhs) const
+  {
+if (!(metadata == rhs.metadata))
+  return false;
+if (!(includeBitset == rhs.includeBitset))
+  return false;
+return true;
+  }
+  bool operator != (const MetadataPpdResult &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const MetadataPpdResult & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  friend std::ostream& operator<<(std::ostream& out, const MetadataPpdResult& 
obj);
+};
+
+void swap(MetadataPpdResult &a, MetadataPpdResult &b);
+
+
+class GetFileMetadataByExprResult {
+ public:
+
+  static const char* ascii_fingerprint; // = 
"2B0C1B8D7599529A5797481BE308375D";
+  static const uint8_t binary_fingerprint[16]; // = 
{0x2B,0x0C,0x1B,0x8D,0x75,0x99,0x52,0x9A,0x57,0x97,0x48,0x1B,0xE3,0x08,0x37,0x5D};
+
+  GetFileMetadataByExprResult(const GetFileMetadataByExprResult&);
+  GetFileMetadataByExprResult& operator=(const GetFileMetadataByExprResult&);
+  GetFileMetadataByExprResult() : isSupported(0) {
+  }
+
+  virtual ~GetFileMetadataByExprResult() throw();
+  std::map<int64_t, MetadataPpdResult>  metadata;
+  bool isSupported;
+  std::vector<int64_t>  unknownFileIds;
+
+  void __set_metadata(const std::map<int64_t, MetadataPpdResult> & val);
+
+  void __set_isSupported(const bool val);
+
+  void __set_unknownFileIds(const std::vector<int64_t> & val);
+
+  bool operator == (const GetFileMetadataByExprResult & rhs) const
+  {
+if (!(metadata == rhs.metadata))
+  return false;
+if (!(isSupported == rhs.isSupported))
+  return false;
+if (!(unknownFileIds == rhs.unknownFileIds))
+  return false;
+return true;
+  }
+  bool operator != (const GetFileMetadataByExprResult &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const GetFileMetadataByExprResult & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  friend std::ostream& operator<<(std::ostream& out, const 
GetFileMetadataByExprResult& obj);
+};
+
+void swap(GetFileMetadataByExprResult &a, GetFileMetadataByExprResult &b);
+
+
+class GetFileMetadataByExprRequest {
+ public:
+
+  static const char* ascii_fingerprint; // = 
"925353917FC0AF87976A2338011F5A31";
+  static const uint8_t binary_fingerprint[16]; // = 
{0x92,0x53,0x53,0x91,0x7F,0xC0,0xAF,0x87,0x97,0x6A,0x23,0x38,0x01,0x1F,0x5A,0x31};
+
+  GetFileMetadataByExprRequest(const GetFileMetadataByExprRequest&);
+  GetFileMetadataByExprRequest& operator=(const GetFileMetadataByExprRequest&);
+  GetFileMetadataByExprRequest() : expr() {
+  }
+
+  virtual ~GetFileMetadataByExprRequest() throw();
+  std::vector<int64_t>  fileIds;
+  std::string expr;
+
+  void __set_fileIds(const std::vector<int64_t> & val);
+
+  void __set_expr(const std::string& val);
+
+  bool operator == (const GetFileMetadataByExprRequest & rhs) const
+  {
+if (!(fileIds == rhs.fileIds))
+  return false;
+if (!(expr == rhs.expr))
+  return false;
+return true;
+  }
+  bool operator != (const GetFileMetadataByExprRequest &rhs) const {
+    return !(*this == rhs);
+  }
+
+  bool operator < (const GetFileMetadataByExprRequest & ) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t 

[43/50] [abbrv] hive git commit: HIVE-11636 NPE in stats conversion with HBase metastore (Sergey Shelukhin via gates)

2015-09-21 Thread daijy
HIVE-11636 NPE in stats conversion with HBase metastore (Sergey Shelukhin via 
gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fbbb7cf1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fbbb7cf1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fbbb7cf1

Branch: refs/heads/master
Commit: fbbb7cf1fa5691037243a6db3993f294ffb00eeb
Parents: e150af9
Author: Alan Gates 
Authored: Fri Aug 28 11:03:26 2015 -0700
Committer: Alan Gates 
Committed: Fri Aug 28 11:03:26 2015 -0700

--
 .../java/org/apache/hadoop/hive/metastore/HiveMetaStore.java | 4 ++--
 .../org/apache/hadoop/hive/metastore/hbase/HBaseStore.java   | 8 
 2 files changed, 10 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fbbb7cf1/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index a06efc6..df64124 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -4281,8 +4281,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   }
   try {
 ColumnStatistics cs = getMS().getTableColumnStatistics(dbName, 
tblName, lowerCaseColNames);
-result = new TableStatsResult(
-cs == null ? Lists.newArrayList() : 
cs.getStatsObj());
+result = new TableStatsResult((cs == null || cs.getStatsObj() == null)
+? Lists.newArrayList() : cs.getStatsObj());
   } finally {
 endFunction("get_table_statistics_req: ", result == null, null, 
tblName);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/fbbb7cf1/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index 4cda9cc..df0fac3 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.metastore.RawStore;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -1693,9 +1694,11 @@ public class HBaseStore implements RawStore {
   partVals.add(partNameToVals(partName));
 }
 boolean commit = false;
+boolean hasAnyStats = false;
 openTransaction();
 try {
   AggrStats aggrStats = new AggrStats();
+  aggrStats.setPartsFound(0);
   for (String colName : colNames) {
 try {
   AggrStats oneCol =
@@ -1704,6 +1707,7 @@ public class HBaseStore implements RawStore {
 assert oneCol.getColStatsSize() == 1;
 aggrStats.setPartsFound(oneCol.getPartsFound());
 aggrStats.addToColStats(oneCol.getColStats().get(0));
+hasAnyStats = true;
   }
 } catch (CacheLoader.InvalidCacheLoadException e) {
   LOG.debug("Found no stats for column " + colName);
@@ -1712,6 +1716,10 @@ public class HBaseStore implements RawStore {
 }
   }
   commit = true;
+      if (!hasAnyStats) {
+        // Set the required field.
+        aggrStats.setColStats(new ArrayList<ColumnStatisticsObj>());
+      }
   return aggrStats;
 } catch (IOException e) {
   LOG.error("Unable to fetch aggregate column statistics", e);
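The fix illustrates a general Thrift pattern: colStats and partsFound are
required fields of AggrStats, and serializing a struct whose required fields
were never set fails at write time, so the patch seeds them even when no
statistics were found. A minimal sketch of the defensive initialization (the
types are those imported in the diff; treating both fields as required is the
assumption here):

    import java.util.ArrayList;

    import org.apache.hadoop.hive.metastore.api.AggrStats;
    import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

    public class AggrStatsInit {
      // Returns an AggrStats that is always safe to serialize, even when
      // no per-column statistics could be loaded from the stats cache.
      static AggrStats emptyAggrStats() {
        AggrStats aggrStats = new AggrStats();
        aggrStats.setPartsFound(0);                                  // required i64
        aggrStats.setColStats(new ArrayList<ColumnStatisticsObj>()); // required list
        return aggrStats;
      }
    }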



[37/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
--
diff --git a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h 
b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
index baa28e3..49d31e6 100644
--- a/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
+++ b/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
@@ -141,6 +141,10 @@ class ThriftHiveMetastoreIf : virtual public  
::facebook::fb303::FacebookService
   virtual void get_current_notificationEventId(CurrentNotificationEventId& 
_return) = 0;
   virtual void fire_listener_event(FireEventResponse& _return, const 
FireEventRequest& rqst) = 0;
   virtual void flushCache() = 0;
+  virtual void get_file_metadata_by_expr(GetFileMetadataByExprResult& _return, 
const GetFileMetadataByExprRequest& req) = 0;
+  virtual void get_file_metadata(GetFileMetadataResult& _return, const 
GetFileMetadataRequest& req) = 0;
+  virtual void put_file_metadata(PutFileMetadataResult& _return, const 
PutFileMetadataRequest& req) = 0;
+  virtual void clear_file_metadata(ClearFileMetadataResult& _return, const 
ClearFileMetadataRequest& req) = 0;
 };
 
 class ThriftHiveMetastoreIfFactory : virtual public  
::facebook::fb303::FacebookServiceIfFactory {
@@ -568,6 +572,18 @@ class ThriftHiveMetastoreNull : virtual public 
ThriftHiveMetastoreIf , virtual p
   void flushCache() {
 return;
   }
+  void get_file_metadata_by_expr(GetFileMetadataByExprResult& /* _return */, 
const GetFileMetadataByExprRequest& /* req */) {
+return;
+  }
+  void get_file_metadata(GetFileMetadataResult& /* _return */, const 
GetFileMetadataRequest& /* req */) {
+return;
+  }
+  void put_file_metadata(PutFileMetadataResult& /* _return */, const 
PutFileMetadataRequest& /* req */) {
+return;
+  }
+  void clear_file_metadata(ClearFileMetadataResult& /* _return */, const 
ClearFileMetadataRequest& /* req */) {
+return;
+  }
 };
 
 typedef struct _ThriftHiveMetastore_getMetaConf_args__isset {
@@ -18287,6 +18303,486 @@ class ThriftHiveMetastore_flushCache_presult {
   friend std::ostream& operator<<(std::ostream& out, const 
ThriftHiveMetastore_flushCache_presult& obj);
 };
 
+typedef struct _ThriftHiveMetastore_get_file_metadata_by_expr_args__isset {
+  _ThriftHiveMetastore_get_file_metadata_by_expr_args__isset() : req(false) {}
+  bool req :1;
+} _ThriftHiveMetastore_get_file_metadata_by_expr_args__isset;
+
+class ThriftHiveMetastore_get_file_metadata_by_expr_args {
+ public:
+
+  static const char* ascii_fingerprint; // = 
"35F3A2DA650F5293300EA6DB58284F86";
+  static const uint8_t binary_fingerprint[16]; // = 
{0x35,0xF3,0xA2,0xDA,0x65,0x0F,0x52,0x93,0x30,0x0E,0xA6,0xDB,0x58,0x28,0x4F,0x86};
+
+  ThriftHiveMetastore_get_file_metadata_by_expr_args(const 
ThriftHiveMetastore_get_file_metadata_by_expr_args&);
+  ThriftHiveMetastore_get_file_metadata_by_expr_args& operator=(const 
ThriftHiveMetastore_get_file_metadata_by_expr_args&);
+  ThriftHiveMetastore_get_file_metadata_by_expr_args() {
+  }
+
+  virtual ~ThriftHiveMetastore_get_file_metadata_by_expr_args() throw();
+  GetFileMetadataByExprRequest req;
+
+  _ThriftHiveMetastore_get_file_metadata_by_expr_args__isset __isset;
+
+  void __set_req(const GetFileMetadataByExprRequest& val);
+
+  bool operator == (const ThriftHiveMetastore_get_file_metadata_by_expr_args & 
rhs) const
+  {
+if (!(req == rhs.req))
+  return false;
+return true;
+  }
+  bool operator != (const ThriftHiveMetastore_get_file_metadata_by_expr_args 
) const {
+return !(*this == rhs);
+  }
+
+  bool operator < (const ThriftHiveMetastore_get_file_metadata_by_expr_args & 
) const;
+
+  uint32_t read(::apache::thrift::protocol::TProtocol* iprot);
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  friend std::ostream& operator<<(std::ostream& out, const 
ThriftHiveMetastore_get_file_metadata_by_expr_args& obj);
+};
+
+
+class ThriftHiveMetastore_get_file_metadata_by_expr_pargs {
+ public:
+
+  static const char* ascii_fingerprint; // = 
"35F3A2DA650F5293300EA6DB58284F86";
+  static const uint8_t binary_fingerprint[16]; // = 
{0x35,0xF3,0xA2,0xDA,0x65,0x0F,0x52,0x93,0x30,0x0E,0xA6,0xDB,0x58,0x28,0x4F,0x86};
+
+
+  virtual ~ThriftHiveMetastore_get_file_metadata_by_expr_pargs() throw();
+  const GetFileMetadataByExprRequest* req;
+
+  uint32_t write(::apache::thrift::protocol::TProtocol* oprot) const;
+
+  friend std::ostream& operator<<(std::ostream& out, const 
ThriftHiveMetastore_get_file_metadata_by_expr_pargs& obj);
+};
+
+typedef struct _ThriftHiveMetastore_get_file_metadata_by_expr_result__isset {
+  _ThriftHiveMetastore_get_file_metadata_by_expr_result__isset() : 
success(false) {}
+  bool success :1;
+} _ThriftHiveMetastore_get_file_metadata_by_expr_result__isset;
+
+class ThriftHiveMetastore_get_file_metadata_by_expr_result {
+ public:
+
+  static const char* 

[15/50] [abbrv] hive git commit: HIVE-11568 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c528294b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
--
diff --cc 
metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
index f8042fc,000..0204f37
mode 100644,00..100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
@@@ -1,2225 -1,0 +1,2241 @@@
 +/**
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + * http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.apache.hadoop.hive.metastore.hbase;
 +
 +import com.google.common.annotations.VisibleForTesting;
 +import com.google.common.cache.CacheLoader;
 +import org.apache.commons.lang.StringUtils;
 +import org.apache.commons.logging.Log;
 +import org.apache.commons.logging.LogFactory;
 +import org.apache.hadoop.conf.Configuration;
 +import org.apache.hadoop.hive.common.FileUtils;
 +import org.apache.hadoop.hive.metastore.HiveMetaStore;
 +import org.apache.hadoop.hive.metastore.PartFilterExprUtil;
 +import org.apache.hadoop.hive.metastore.PartitionExpressionProxy;
 +import org.apache.hadoop.hive.metastore.RawStore;
 +import org.apache.hadoop.hive.metastore.api.AggrStats;
 +import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
 +import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 +import org.apache.hadoop.hive.metastore.api.Database;
 +import org.apache.hadoop.hive.metastore.api.FieldSchema;
 +import org.apache.hadoop.hive.metastore.api.Function;
 +import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 +import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 +import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 +import org.apache.hadoop.hive.metastore.api.Index;
 +import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 +import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 +import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
 +import org.apache.hadoop.hive.metastore.api.MetaException;
 +import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 +import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 +import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
 +import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 +import org.apache.hadoop.hive.metastore.api.Partition;
 +import org.apache.hadoop.hive.metastore.api.PartitionEventType;
 +import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
 +import org.apache.hadoop.hive.metastore.api.PrincipalType;
 +import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 +import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 +import org.apache.hadoop.hive.metastore.api.Role;
 +import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 +import org.apache.hadoop.hive.metastore.api.Table;
 +import org.apache.hadoop.hive.metastore.api.Type;
 +import org.apache.hadoop.hive.metastore.api.UnknownDBException;
 +import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
 +import org.apache.hadoop.hive.metastore.api.UnknownTableException;
 +import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.PlanResult;
 +import org.apache.hadoop.hive.metastore.hbase.HBaseFilterPlanUtil.ScanPlan;
 +import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
 +import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 +import org.apache.thrift.TException;
 +
 +import java.io.IOException;
 +import java.util.ArrayList;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import java.util.Set;
 +
 +/**
 + * Implementation of RawStore that stores data in HBase
 + */
 +public class HBaseStore implements RawStore {
 +  static final private Log LOG = 
LogFactory.getLog(HBaseStore.class.getName());
 +
 +  // Do not access this directly, call getHBase to make sure it is 
initialized.
 +  private HBaseReadWrite hbase = null;
 +  private Configuration conf;
 +  private int txnNestLevel = 0;
 +  private PartitionExpressionProxy expressionProxy = 

[49/50] [abbrv] hive git commit: HIVE-11711: Merge hbase-metastore branch to trunk

2015-09-21 Thread daijy
HIVE-11711: Merge hbase-metastore branch to trunk


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4c17ecfd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4c17ecfd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4c17ecfd

Branch: refs/heads/master
Commit: 4c17ecfda525f2f65a29dab40563c50267e46eba
Parents: 76828e0
Author: Daniel Dai 
Authored: Mon Sep 21 21:54:52 2015 -0700
Committer: Daniel Dai 
Committed: Mon Sep 21 21:54:52 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java | 18 +-
 1 file changed, 17 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4c17ecfd/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 1d98766..0d07173 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -177,7 +177,23 @@ public class HiveConf extends Configuration {
   HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_WRITER_WAIT,
   HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_READER_WAIT,
   HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_MAX_FULL,
-  HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_CLEAN_UNTIL
+  HiveConf.ConfVars.METASTORE_AGGREGATE_STATS_CACHE_CLEAN_UNTIL,
+  HiveConf.ConfVars.METASTORE_FASTPATH,
+  HiveConf.ConfVars.METASTORE_HBASE_CATALOG_CACHE_SIZE,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGREGATE_STATS_CACHE_SIZE,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGREGATE_STATS_CACHE_MAX_PARTITIONS,
+  
HiveConf.ConfVars.METASTORE_HBASE_AGGREGATE_STATS_CACHE_FALSE_POSITIVE_PROBABILITY,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGREGATE_STATS_CACHE_MAX_VARIANCE,
+  HiveConf.ConfVars.METASTORE_HBASE_CACHE_TIME_TO_LIVE,
+  HiveConf.ConfVars.METASTORE_HBASE_CACHE_MAX_WRITER_WAIT,
+  HiveConf.ConfVars.METASTORE_HBASE_CACHE_MAX_READER_WAIT,
+  HiveConf.ConfVars.METASTORE_HBASE_CACHE_MAX_FULL,
+  HiveConf.ConfVars.METASTORE_HBASE_CACHE_CLEAN_UNTIL,
+  HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_CACHE_ENTRIES,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_MEMORY_TTL,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_INVALIDATOR_FREQUENCY,
+  HiveConf.ConfVars.METASTORE_HBASE_AGGR_STATS_HBASE_TTL
   };
 
   /**
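The array being extended here is HiveConf's list of metastore configuration
variables; callers read any of them through the static ConfVars accessors, the
same pattern used elsewhere in this series. A small sketch using one of the
variables registered above:

  HiveConf conf = new HiveConf();
  String connClass =
      HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_HBASE_CONNECTION_CLASS);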



[10/50] [abbrv] hive git commit: HIVE-10950: Unit test against HBase Metastore (Daniel Dai, Vaibhav Gumashta)

2015-09-21 Thread daijy
HIVE-10950: Unit test against HBase Metastore (Daniel Dai, Vaibhav Gumashta)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5acf458c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5acf458c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5acf458c

Branch: refs/heads/master
Commit: 5acf458c449bd2464076c243150760797ae57092
Parents: cb37021
Author: Vaibhav Gumashta 
Authored: Fri Jul 24 09:13:02 2015 -0700
Committer: Vaibhav Gumashta 
Committed: Fri Jul 24 09:13:02 2015 -0700

--
 data/conf/hbase/hive-site.xml   | 263 +++
 itests/qtest/pom.xml|  19 ++
 .../test/resources/testconfiguration.properties |  46 
 itests/util/pom.xml |  12 +
 .../org/apache/hadoop/hive/ql/QTestUtil.java|  72 -
 .../hive/metastore/hbase/HBaseReadWrite.java|   8 +-
 6 files changed, 411 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5acf458c/data/conf/hbase/hive-site.xml
--
diff --git a/data/conf/hbase/hive-site.xml b/data/conf/hbase/hive-site.xml
new file mode 100644
index 000..2cde40f
--- /dev/null
+++ b/data/conf/hbase/hive-site.xml
@@ -0,0 +1,263 @@
+<?xml version="1.0"?>
+
+<configuration>
+
+<property>
+  <name>hive.in.test</name>
+  <value>true</value>
+  <description>Internal marker for test. Used for masking env-dependent values</description>
+</property>
+
+<property>
+  <name>hadoop.tmp.dir</name>
+  <value>${test.tmp.dir}/hadoop-tmp</value>
+  <description>A base for other temporary directories.</description>
+</property>
+
+<property>
+  <name>hive.exec.scratchdir</name>
+  <value>${test.tmp.dir}/scratchdir</value>
+  <description>Scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>hive.exec.local.scratchdir</name>
+  <value>${test.tmp.dir}/localscratchdir/</value>
+  <description>Local scratch space for Hive jobs</description>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionURL</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true</value>
+</property>
+
+<property>
+  <name>hive.stats.dbconnectionstring</name>
+  <value>jdbc:derby:;databaseName=${test.tmp.dir}/TempStatsStore;create=true</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionDriverName</name>
+  <value>org.apache.derby.jdbc.EmbeddedDriver</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionUserName</name>
+  <value>APP</value>
+</property>
+
+<property>
+  <name>javax.jdo.option.ConnectionPassword</name>
+  <value>mine</value>
+</property>
+
+<property>
+  <name>hive.metastore.warehouse.dir</name>
+  <value>${test.warehouse.dir}</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.metastore.metadb.dir</name>
+  <value>file://${test.tmp.dir}/metadb/</value>
+  <description>
+  Required by metastore server or if the uris argument below is not supplied
+  </description>
+</property>
+
+<property>
+  <name>test.log.dir</name>
+  <value>${test.tmp.dir}/log/</value>
+  <description></description>
+</property>
+
+<property>
+  <name>test.data.files</name>
+  <value>${hive.root}/data/files</value>
+  <description></description>
+</property>
+
+<property>
+  <name>test.data.scripts</name>
+  <value>${hive.root}/data/scripts</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.jar.path</name>
+  <value>${maven.local.repository}/org/apache/hive/hive-exec/${hive.version}/hive-exec-${hive.version}.jar</value>
+  <description></description>
+</property>
+
+<property>
+  <name>hive.querylog.location</name>
+  <value>${test.tmp.dir}/tmp</value>
+  <description>Location of the structured hive logs</description>
+</property>
+
+<property>
+  <name>hive.exec.pre.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PreExecutePrinter, org.apache.hadoop.hive.ql.hooks.EnforceReadOnlyTables</value>
+  <description>Pre Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.exec.post.hooks</name>
+  <value>org.apache.hadoop.hive.ql.hooks.PostExecutePrinter</value>
+  <description>Post Execute Hook for Tests</description>
+</property>
+
+<property>
+  <name>hive.support.concurrency</name>
+  <value>false</value>
+  <description>Whether hive supports concurrency or not. A zookeeper instance must be up and running for the default hive lock manager to support read-write locks.</description>
+</property>
+
+<property>
+  <name>hive.unlock.numretries</name>
+  <value>2</value>
+  <description>The number of times you want to retry to do one unlock</description>
+</property>
+
+<property>
+  <name>hive.lock.sleep.between.retries</name>
+  <value>2</value>
+  <description>The sleep time (in seconds) between various retries</description>
+</property>
+
+<property>
+  <name>fs.pfile.impl</name>
+  <value>org.apache.hadoop.fs.ProxyLocalFileSystem</value>
+  <description>A proxy for local file system used for cross file system testing</description>
+</property>
+
+<property>
+  <name>hive.exec.mode.local.auto</name>
+  <value>false</value>
+  <description>
+    Let hive determine whether to run in local mode automatically
+    Disabling this for tests so that minimr is not affected
+  </description>
+</property>
+
+<property>
+  <name>hive.auto.convert.join</name>
+  <value>false</value>
+  <description>Whether Hive enable the optimization about converting common join into mapjoin based on the input file size</description>
+</property>
+
+<property>
+  <name>hive.ignore.mapjoin.hint</name>
+  <value>false</value>
+  <description>Whether Hive ignores the mapjoin hint</description>
+</property>
+
+<property>
+  <name>hive.input.format</name>
+  <value>org.apache.hadoop.hive.ql.io.CombineHiveInputFormat</value>
+  <description>The default input format, if it is not specified, the system assigns it. It is set to HiveInputFormat for hadoop versions 17, 18 and 19, whereas it is set to CombineHiveInputFormat for hadoop 20. The user can always overwrite it - if there is a bug in CombineHiveInputFormat, it can always be manually set to HiveInputFormat.</description>
+</property>
+
+<property>
+  <name>hive.default.rcfile.serde</name>
+  <value>org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe</value>
+  <description>The default SerDe hive will use for the rcfile format</description>
+</property>
+
+<property>
+  <name>hive.stats.dbclass</name>
+  <value>jdbc:derby</value>
+  <description>The storage for temporary stats generated by tasks. Currently, jdbc, hbase and counter types are supported</description>
+</property>
+
+<property>
+  <name>hive.stats.key.prefix.reserve.length</name>
+  <value>0</value>
+</property>
+
+
+  
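The values above rely on Hadoop Configuration variable expansion, and a test
harness picks the file up the usual way. A small, self-contained sketch (the
file path is the one added by this patch):

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hive.conf.HiveConf;

  public class LoadTestConf {
    public static void main(String[] args) {
      HiveConf conf = new HiveConf();
      conf.addResource(new Path("data/conf/hbase/hive-site.xml"));
      // Configuration expands ${...} placeholders (e.g. ${test.tmp.dir})
      // from other properties and system properties at read time.
      System.out.println(conf.get("hive.exec.scratchdir"));
    }
  }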

[47/50] [abbrv] hive git commit: HIVE-11731: Exclude hbase-metastore in itests for hadoop-1

2015-09-21 Thread daijy
HIVE-11731: Exclude hbase-metastore in itests for hadoop-1


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/757553e6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/757553e6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/757553e6

Branch: refs/heads/master
Commit: 757553e64280088bb2fc1546ac1259a519d064a6
Parents: 848b977
Author: Daniel Dai 
Authored: Thu Sep 3 21:57:42 2015 -0700
Committer: Daniel Dai 
Committed: Thu Sep 3 21:57:42 2015 -0700

--
 .../metastore/hbase/HBaseStoreTestUtil.java | 45 
 1 file changed, 45 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/757553e6/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
new file mode 100644
index 000..1f42007
--- /dev/null
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/metastore/hbase/HBaseStoreTestUtil.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import java.util.List;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hive.conf.HiveConf;
+
+public class HBaseStoreTestUtil {
+  public static void initHBaseMetastore(HBaseAdmin admin, HiveConf conf) throws Exception {
+    for (String tableName : HBaseReadWrite.tableNames) {
+      List<byte[]> families = HBaseReadWrite.columnFamilies.get(tableName);
+      HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
+  for (byte[] family : families) {
+HColumnDescriptor columnDesc = new HColumnDescriptor(family);
+desc.addFamily(columnDesc);
+  }
+  admin.createTable(desc);
+}
+admin.close();
+if (conf != null) {
+  HBaseReadWrite.getInstance(conf);
+}
+  }
+}
\ No newline at end of file
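For context, the intended call site is a test's setup path, once a mini HBase
cluster is up; schematically (cluster bring-up abbreviated, names as in the
class above):

  HBaseTestingUtility util = new HBaseTestingUtility();
  util.startMiniCluster();
  HBaseAdmin admin = new HBaseAdmin(util.getConfiguration());
  HiveConf conf = new HiveConf();
  HBaseStoreTestUtil.initHBaseMetastore(admin, conf);   // creates all HBMS_* tables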



[13/50] [abbrv] hive git commit: HIVE-11389 hbase import should allow partial imports and should work in parallel (gates)

2015-09-21 Thread daijy
HIVE-11389 hbase import should allow partial imports and should work in parallel (gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0fa45e4a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0fa45e4a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0fa45e4a

Branch: refs/heads/master
Commit: 0fa45e4a562fc2586b1ef06a88e9c186a0835316
Parents: 7e7f461
Author: Alan Gates 
Authored: Fri Jul 31 11:07:00 2015 -0700
Committer: Alan Gates 
Committed: Fri Jul 31 11:07:00 2015 -0700

--
 .../hive/metastore/hbase/TestHBaseImport.java   | 557 +--
 .../hive/metastore/hbase/HBaseImport.java   | 435 +--
 2 files changed, 899 insertions(+), 93 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0fa45e4a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
index 7bdff18..1ac10f0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseImport.java
@@ -26,6 +26,9 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.FunctionType;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
@@ -38,12 +41,16 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.ExpectedException;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 /**
  * Test that import from an RDBMS based metastore works
@@ -52,6 +59,13 @@ public class TestHBaseImport extends HBaseIntegrationTests {
 
   private static final Log LOG = LogFactory.getLog(TestHBaseStoreIntegration.class.getName());
 
+  private static final String[] tableNames = new String[] {"allnonparttable", "allparttable"};
+  private static final String[] partVals = new String[] {"na", "emea", "latam", "apac"};
+  private static final String[] funcNames = new String[] {"allfunc1", "allfunc2"};
+
+  @Rule
+  public ExpectedException thrown = ExpectedException.none();
+
   @BeforeClass
   public static void startup() throws Exception {
 HBaseIntegrationTests.startMiniCluster();
@@ -69,25 +83,396 @@ public class TestHBaseImport extends HBaseIntegrationTests {
   }
 
   @Test
-  public void doImport() throws Exception {
-RawStore rdbms = new ObjectStore();
+  public void importAll() throws Exception {
+RawStore rdbms;
+rdbms = new ObjectStore();
+rdbms.setConf(conf);
+
+String[] dbNames = new String[] {"alldb1", "alldb2"};
+String[] roles = new String[] {"allrole1", "allrole2"};
+String[] tokenIds = new String[] {"alltokenid1", "alltokenid2"};
+String[] tokens = new String[] {"alltoken1", "alltoken2"};
+String[] masterKeys = new String[] {"allmk1", "allmk2"};
+int now = (int)System.currentTimeMillis() / 1000;
+
+setupObjectStore(rdbms, roles, dbNames, tokenIds, tokens, masterKeys, now);
+
+    int baseNumRoles = store.listRoleNames() == null ? 0 : store.listRoleNames().size();
+    int baseNumDbs = store.getAllDatabases() == null ? 0 : store.getAllDatabases().size();
+
+HBaseImport importer = new HBaseImport("-a");
+importer.setConnections(rdbms, store);
+importer.run();
+
+for (int i = 0; i < roles.length; i++) {
+  Role role = store.getRole(roles[i]);
+  Assert.assertNotNull(role);
+  Assert.assertEquals(roles[i], role.getRoleName());
+}
+// Make sure there aren't any extra roles
+Assert.assertEquals(baseNumRoles + 2, store.listRoleNames().size());
+
+for (int i = 0; i < dbNames.length; i++) {
+  Database db = store.getDatabase(dbNames[i]);
+  Assert.assertNotNull(db);
+  // check one random value in the db rather than every value
+  Assert.assertEquals("file:/tmp", db.getLocationUri());
+
+  Table table = 
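The message is truncated above mid-assertion. Note the new @Rule/ExpectedException
pair added in the imports: it supports negative tests for partial imports. A
schematic example in the same style (the exception type and the bogus database
name are assumptions, not taken from the visible part of the patch):

  @Test
  public void importBogusDb() throws Exception {
    thrown.expect(MetaException.class);
    HBaseImport importer = new HBaseImport("-d", "nosuchdb");
    importer.setConnections(rdbms, store);
    importer.run();
  }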

[30/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
--
diff --git a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
index 9c73767..8770e85 100644
--- a/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
+++ b/metastore/src/gen/thrift/gen-php/metastore/ThriftHiveMetastore.php
@@ -1010,6 +1010,26 @@ interface ThriftHiveMetastoreIf extends \FacebookServiceIf {
   /**
*/
   public function flushCache();
+  /**
+   * @param \metastore\GetFileMetadataByExprRequest $req
+   * @return \metastore\GetFileMetadataByExprResult
+   */
+  public function get_file_metadata_by_expr(\metastore\GetFileMetadataByExprRequest $req);
+  /**
+   * @param \metastore\GetFileMetadataRequest $req
+   * @return \metastore\GetFileMetadataResult
+   */
+  public function get_file_metadata(\metastore\GetFileMetadataRequest $req);
+  /**
+   * @param \metastore\PutFileMetadataRequest $req
+   * @return \metastore\PutFileMetadataResult
+   */
+  public function put_file_metadata(\metastore\PutFileMetadataRequest $req);
+  /**
+   * @param \metastore\ClearFileMetadataRequest $req
+   * @return \metastore\ClearFileMetadataResult
+   */
+  public function clear_file_metadata(\metastore\ClearFileMetadataRequest $req);
 }
 
 class ThriftHiveMetastoreClient extends \FacebookServiceClient implements \metastore\ThriftHiveMetastoreIf {
@@ -8191,196 +8211,221 @@ class ThriftHiveMetastoreClient extends \FacebookServiceClient implements \metas
 return;
   }
 
-}
-
-// HELPER FUNCTIONS AND STRUCTURES
+  public function get_file_metadata_by_expr(\metastore\GetFileMetadataByExprRequest $req)
+  {
+    $this->send_get_file_metadata_by_expr($req);
+    return $this->recv_get_file_metadata_by_expr();
+  }
 
-class ThriftHiveMetastore_getMetaConf_args {
-  static $_TSPEC;
+  public function send_get_file_metadata_by_expr(\metastore\GetFileMetadataByExprRequest $req)
+  {
+    $args = new \metastore\ThriftHiveMetastore_get_file_metadata_by_expr_args();
+    $args->req = $req;
+    $bin_accel = ($this->output_ instanceof TBinaryProtocolAccelerated) && function_exists('thrift_protocol_write_binary');
+    if ($bin_accel)
+    {
+      thrift_protocol_write_binary($this->output_, 'get_file_metadata_by_expr', TMessageType::CALL, $args, $this->seqid_, $this->output_->isStrictWrite());
+    }
+    else
+    {
+      $this->output_->writeMessageBegin('get_file_metadata_by_expr', TMessageType::CALL, $this->seqid_);
+      $args->write($this->output_);
+      $this->output_->writeMessageEnd();
+      $this->output_->getTransport()->flush();
+    }
+  }
 
-  /**
-   * @var string
-   */
-  public $key = null;
+  public function recv_get_file_metadata_by_expr()
+  {
+    $bin_accel = ($this->input_ instanceof TBinaryProtocolAccelerated) && function_exists('thrift_protocol_read_binary');
+    if ($bin_accel) $result = thrift_protocol_read_binary($this->input_, '\metastore\ThriftHiveMetastore_get_file_metadata_by_expr_result', $this->input_->isStrictRead());
+else
+{
+  $rseqid = 0;
+  $fname = null;
+  $mtype = 0;
 
-  public function __construct($vals=null) {
-if (!isset(self::$_TSPEC)) {
-  self::$_TSPEC = array(
-1 => array(
-  'var' => 'key',
-  'type' => TType::STRING,
-  ),
-);
-}
-if (is_array($vals)) {
-  if (isset($vals['key'])) {
-$this->key = $vals['key'];
+  $this->input_->readMessageBegin($fname, $mtype, $rseqid);
+  if ($mtype == TMessageType::EXCEPTION) {
+$x = new TApplicationException();
+$x->read($this->input_);
+$this->input_->readMessageEnd();
+throw $x;
   }
+      $result = new \metastore\ThriftHiveMetastore_get_file_metadata_by_expr_result();
+  $result->read($this->input_);
+  $this->input_->readMessageEnd();
 }
+if ($result->success !== null) {
+  return $result->success;
+}
+throw new \Exception("get_file_metadata_by_expr failed: unknown result");
   }
 
-  public function getName() {
-return 'ThriftHiveMetastore_getMetaConf_args';
+  public function get_file_metadata(\metastore\GetFileMetadataRequest $req)
+  {
+$this->send_get_file_metadata($req);
+return $this->recv_get_file_metadata();
   }
 
-  public function read($input)
+  public function send_get_file_metadata(\metastore\GetFileMetadataRequest $req)
   {
-$xfer = 0;
-$fname = null;
-$ftype = 0;
-$fid = 0;
-$xfer += $input->readStructBegin($fname);
-while (true)
+$args = new \metastore\ThriftHiveMetastore_get_file_metadata_args();
+$args->req = $req;
+    $bin_accel = ($this->output_ instanceof TBinaryProtocolAccelerated) && function_exists('thrift_protocol_write_binary');
+if ($bin_accel)
 {
-  $xfer += 
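The same four calls exist on the generated Java client, which is the more
common integration point; a sketch (request/result field names inferred from
the Thrift structs in this patch, client construction omitted):

  GetFileMetadataRequest req = new GetFileMetadataRequest();
  req.setFileIds(Arrays.asList(1L, 2L));            // file IDs to look up
  GetFileMetadataResult res = client.get_file_metadata(req);
  if (res.isIsSupported()) {
    Map<Long, ByteBuffer> metadata = res.getMetadata();
  }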

[24/50] [abbrv] hive git commit: HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/5e16d53e/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
--
diff --git a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 5c5818a..39a7278 100644
--- a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -30617,6 +30617,3675 @@ public final class HbaseMetastoreProto {
 // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Table)
   }
 
+  public interface PartitionKeyComparatorOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required string names = 1;
+    /**
+     * <code>required string names = 1;</code>
+     */
+    boolean hasNames();
+    /**
+     * <code>required string names = 1;</code>
+     */
+    java.lang.String getNames();
+    /**
+     * <code>required string names = 1;</code>
+     */
+    com.google.protobuf.ByteString getNamesBytes();
+
+    // required string types = 2;
+    /**
+     * <code>required string types = 2;</code>
+     */
+    boolean hasTypes();
+    /**
+     * <code>required string types = 2;</code>
+     */
+    java.lang.String getTypes();
+    /**
+     * <code>required string types = 2;</code>
+     */
+    com.google.protobuf.ByteString getTypesBytes();
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator> getOpList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Operator getOp(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    int getOpCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder> getOpOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Operator op = 3;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.OperatorOrBuilder getOpOrBuilder(int index);
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range> getRangeList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.Range getRange(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    int getRangeCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder> getRangeOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator.Range range = 4;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PartitionKeyComparator.RangeOrBuilder getRangeOrBuilder(int index);
+  }
+  /**
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.PartitionKeyComparator}
+   */
+  public static final class PartitionKeyComparator extends
+      com.google.protobuf.GeneratedMessage
+      implements PartitionKeyComparatorOrBuilder {
+    // Use PartitionKeyComparator.newBuilder() to construct.
+    private PartitionKeyComparator(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private PartitionKeyComparator(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final PartitionKeyComparator defaultInstance;
+public static PartitionKeyComparator getDefaultInstance() {
+  return defaultInstance;
+}
+
+public PartitionKeyComparator getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private PartitionKeyComparator(
+com.google.protobuf.CodedInputStream input,
+
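Generated messages are constructed through builders; a sketch that sets the two
required string fields declared above (the values, and the omitted Operator and
Range sub-messages, are illustrative only):

  HbaseMetastoreProto.PartitionKeyComparator cmp =
      HbaseMetastoreProto.PartitionKeyComparator.newBuilder()
          .setNames("region,ds")        // partition key names
          .setTypes("string,string")    // matching key types
          .build();
  byte[] serialized = cmp.toByteArray();   // what gets shipped to the HBase filter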

[02/50] [abbrv] hive git commit: HIVE-11294 Use HBase to cache aggregated stats (gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/c53c6f45/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
--
diff --git a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 2d9e592..314fc7f 100644
--- a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -90,6 +90,3688 @@ public final class HbaseMetastoreProto {
 // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.PrincipalType)
   }
 
+  public interface AggrStatsOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required int64 parts_found = 1;
+    /**
+     * <code>required int64 parts_found = 1;</code>
+     */
+    boolean hasPartsFound();
+    /**
+     * <code>required int64 parts_found = 1;</code>
+     */
+    long getPartsFound();
+
+    // repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
+     */
+    java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats> getColStatsList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats getColStats(int index);
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
+     */
+    int getColStatsCount();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
+     */
+    java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder> getColStatsOrBuilderList();
+    /**
+     * <code>repeated .org.apache.hadoop.hive.metastore.hbase.ColumnStats col_stats = 2;</code>
+     */
+    org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStatsOrBuilder getColStatsOrBuilder(int index);
+  }
+  /**
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.AggrStats}
+   */
+  public static final class AggrStats extends
+      com.google.protobuf.GeneratedMessage
+      implements AggrStatsOrBuilder {
+    // Use AggrStats.newBuilder() to construct.
+    private AggrStats(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      super(builder);
+      this.unknownFields = builder.getUnknownFields();
+    }
+    private AggrStats(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+private static final AggrStats defaultInstance;
+public static AggrStats getDefaultInstance() {
+  return defaultInstance;
+}
+
+public AggrStats getDefaultInstanceForType() {
+  return defaultInstance;
+}
+
+private final com.google.protobuf.UnknownFieldSet unknownFields;
+@java.lang.Override
+public final com.google.protobuf.UnknownFieldSet
+getUnknownFields() {
+  return this.unknownFields;
+}
+private AggrStats(
+com.google.protobuf.CodedInputStream input,
+com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+throws com.google.protobuf.InvalidProtocolBufferException {
+  initFields();
+  int mutable_bitField0_ = 0;
+  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+  com.google.protobuf.UnknownFieldSet.newBuilder();
+  try {
+boolean done = false;
+while (!done) {
+  int tag = input.readTag();
+  switch (tag) {
+case 0:
+  done = true;
+  break;
+default: {
+  if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+done = true;
+  }
+  break;
+}
+case 8: {
+  bitField0_ |= 0x0001;
+  partsFound_ = input.readInt64();
+  break;
+}
+case 18: {
+  if (!((mutable_bitField0_ & 0x0002) == 0x0002)) {
+                colStats_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats>();
+mutable_bitField0_ |= 0x0002;
+  }
+              colStats_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.ColumnStats.PARSER, extensionRegistry));
+  break;
+}
+  }
+}
+  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+throw e.setUnfinishedMessage(this);
+  } catch (java.io.IOException e) {
+throw new com.google.protobuf.InvalidProtocolBufferException(
+e.getMessage()).setUnfinishedMessage(this);
+  } finally {
+if (((mutable_bitField0_ & 0x0002) == 0x0002)) {
+  colStats_ = 
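Reading the message back is symmetric; a sketch using the accessors declared in
AggrStatsOrBuilder above (the byte[] source is illustrative):

  HbaseMetastoreProto.AggrStats stats =
      HbaseMetastoreProto.AggrStats.parseFrom(serializedBytes);
  long partsFound = stats.getPartsFound();   // how many partitions the cache covered
  for (HbaseMetastoreProto.ColumnStats cs : stats.getColStatsList()) {
    // per-column aggregated statistics
  }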

[25/50] [abbrv] hive git commit: HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)

2015-09-21 Thread daijy
HIVE-10289: Support filter on non-first partition key and non-string partition key (Daniel Dai reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e16d53e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e16d53e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e16d53e

Branch: refs/heads/master
Commit: 5e16d53e98e44567bbfa1b291f8a927a3e3e4b9b
Parents: 9d9dd72
Author: Daniel Dai 
Authored: Mon Aug 24 11:20:55 2015 -0700
Committer: Daniel Dai 
Committed: Mon Aug 24 11:20:55 2015 -0700

--
 .../metastore/hbase/HbaseMetastoreProto.java| 3732 +-
 .../metastore/hbase/HBaseFilterPlanUtil.java|  341 +-
 .../hive/metastore/hbase/HBaseReadWrite.java|  114 +-
 .../hadoop/hive/metastore/hbase/HBaseStore.java |   11 +-
 .../hadoop/hive/metastore/hbase/HBaseUtils.java |  129 +-
 .../metastore/hbase/PartitionKeyComparator.java |  292 ++
 .../metastore/hbase/hbase_metastore_proto.proto |   25 +
 .../hbase/TestHBaseFilterPlanUtil.java  |  278 +-
 .../BinarySortableSerDeWithEndPrefix.java   |   41 +
 9 files changed, 4702 insertions(+), 261 deletions(-)
--




[40/50] [abbrv] hive git commit: HIVE-11633 : import tool should print help by default (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
HIVE-11633 : import tool should print help by default (Sergey Shelukhin, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4d66206d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4d66206d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4d66206d

Branch: refs/heads/master
Commit: 4d66206d89f57849f08ff1d4fc7e3a48f61b6275
Parents: 129bed5
Author: Sergey Shelukhin 
Authored: Tue Aug 25 11:03:53 2015 -0700
Committer: Sergey Shelukhin 
Committed: Tue Aug 25 11:03:53 2015 -0700

--
 .../hive/metastore/hbase/HBaseImport.java   | 38 +++-
 1 file changed, 30 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4d66206d/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
index e143de7..fac8e90 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseImport.java
@@ -75,14 +75,18 @@ public class HBaseImport {
 
   static final private Log LOG = LogFactory.getLog(HBaseImport.class.getName());
 
-  public static void main(String[] args) {
+  public static int main(String[] args) {
 try {
-  HBaseImport tool = new HBaseImport(args);
+  HBaseImport tool = new HBaseImport();
+  int rv = tool.init(args);
+  if (rv != 0) return rv;
   tool.run();
 } catch (Exception e) {
   System.err.println("Caught exception " + e.getClass().getName() + " with 
message <" +
   e.getMessage() + ">");
+  return 1;
 }
+return 0;
   }
 
   private ThreadLocal<RawStore> rdbmsStore = new ThreadLocal<RawStore>() {
@@ -121,8 +125,14 @@ public class HBaseImport {
   private int parallel;
   private int batchSize;
 
+  private HBaseImport() {}
+
   @VisibleForTesting
-  HBaseImport(String... args) throws ParseException {
+  public HBaseImport(String... args) throws ParseException {
+init(args);
+  }
+
+  private int init(String... args) throws ParseException {
 Options options = new Options();
 
 doAll = doKerberos = false;
@@ -185,38 +195,45 @@ public class HBaseImport {
 
 // Process help, if it was asked for, this must be done first
 if (cli.hasOption('h')) {
-  HelpFormatter formatter = new HelpFormatter();
-  formatter.printHelp("hbaseschematool", options);
-      // returning here results in nothing else happening, because none of the other flags have
-      // been set.
-  return;
+  printHelp(options);
+  return 1;
 }
 
+boolean hasCmd = false;
 // Now process the other command line args
 if (cli.hasOption('a')) {
+  hasCmd = true;
   doAll = true;
 }
 if (cli.hasOption('b')) {
   batchSize = Integer.valueOf(cli.getOptionValue('b'));
 }
 if (cli.hasOption('d')) {
+  hasCmd = true;
   dbsToImport = Arrays.asList(cli.getOptionValues('d'));
 }
 if (cli.hasOption('f')) {
+  hasCmd = true;
   functionsToImport = Arrays.asList(cli.getOptionValues('f'));
 }
 if (cli.hasOption('p')) {
   parallel = Integer.valueOf(cli.getOptionValue('p'));
 }
 if (cli.hasOption('r')) {
+  hasCmd = true;
   rolesToImport = Arrays.asList(cli.getOptionValues('r'));
 }
 if (cli.hasOption('k')) {
   doKerberos = true;
 }
 if (cli.hasOption('t')) {
+  hasCmd = true;
   tablesToImport = Arrays.asList(cli.getOptionValues('t'));
 }
+if (!hasCmd) {
+  printHelp(options);
+  return 1;
+}
 
 dbs = new ArrayList<>();
-    // We don't want to bound the size of the table queue because we keep it all in memory
@@ -225,6 +242,11 @@ public class HBaseImport {
 
 // Bound the size of this queue so we don't get too much in memory.
 partQueue = new ArrayBlockingQueue<>(parallel * 2);
+return 0;
+  }
+
+  private void printHelp(Options options) {
+(new HelpFormatter()).printHelp("hbaseschematool", options);
   }
 
   @VisibleForTesting
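With this change a bare invocation prints usage instead of silently doing
nothing; a schematic check of the new exit codes (grounded in the int return
type that the patch gives main above):

  int rc = HBaseImport.main(new String[] {});      // no command flag: help text, rc == 1
  rc = HBaseImport.main(new String[] {"-a"});      // import everything, rc == 0 on success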



[22/50] [abbrv] hive git commit: HIVE-11588 : merge master into branch (Sergey Shelukhin)

2015-09-21 Thread daijy
HIVE-11588 : merge master into branch (Sergey Shelukhin)

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9d9dd72a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9d9dd72a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9d9dd72a

Branch: refs/heads/master
Commit: 9d9dd72a06ee2db379dbbae3561d172223d7c96d
Parents: 2fe6086 3b6825b
Author: Sergey Shelukhin 
Authored: Mon Aug 17 15:20:25 2015 -0700
Committer: Sergey Shelukhin 
Committed: Mon Aug 17 15:20:25 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   6 +
 data/conf/hive-log4j2.xml   |   5 +-
 data/conf/hive-site.xml |   6 -
 .../deployers/config/hive/hive-site.mysql.xml   |  22 +++
 .../hive/hcatalog/streaming/TestStreaming.java  |  54 +-
 .../TestOperationLoggingAPIWithMr.java  |   2 -
 .../TestOperationLoggingAPIWithTez.java |   2 -
 .../operation/TestOperationLoggingLayout.java   |   2 -
 .../hadoop/hive/metastore/HiveMetaStore.java|  18 ++
 .../hive/metastore/HouseKeeperService.java  |  39 
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 153 +++-
 .../hive/metastore/txn/TestTxnHandler.java  |   7 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  15 +-
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java | 183 ++-
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |  97 +++---
 .../apache/hadoop/hive/ql/io/orc/OrcSplit.java  |  25 ++-
 .../hadoop/hive/ql/lib/DefaultGraphWalker.java  |   2 +-
 .../calcite/reloperators/HiveFilter.java|   2 +-
 .../calcite/translator/ExprNodeConverter.java   |  26 ---
 .../apache/hadoop/hive/ql/parse/ASTNode.java|  18 +-
 .../hive/ql/txn/AcidHouseKeeperService.java | 104 +++
 .../hive/ql/txn/compactor/CompactorMR.java  |  19 +-
 .../hadoop/hive/ql/txn/compactor/Initiator.java |  10 +-
 .../hadoop/hive/ql/txn/compactor/Worker.java|   2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  |  21 +++
 .../apache/hadoop/hive/ql/TestTxnCommands2.java |   1 +
 .../apache/hadoop/hive/ql/io/TestAcidUtils.java |  27 +--
 .../hive/ql/io/orc/TestInputOutputFormat.java   |   6 +-
 .../hive/ql/lockmgr/TestDbTxnManager.java   |  35 +++-
 .../hadoop/hive/shims/Hadoop20SShims.java   |  11 ++
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |  66 +++
 .../apache/hadoop/hive/shims/HadoopShims.java   |  15 ++
 32 files changed, 782 insertions(+), 219 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9d9dd72a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/9d9dd72a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/9d9dd72a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--



[04/50] [abbrv] hive git commit: HIVE-11300 HBase metastore: Support token and master key methods (gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/a310524c/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index 332e30a..ae73feb 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -80,6 +80,8 @@ class HBaseReadWrite {
   @VisibleForTesting final static String PART_TABLE = "HBMS_PARTITIONS";
   @VisibleForTesting final static String ROLE_TABLE = "HBMS_ROLES";
   @VisibleForTesting final static String SD_TABLE = "HBMS_SDS";
+  @VisibleForTesting final static String SECURITY_TABLE = "HBMS_SECURITY";
+  @VisibleForTesting final static String SEQUENCES_TABLE = "HBMS_SEQUENCES";
   @VisibleForTesting final static String TABLE_TABLE = "HBMS_TBLS";
   @VisibleForTesting final static String USER_TO_ROLE_TABLE = "HBMS_USER_TO_ROLE";
   @VisibleForTesting final static byte[] CATALOG_CF = "c".getBytes(HBaseUtils.ENCODING);
@@ -90,7 +92,7 @@ class HBaseReadWrite {
*/
   final static String[] tableNames = { AGGR_STATS_TABLE, DB_TABLE, FUNC_TABLE, GLOBAL_PRIVS_TABLE,
                                        PART_TABLE, USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE,
-                                       TABLE_TABLE  };
+                                       SECURITY_TABLE, SEQUENCES_TABLE, TABLE_TABLE};
   final static Map<String, List<byte[]>> columnFamilies =
       new HashMap<String, List<byte[]>> (tableNames.length);
 
@@ -103,6 +105,8 @@ class HBaseReadWrite {
 columnFamilies.put(USER_TO_ROLE_TABLE, Arrays.asList(CATALOG_CF));
 columnFamilies.put(ROLE_TABLE, Arrays.asList(CATALOG_CF));
 columnFamilies.put(SD_TABLE, Arrays.asList(CATALOG_CF));
+columnFamilies.put(SECURITY_TABLE, Arrays.asList(CATALOG_CF));
+columnFamilies.put(SEQUENCES_TABLE, Arrays.asList(CATALOG_CF));
 columnFamilies.put(TABLE_TABLE, Arrays.asList(CATALOG_CF, STATS_CF));
   }
 
@@ -110,12 +114,16 @@ class HBaseReadWrite {
    * Stores the bloom filter for the aggregated stats, to determine what partitions are in this
    * aggregate.
    */
+  final static byte[] MASTER_KEY_SEQUENCE = "mk".getBytes(HBaseUtils.ENCODING);
   final static byte[] AGGR_STATS_BLOOM_COL = "b".getBytes(HBaseUtils.ENCODING);
   private final static byte[] CATALOG_COL = "c".getBytes(HBaseUtils.ENCODING);
   private final static byte[] ROLES_COL = "roles".getBytes(HBaseUtils.ENCODING);
   private final static byte[] REF_COUNT_COL = "ref".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] DELEGATION_TOKEN_COL = "dt".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] MASTER_KEY_COL = "mk".getBytes(HBaseUtils.ENCODING);
   private final static byte[] AGGR_STATS_STATS_COL = "s".getBytes(HBaseUtils.ENCODING);
   private final static byte[] GLOBAL_PRIVS_KEY = "gp".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] SEQUENCES_KEY = "seq".getBytes(HBaseUtils.ENCODING);
   private final static int TABLES_TO_CACHE = 10;
   // False positives are very bad here because they cause us to invalidate entries we shouldn't.
   // Space used and # of hash functions grows in proportion to ln of num bits so a 10x increase
@@ -226,7 +234,7 @@ class HBaseReadWrite {
 sdHits = new Counter("storage descriptor cache hits");
 sdMisses = new Counter("storage descriptor cache misses");
 sdOverflows = new Counter("storage descriptor cache overflows");
-    counters = new ArrayList<Counter>();
+counters = new ArrayList<>();
 counters.add(tableHits);
 counters.add(tableMisses);
 counters.add(tableOverflows);
@@ -241,18 +249,16 @@ class HBaseReadWrite {
     // (storage descriptors are shared, so 99% should be the same for a given table)
 int sdsCacheSize = totalCatalogObjectsToCache / 100;
 if (conf.getBoolean(NO_CACHE_CONF, false)) {
-      tableCache = new BogusObjectCache<ObjectPair<String, String>, Table>();
-      sdCache = new BogusObjectCache<ByteArrayWrapper, StorageDescriptor>();
+      tableCache = new BogusObjectCache<>();
+      sdCache = new BogusObjectCache<>();
       partCache = new BogusPartitionCache();
     } else {
-      tableCache = new ObjectCache<ObjectPair<String, String>, Table>(TABLES_TO_CACHE, tableHits,
-          tableMisses, tableOverflows);
-      sdCache = new ObjectCache<ByteArrayWrapper, StorageDescriptor>(sdsCacheSize, sdHits,
-          sdMisses, sdOverflows);
+      tableCache = new ObjectCache<>(TABLES_TO_CACHE, tableHits, tableMisses, tableOverflows);
+      sdCache = new ObjectCache<>(sdsCacheSize, sdHits, sdMisses, sdOverflows);
       partCache = new PartitionCache(totalCatalogObjectsToCache, partHits, partMisses, partOverflows);
 }
 statsCache = 
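The two new tables follow the same single-row, single-column pattern as the
existing catalog tables; a sketch of how the sequence cell named above might be
read with the raw HBase API (the table handle and the stored value's encoding
are illustrative, not the committed code):

  Get get = new Get(SEQUENCES_KEY);
  get.addColumn(CATALOG_CF, MASTER_KEY_SEQUENCE);
  Result result = htab.get(get);
  byte[] serialized = result.getValue(CATALOG_CF, MASTER_KEY_SEQUENCE);
  long nextMasterKeySeq =
      (serialized == null) ? 0 : Long.parseLong(new String(serialized, HBaseUtils.ENCODING));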

[39/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/129bed52
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/129bed52
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/129bed52

Branch: refs/heads/master
Commit: 129bed52e65b169ddb62f323fc7427df5bb50f19
Parents: 5e16d53
Author: Sergey Shelukhin 
Authored: Mon Aug 24 11:40:09 2015 -0700
Committer: Sergey Shelukhin 
Committed: Mon Aug 24 11:40:09 2015 -0700

--
 metastore/if/hive_metastore.thrift  |   53 +
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.cpp  | 6792 +++--
 .../gen/thrift/gen-cpp/ThriftHiveMetastore.h|  556 ++
 .../ThriftHiveMetastore_server.skeleton.cpp |   20 +
 .../gen/thrift/gen-cpp/hive_metastore_types.cpp | 1294 ++-
 .../gen/thrift/gen-cpp/hive_metastore_types.h   |  371 +
 .../hive/metastore/api/AbortTxnRequest.java |2 +-
 .../metastore/api/AddDynamicPartitions.java |2 +-
 .../metastore/api/AddPartitionsRequest.java |2 +-
 .../hive/metastore/api/AddPartitionsResult.java |2 +-
 .../hadoop/hive/metastore/api/AggrStats.java|2 +-
 .../metastore/api/AlreadyExistsException.java   |2 +-
 .../metastore/api/BinaryColumnStatsData.java|2 +-
 .../metastore/api/BooleanColumnStatsData.java   |2 +-
 .../hive/metastore/api/CheckLockRequest.java|2 +-
 .../metastore/api/ClearFileMetadataRequest.java |  438 +
 .../metastore/api/ClearFileMetadataResult.java  |  283 +
 .../hive/metastore/api/ColumnStatistics.java|2 +-
 .../metastore/api/ColumnStatisticsDesc.java |2 +-
 .../hive/metastore/api/ColumnStatisticsObj.java |2 +-
 .../hive/metastore/api/CommitTxnRequest.java|2 +-
 .../hive/metastore/api/CompactionRequest.java   |2 +-
 .../api/ConfigValSecurityException.java |2 +-
 .../api/CurrentNotificationEventId.java |2 +-
 .../hadoop/hive/metastore/api/Database.java |2 +-
 .../apache/hadoop/hive/metastore/api/Date.java  |2 +-
 .../hive/metastore/api/DateColumnStatsData.java |2 +-
 .../hadoop/hive/metastore/api/Decimal.java  |2 +-
 .../metastore/api/DecimalColumnStatsData.java   |2 +-
 .../metastore/api/DoubleColumnStatsData.java|2 +-
 .../hive/metastore/api/DropPartitionsExpr.java  |2 +-
 .../metastore/api/DropPartitionsRequest.java|2 +-
 .../metastore/api/DropPartitionsResult.java |2 +-
 .../hive/metastore/api/EnvironmentContext.java  |2 +-
 .../hadoop/hive/metastore/api/FieldSchema.java  |2 +-
 .../hive/metastore/api/FireEventRequest.java|2 +-
 .../hive/metastore/api/FireEventResponse.java   |2 +-
 .../hadoop/hive/metastore/api/Function.java |2 +-
 .../metastore/api/GetAllFunctionsResponse.java  |   38 +-
 .../api/GetFileMetadataByExprRequest.java   |  548 ++
 .../api/GetFileMetadataByExprResult.java|  703 ++
 .../metastore/api/GetFileMetadataRequest.java   |  438 +
 .../metastore/api/GetFileMetadataResult.java|  540 +
 .../metastore/api/GetOpenTxnsInfoResponse.java  |2 +-
 .../hive/metastore/api/GetOpenTxnsResponse.java |2 +-
 .../api/GetPrincipalsInRoleRequest.java |2 +-
 .../api/GetPrincipalsInRoleResponse.java|2 +-
 .../api/GetRoleGrantsForPrincipalRequest.java   |2 +-
 .../api/GetRoleGrantsForPrincipalResponse.java  |2 +-
 .../api/GrantRevokePrivilegeRequest.java|2 +-
 .../api/GrantRevokePrivilegeResponse.java   |2 +-
 .../metastore/api/GrantRevokeRoleRequest.java   |2 +-
 .../metastore/api/GrantRevokeRoleResponse.java  |2 +-
 .../hive/metastore/api/HeartbeatRequest.java|2 +-
 .../metastore/api/HeartbeatTxnRangeRequest.java |2 +-
 .../api/HeartbeatTxnRangeResponse.java  |2 +-
 .../hive/metastore/api/HiveObjectPrivilege.java |2 +-
 .../hive/metastore/api/HiveObjectRef.java   |2 +-
 .../apache/hadoop/hive/metastore/api/Index.java |2 +-
 .../api/IndexAlreadyExistsException.java|2 +-
 .../metastore/api/InsertEventRequestData.java   |2 +-
 .../metastore/api/InvalidInputException.java|2 +-
 .../metastore/api/InvalidObjectException.java   |2 +-
 .../api/InvalidOperationException.java  |2 +-
 .../api/InvalidPartitionException.java  |2 +-
 .../hive/metastore/api/LockComponent.java   |2 +-
 .../hadoop/hive/metastore/api/LockRequest.java  |2 +-
 .../hadoop/hive/metastore/api/LockResponse.java |2 +-
 .../hive/metastore/api/LongColumnStatsData.java |2 +-
 .../hive/metastore/api/MetaException.java   |2 +-
 .../hive/metastore/api/MetadataPpdResult.java   |  508 +
 .../hive/metastore/api/NoSuchLockException.java |2 +-
 

[27/50] [abbrv] hive git commit: HIVE-11552 : implement basic methods for getting/putting file metadata (Sergey Shelukhin, reviewed by Alan Gates)

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/129bed52/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
--
diff --git a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
index 0b80390..7fcdd7e 100644
--- a/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
+++ b/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
@@ -9750,6 +9750,726 @@ class FireEventResponse:
   def __ne__(self, other):
 return not (self == other)
 
+class MetadataPpdResult:
+  """
+  Attributes:
+   - metadata
+   - includeBitset
+  """
+
+  thrift_spec = (
+None, # 0
+(1, TType.STRING, 'metadata', None, None, ), # 1
+(2, TType.STRING, 'includeBitset', None, None, ), # 2
+  )
+
+  def __init__(self, metadata=None, includeBitset=None,):
+self.metadata = metadata
+self.includeBitset = includeBitset
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+  return
+iprot.readStructBegin()
+while True:
+  (fname, ftype, fid) = iprot.readFieldBegin()
+  if ftype == TType.STOP:
+break
+  if fid == 1:
+if ftype == TType.STRING:
+  self.metadata = iprot.readString();
+else:
+  iprot.skip(ftype)
+  elif fid == 2:
+if ftype == TType.STRING:
+  self.includeBitset = iprot.readString();
+else:
+  iprot.skip(ftype)
+  else:
+iprot.skip(ftype)
+  iprot.readFieldEnd()
+iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+  return
+oprot.writeStructBegin('MetadataPpdResult')
+if self.metadata is not None:
+  oprot.writeFieldBegin('metadata', TType.STRING, 1)
+  oprot.writeString(self.metadata)
+  oprot.writeFieldEnd()
+if self.includeBitset is not None:
+  oprot.writeFieldBegin('includeBitset', TType.STRING, 2)
+  oprot.writeString(self.includeBitset)
+  oprot.writeFieldEnd()
+oprot.writeFieldStop()
+oprot.writeStructEnd()
+
+  def validate(self):
+    if self.metadata is None:
+      raise TProtocol.TProtocolException(message='Required field metadata is unset!')
+    if self.includeBitset is None:
+      raise TProtocol.TProtocolException(message='Required field includeBitset is unset!')
+return
+
+
+  def __hash__(self):
+value = 17
+value = (value * 31) ^ hash(self.metadata)
+value = (value * 31) ^ hash(self.includeBitset)
+return value
+
+  def __repr__(self):
+L = ['%s=%r' % (key, value)
+  for key, value in self.__dict__.iteritems()]
+return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+return not (self == other)
+
+class GetFileMetadataByExprResult:
+  """
+  Attributes:
+   - metadata
+   - isSupported
+   - unknownFileIds
+  """
+
+  thrift_spec = (
+None, # 0
+    (1, TType.MAP, 'metadata', (TType.I64,None,TType.STRUCT,(MetadataPpdResult, MetadataPpdResult.thrift_spec)), None, ), # 1
+(2, TType.BOOL, 'isSupported', None, None, ), # 2
+(3, TType.LIST, 'unknownFileIds', (TType.I64,None), None, ), # 3
+  )
+
+  def __init__(self, metadata=None, isSupported=None, unknownFileIds=None,):
+self.metadata = metadata
+self.isSupported = isSupported
+self.unknownFileIds = unknownFileIds
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+  return
+iprot.readStructBegin()
+while True:
+  (fname, ftype, fid) = iprot.readFieldBegin()
+  if ftype == TType.STOP:
+break
+  if fid == 1:
+if ftype == TType.MAP:
+  self.metadata = {}
+  (_ktype463, _vtype464, _size462 ) = iprot.readMapBegin()
+  for _i466 in xrange(_size462):
+_key467 = iprot.readI64();
+_val468 = MetadataPpdResult()
+_val468.read(iprot)
+self.metadata[_key467] = _val468
+  iprot.readMapEnd()
+else:
+  iprot.skip(ftype)
+  elif fid == 2:
+if ftype == TType.BOOL:
+  self.isSupported = iprot.readBool();
+else:
+  iprot.skip(ftype)
+  elif fid == 3:
+ 

[42/50] [abbrv] hive git commit: HIVE-11654 After HIVE-10289, HBase metastore tests failing (Daniel Dai via gates)

2015-09-21 Thread daijy
HIVE-11654 After HIVE-10289, HBase metastore tests failing (Daniel Dai via gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e150af94
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e150af94
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e150af94

Branch: refs/heads/master
Commit: e150af9457079c87c267094f3861528286e951ea
Parents: f014f0d
Author: Alan Gates 
Authored: Fri Aug 28 10:48:35 2015 -0700
Committer: Alan Gates 
Committed: Fri Aug 28 10:48:35 2015 -0700

--
 .../TestHBaseAggrStatsCacheIntegration.java |  4 +-
 .../hive/metastore/hbase/HBaseReadWrite.java| 36 -
 .../hadoop/hive/metastore/hbase/HBaseStore.java | 79 ++--
 .../hadoop/hive/metastore/hbase/HBaseUtils.java | 36 -
 .../hive/metastore/hbase/TestHBaseStore.java| 73 +-
 5 files changed, 144 insertions(+), 84 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e150af94/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
--
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
index ad76b2e..899fee1 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseAggrStatsCacheIntegration.java
@@ -140,7 +140,7 @@ public class TestHBaseAggrStatsCacheIntegration extends HBaseIntegrationTests {
 Checker statChecker = new Checker() {
   @Override
   public void checkStats(AggrStats aggrStats) throws Exception {
-Assert.assertEquals(4, aggrStats.getPartsFound());
+Assert.assertEquals(2, aggrStats.getPartsFound());
 Assert.assertEquals(2, aggrStats.getColStatsSize());
 ColumnStatisticsObj cso = aggrStats.getColStats().get(0);
 Assert.assertEquals("col1", cso.getColName());
@@ -152,7 +152,7 @@ public class TestHBaseAggrStatsCacheIntegration extends HBaseIntegrationTests {
 
 cso = aggrStats.getColStats().get(1);
 Assert.assertEquals("col2", cso.getColName());
-Assert.assertEquals("string", cso.getColType());
+Assert.assertEquals("varchar", cso.getColType());
 StringColumnStatsData scsd = cso.getStatsData().getStringStats();
 Assert.assertEquals(10.3, scsd.getAvgColLen(), 0.1);
 Assert.assertEquals(2000, scsd.getMaxColLen());

http://git-wip-us.apache.org/repos/asf/hive/blob/e150af94/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
index 8a1448c..d38c561 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
@@ -550,7 +550,7 @@ public class HBaseReadWrite {
* @param newPart partitiion to replace it with
* @throws IOException
*/
-  void replacePartition(Partition oldPart, Partition newPart) throws IOException {
+  void replacePartition(Partition oldPart, Partition newPart, List<String> partTypes) throws IOException {
 byte[] hash;
 byte[] oldHash = HBaseUtils.hashStorageDescriptor(oldPart.getSd(), md);
 byte[] newHash = HBaseUtils.hashStorageDescriptor(newPart.getSd(), md);
@@ -565,7 +565,7 @@ public class HBaseReadWrite {
 store(PART_TABLE, serialized[0], CATALOG_CF, CATALOG_COL, serialized[1]);
 partCache.put(newPart.getDbName(), newPart.getTableName(), newPart);
 if (!oldPart.getTableName().equals(newPart.getTableName())) {
-      deletePartition(oldPart.getDbName(), oldPart.getTableName(), oldPart.getValues());
+      deletePartition(oldPart.getDbName(), oldPart.getTableName(), partTypes, oldPart.getValues());
 }
   }
 
@@ -592,7 +592,7 @@ public class HBaseReadWrite {
 conn.flush(htab);
   }
 
-  void replacePartitions(List<Partition> oldParts, List<Partition> newParts) throws IOException {
+  void replacePartitions(List<Partition> oldParts, List<Partition> newParts, List<String> oldPartTypes) throws IOException {
     if (oldParts.size() != newParts.size()) {
       throw new RuntimeException("Number of old and new partitions must match.");
     }
@@ -616,7 +616,7 @@ public class HBaseReadWrite {
   partCache.put(newParts.get(i).getDbName(), 

[07/50] [abbrv] hive git commit: HIVE-11343 Merge branch 'master' into hbase-metastore

2015-09-21 Thread daijy
http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/itests/hive-unit/pom.xml
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/metastore/pom.xml
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
--

http://git-wip-us.apache.org/repos/asf/hive/blob/61db7b80/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --cc metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index e5a32fb,39ab9e7..d2177a5
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@@ -299,8 -338,27 +339,28 @@@ public class ObjectStore implements Raw
  " created in the thread with id: " + Thread.currentThread().getId());
}
  
+   /**
+    * Creates the proxy used to evaluate expressions. This is here to prevent circular
+    * dependency - ql -> metastore client -> metastore server -> ql. If server and
+* client are split, this can be removed.
+* @param conf Configuration.
+* @return The partition expression proxy.
+*/
+   private static PartitionExpressionProxy createExpressionProxy(Configuration conf) {
+     String className = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS);
+ try {
+   @SuppressWarnings("unchecked")
+   Class clazz =
+   (Class)MetaStoreUtils.getClass(className);
+   return MetaStoreUtils.newInstance(
+   clazz, new Class[0], new Object[0]);
+ } catch (MetaException e) {
+   LOG.error("Error loading PartitionExpressionProxy", e);
+   throw new RuntimeException("Error loading PartitionExpressionProxy: " + 
e.getMessage());
+ }
+   }
  
 +
/**
 * Properties specified in hive-default.xml override the properties 
specified
 * in jpox.properties.
@@@ -3258,8 -3426,9 +3350,9 @@@
  if (roleMember.size() > 0) {
pm.deletePersistentAll(roleMember);
  }
+ queryWrapper.close();
  // then remove all the grants
-        List<MGlobalPrivilege> userGrants = listPrincipalGlobalGrants(
+        List<MGlobalPrivilege> userGrants = listPrincipalMGlobalGrants(
  mRol.getRoleName(), PrincipalType.ROLE);
  if (userGrants.size() > 0) {
pm.deletePersistentAll(userGrants);
@@@ -3344,26 -3521,27 +3445,27 @@@
}
  
@SuppressWarnings("unchecked")
 -  @Override
 -  public List listRoles(String principalName, PrincipalType 
principalType) {
 +  public List listMRoles(String principalName,
 +  PrincipalType principalType) {
  boolean success = false;
- List mRoleMember = null;
+ Query query = null;
+ List mRoleMember = new ArrayList();
+ 
  try {
-   openTransaction();
LOG.debug("Executing listRoles");
-   Query query = pm
-   .newQuery(
-   MRoleMap.class,
-   "principalName == t1 && principalType == t2");
-   query
-   .declareParameters("java.lang.String t1, java.lang.String t2");
+ 
+   openTransaction();
+   query = pm.newQuery(MRoleMap.class, "principalName == t1 && 
principalType == t2");
+   query.declareParameters("java.lang.String t1, java.lang.String t2");
query.setUnique(false);
-   mRoleMember = (List) query.executeWithArray(
-   principalName, principalType.toString());
-   LOG.debug("Done executing query for listMSecurityUserRoleMap");
-   pm.retrieveAll(mRoleMember);
+   List mRoles =
+   (List) query.executeWithArray(principalName, 
principalType.toString());
+   pm.retrieveAll(mRoles);
success = commitTransaction();
-   LOG.debug("Done retrieving all objects for listMSecurityUserRoleMap");
+ 
+   mRoleMember.addAll(mRoles);
+ 
+   LOG.debug("Done retrieving all objects for listRoles");
  } finally {
if (!success) {
  rollbackTransaction();
@@@ -3372,60 -3553,18 +3477,56 @@@
  
  if (principalType == PrincipalType.USER) {
// All users belong to public role implicitly, add that role
-   if 

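For illustration, a minimal self-contained sketch of the pattern used by createExpressionProxy above: load a configured implementation class by name via reflection, so the metastore never takes a compile-time dependency on ql. The names ProxyLoader and PartitionFilter are illustrative stand-ins, not Hive code.

import java.lang.reflect.Constructor;

public class ProxyLoader {
  // Hypothetical interface standing in for PartitionExpressionProxy.
  public interface PartitionFilter {}

  public static PartitionFilter load(String className) {
    try {
      // Resolve the configured class name and verify it implements the interface.
      Class<? extends PartitionFilter> clazz =
          Class.forName(className).asSubclass(PartitionFilter.class);
      // Instantiate through the no-arg constructor, as the diff does via
      // MetaStoreUtils.newInstance(clazz, new Class<?>[0], new Object[0]).
      Constructor<? extends PartitionFilter> ctor = clazz.getDeclaredConstructor();
      return ctor.newInstance();
    } catch (ReflectiveOperationException e) {
      // Mirror the diff's error handling: wrap in an unchecked exception.
      throw new RuntimeException("Error loading " + className + ": " + e.getMessage(), e);
    }
  }
}
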
hive git commit: HIVE-11950: WebHCat status file doesn't show UTF8 character (Daniel Dai reviewed by Thejas Nair)

2015-09-25 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master a4eb3c5d4 -> 02121a872


HIVE-11950: WebHCat status file doesn't show UTF8 character (Daniel Dai reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/02121a87
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/02121a87
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/02121a87

Branch: refs/heads/master
Commit: 02121a872bceead3cd8f84461af68985f6f48d5a
Parents: a4eb3c5
Author: Daniel Dai 
Authored: Fri Sep 25 14:01:25 2015 -0700
Committer: Daniel Dai 
Committed: Fri Sep 25 14:01:25 2015 -0700

--
 .../org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/02121a87/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index 422e75e..a5ff67e 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.hcatalog.templeton.AppConfig;
 import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
 
@@ -43,6 +44,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -544,9 +546,10 @@ public class LaunchMapper extends Mapper
     public void run() {
       PrintWriter writer = null;
       try {
-        InputStreamReader isr = new InputStreamReader(in);
+        String enc = conf.get(AppConfig.EXEC_ENCODING_NAME);
+        InputStreamReader isr = new InputStreamReader(in, enc);
         BufferedReader reader = new BufferedReader(isr);
-        writer = new PrintWriter(out);
+        writer = new PrintWriter(new OutputStreamWriter(out, enc));
 
         String line;
         while ((line = reader.readLine()) != null) {


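The fix above reduces to one rule: never wrap a child process's output in InputStreamReader, or its input in PrintWriter, without naming a charset, because the platform default silently mangles multi-byte output. A minimal sketch of that idea, hard-coding UTF-8 where LaunchMapper reads the encoding from conf.get(AppConfig.EXEC_ENCODING_NAME); EncodedCopy is an illustrative name, not Hive code:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.nio.charset.StandardCharsets;

public class EncodedCopy {
  // Copies text line by line, decoding and re-encoding with an explicit charset.
  public static void copyLines(InputStream in, OutputStream out) throws IOException {
    BufferedReader reader =
        new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
    PrintWriter writer =
        new PrintWriter(new OutputStreamWriter(out, StandardCharsets.UTF_8));
    String line;
    while ((line = reader.readLine()) != null) {
      writer.println(line);  // each line is re-encoded as UTF-8 on the way out
    }
    writer.flush();
  }
}
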

hive git commit: HIVE-11950: WebHCat status file doesn't show UTF8 character (Daniel Dai reviewed by Thejas Nair)

2015-09-25 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 bf2132218 -> 15a469528


HIVE-11950: WebHCat status file doesn't show UTF8 character (Daniel Dai reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/15a46952
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/15a46952
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/15a46952

Branch: refs/heads/branch-1
Commit: 15a46952808738f9648fbb14f48c31acbe59067c
Parents: bf21322
Author: Daniel Dai 
Authored: Fri Sep 25 14:01:25 2015 -0700
Committer: Daniel Dai 
Committed: Fri Sep 25 14:02:12 2015 -0700

--
 .../org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/15a46952/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
--
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index 422e75e..a5ff67e 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.hcatalog.templeton.AppConfig;
 import org.apache.hive.hcatalog.templeton.BadParam;
 import org.apache.hive.hcatalog.templeton.LauncherDelegator;
 
@@ -43,6 +44,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -544,9 +546,10 @@ public class LaunchMapper extends Mapper
     public void run() {
       PrintWriter writer = null;
       try {
-        InputStreamReader isr = new InputStreamReader(in);
+        String enc = conf.get(AppConfig.EXEC_ENCODING_NAME);
+        InputStreamReader isr = new InputStreamReader(in, enc);
         BufferedReader reader = new BufferedReader(isr);
-        writer = new PrintWriter(out);
+        writer = new PrintWriter(new OutputStreamWriter(out, enc));
 
         String line;
         while ((line = reader.readLine()) != null) {



hive git commit: HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)

2015-12-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/master 6cc5761b0 -> 7a1f14c4d


HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7a1f14c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7a1f14c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7a1f14c4

Branch: refs/heads/master
Commit: 7a1f14c4d9dd267012e43c1ccd051d64bc65698b
Parents: 6cc5761
Author: Daniel Dai 
Authored: Sat Dec 5 17:31:41 2015 -0800
Committer: Daniel Dai 
Committed: Sat Dec 5 17:31:59 2015 -0800

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 29 
 1 file changed, 17 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7a1f14c4/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 62b608c..d81e17a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -131,6 +131,7 @@ public class Driver implements CommandProcessor {
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
   static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private Runnable shutdownRunner = null;
 
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
@@ -400,18 +401,19 @@ public class Driver implements CommandProcessor {
   // Initialize the transaction manager.  This must be done before analyze is called.
   final HiveTxnManager txnManager = SessionState.get().initTxnMgr(conf);
   // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to release locks
-  ShutdownHookManager.addShutdownHook(
-  new Runnable() {
-@Override
-public void run() {
-  try {
-releaseLocksAndCommitOrRollback(false, txnManager);
-  } catch (LockException e) {
-LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
-e.getMessage());
-  }
-}
-  }, SHUTDOWN_HOOK_PRIORITY);
+
+  shutdownRunner = new Runnable() {
+@Override
+public void run() {
+  try {
+releaseLocksAndCommitOrRollback(false, txnManager);
+  } catch (LockException e) {
+LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
+e.getMessage());
+  }
+}
+  };
+  ShutdownHookManager.addShutdownHook(shutdownRunner, SHUTDOWN_HOOK_PRIORITY);
 
   command = new VariableSubstitution(new HiveVariableSource() {
 @Override
@@ -1950,6 +1952,9 @@ public class Driver implements CommandProcessor {
 LOG.warn("Exception when releasing locking in destroy: " +
 e.getMessage());
   }
+  if (shutdownRunner != null) {
+ShutdownHookManager.removeShutdownHook(shutdownRunner);
+  }
 }
   }
 


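The leak this patch fixes is generic to shutdown hooks: a registered hook keeps the enclosing object strongly reachable until JVM exit, so a long-running HiveServer2 accumulates one retained Driver per query. The cure is to keep a handle to the hook and deregister it on destroy. A minimal sketch using plain java.lang.Runtime instead of Hadoop's ShutdownHookManager; HookedResource and release() are illustrative names, not Hive code:

public class HookedResource {
  private Thread shutdownHook;

  public HookedResource() {
    // Registering the hook pins this instance until JVM exit.
    shutdownHook = new Thread(this::release);
    Runtime.getRuntime().addShutdownHook(shutdownHook);
  }

  private void release() {
    // Release locks, close handles, etc.
  }

  public void destroy() {
    release();
    if (shutdownHook != null) {
      // Without this removal every instance ever created stays reachable
      // from the JVM's hook table -- the growth HIVE-12583 describes.
      Runtime.getRuntime().removeShutdownHook(shutdownHook);
      shutdownHook = null;
    }
  }
}
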

hive git commit: HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)

2015-12-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-2.0 67d095d89 -> 6775ccdfb


HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6775ccdf
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6775ccdf
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6775ccdf

Branch: refs/heads/branch-2.0
Commit: 6775ccdfb803db6aed16a048e7de3842764577bf
Parents: 67d095d
Author: Daniel Dai 
Authored: Sat Dec 5 17:31:41 2015 -0800
Committer: Daniel Dai 
Committed: Sat Dec 5 17:33:37 2015 -0800

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 29 
 1 file changed, 17 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6775ccdf/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 62b608c..d81e17a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -131,6 +131,7 @@ public class Driver implements CommandProcessor {
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
   static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private Runnable shutdownRunner = null;
 
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
@@ -400,18 +401,19 @@ public class Driver implements CommandProcessor {
   // Initialize the transaction manager.  This must be done before analyze is called.
   final HiveTxnManager txnManager = SessionState.get().initTxnMgr(conf);
   // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to release locks
-  ShutdownHookManager.addShutdownHook(
-  new Runnable() {
-@Override
-public void run() {
-  try {
-releaseLocksAndCommitOrRollback(false, txnManager);
-  } catch (LockException e) {
-LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
-e.getMessage());
-  }
-}
-  }, SHUTDOWN_HOOK_PRIORITY);
+
+  shutdownRunner = new Runnable() {
+@Override
+public void run() {
+  try {
+releaseLocksAndCommitOrRollback(false, txnManager);
+  } catch (LockException e) {
+LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
+e.getMessage());
+  }
+}
+  };
+  ShutdownHookManager.addShutdownHook(shutdownRunner, SHUTDOWN_HOOK_PRIORITY);
 
   command = new VariableSubstitution(new HiveVariableSource() {
 @Override
@@ -1950,6 +1952,9 @@ public class Driver implements CommandProcessor {
 LOG.warn("Exception when releasing locking in destroy: " +
 e.getMessage());
   }
+  if (shutdownRunner != null) {
+ShutdownHookManager.removeShutdownHook(shutdownRunner);
+  }
 }
   }
 



hive git commit: HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)

2015-12-05 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 19620f7bb -> 718bf34f6


HIVE-12583: HS2 ShutdownHookManager holds extra of Driver instance (Daniel Dai, reviewed by Sergey Shelukhin)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/718bf34f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/718bf34f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/718bf34f

Branch: refs/heads/branch-1
Commit: 718bf34f6690080172d1332bcb25c1d637a4c7fe
Parents: 19620f7
Author: Daniel Dai 
Authored: Sat Dec 5 17:44:30 2015 -0800
Committer: Daniel Dai 
Committed: Sat Dec 5 17:44:30 2015 -0800

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 28 +++-
 1 file changed, 16 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/718bf34f/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index c134653..4b032cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -127,6 +127,7 @@ public class Driver implements CommandProcessor {
   static final private Log LOG = LogFactory.getLog(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
   static final int SHUTDOWN_HOOK_PRIORITY = 0;
+  private Runnable shutdownRunner = null;
 
   private static final Object compileMonitor = new Object();
 
@@ -391,18 +392,18 @@ public class Driver implements CommandProcessor {
   // Initialize the transaction manager.  This must be done before analyze is called.
   final HiveTxnManager txnManager = SessionState.get().initTxnMgr(conf);
   // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to release locks
-  ShutdownHookManager.addShutdownHook(
-  new Runnable() {
-@Override
-public void run() {
-  try {
-releaseLocksAndCommitOrRollback(ctx.getHiveLocks(), false, txnManager);
-  } catch (LockException e) {
-LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
-e.getMessage());
-  }
-}
-  }, SHUTDOWN_HOOK_PRIORITY);
+  shutdownRunner = new Runnable() {
+@Override
+public void run() {
+  try {
+releaseLocksAndCommitOrRollback(ctx.getHiveLocks(), false, txnManager);
+  } catch (LockException e) {
+LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
+e.getMessage());
+  }
+}
+  };
+  ShutdownHookManager.addShutdownHook(shutdownRunner, SHUTDOWN_HOOK_PRIORITY);
 
   command = new VariableSubstitution().substitute(conf, command);
   ctx = new Context(conf);
@@ -1891,6 +1892,9 @@ public class Driver implements CommandProcessor {
 LOG.warn("Exception when releasing locking in destroy: " +
 e.getMessage());
   }
+  if (shutdownRunner != null) {
+ShutdownHookManager.removeShutdownHook(shutdownRunner);
+  }
 }
   }
 



[3/3] hive git commit: HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)

2015-12-18 Thread daijy
HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/94964091
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/94964091
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/94964091

Branch: refs/heads/master
Commit: 949640919e47bf5c729c51ba396424f7101ff43b
Parents: 27a14d5
Author: Daniel Dai 
Authored: Fri Dec 18 12:06:16 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:06:16 2015 -0800

--
 .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/94964091/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index c5e7a5f..178796d 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -309,13 +309,16 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
 
   @Override
   public boolean isCompatibleWith(HiveConf conf) {
-if (currentMetaVars == null) {
+// Make a copy of currentMetaVars, there is a race condition that
+   // currentMetaVars might be changed during the execution of the method
+Map<String, String> currentMetaVarsCopy = currentMetaVars;
+if (currentMetaVarsCopy == null) {
   return false; // recreate
 }
 boolean compatible = true;
 for (ConfVars oneVar : HiveConf.metaVars) {
   // Since metaVars are all of different types, use string for comparison
-  String oldVar = currentMetaVars.get(oneVar.varname);
+  String oldVar = currentMetaVarsCopy.get(oneVar.varname);
   String newVar = conf.get(oneVar.varname, "");
   if (oldVar == null ||
   (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : !oldVar.equalsIgnoreCase(newVar))) {


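The pattern behind this fix generalizes: when a field can be nulled or swapped by a concurrent close(), read it once into a local and use only the local, so the method works on a consistent snapshot instead of racing the writer. A minimal sketch under assumed names (ConfigChecker, current); in Hive the field is HiveMetaStoreClient.currentMetaVars:

import java.util.Map;

public class ConfigChecker {
  private volatile Map<String, String> current;  // set on connect, nulled by close()

  public boolean isCompatibleWith(Map<String, String> candidate) {
    Map<String, String> snapshot = current;  // single read of the shared field
    if (snapshot == null) {
      return false;  // already closed; caller should reconnect
    }
    for (Map.Entry<String, String> e : snapshot.entrySet()) {
      // Values are assumed non-null; compare against the snapshot only,
      // so a concurrent close() cannot cause an NPE mid-iteration.
      if (!e.getValue().equals(candidate.get(e.getKey()))) {
        return false;
      }
    }
    return true;
  }

  public void close() {
    current = null;  // readers holding a snapshot are unaffected
  }
}
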

hive git commit: HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)

2015-12-18 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-1 0d9689657 -> 3fe412417


HIVE-11935: Race condition in HiveMetaStoreClient: isCompatibleWith and close (Daniel Dai, Reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3fe41241
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3fe41241
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3fe41241

Branch: refs/heads/branch-1
Commit: 3fe4124178b8c5a413b07b400c9b5df6360f6d29
Parents: 0d96896
Author: Daniel Dai 
Authored: Fri Dec 18 12:06:16 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:06:57 2015 -0800

--
 .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 7 +--
 1 file changed, 5 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3fe41241/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
--
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index e1ab1d5..6ee48c6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -287,13 +287,16 @@ public class HiveMetaStoreClient implements IMetaStoreClient {
 
   @Override
   public boolean isCompatibleWith(HiveConf conf) {
-if (currentMetaVars == null) {
+// Make a copy of currentMetaVars, there is a race condition that
+   // currentMetaVars might be changed during the execution of the method
+Map<String, String> currentMetaVarsCopy = currentMetaVars;
+if (currentMetaVarsCopy == null) {
   return false; // recreate
 }
 boolean compatible = true;
 for (ConfVars oneVar : HiveConf.metaVars) {
   // Since metaVars are all of different types, use string for comparison
-  String oldVar = currentMetaVars.get(oneVar.varname);
+  String oldVar = currentMetaVarsCopy.get(oneVar.varname);
   String newVar = conf.get(oneVar.varname, "");
   if (oldVar == null ||
   (oneVar.isCaseSensitive() ? !oldVar.equals(newVar) : !oldVar.equalsIgnoreCase(newVar))) {



[2/3] hive git commit: Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive

2015-12-18 Thread daijy
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/hive


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/27a14d5a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/27a14d5a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/27a14d5a

Branch: refs/heads/master
Commit: 27a14d5a4cf00b34bdfd9c556c71b49fa2e58fff
Parents: 95d2273 1199754
Author: Daniel Dai 
Authored: Fri Dec 18 12:04:17 2015 -0800
Committer: Daniel Dai 
Committed: Fri Dec 18 12:04:17 2015 -0800

--
 .../predicate/AccumuloPredicateHandler.java | 4 +-
 .../predicate/TestAccumuloPredicateHandler.java |36 +-
 common/pom.xml  | 5 +
 .../hadoop/hive/common/DiskRangeInfo.java   |59 -
 .../common/metrics/common/MetricsConstant.java  | 8 +
 .../metrics/metrics2/CodahaleMetrics.java   | 7 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   125 +-
 .../apache/hadoop/hive/ql/log/PerfLogger.java   |56 +-
 .../apache/hive/common/util/BloomFilter.java|   309 -
 .../org/apache/hive/common/util/Murmur3.java|   335 -
 .../java/org/apache/hive/http/HttpServer.java   |47 +
 .../hive/common/metrics/MetricsTestUtils.java   |13 +-
 .../apache/hadoop/hive/conf/TestHiveConf.java   |14 +
 .../apache/hive/common/util/TestMurmur3.java|   224 -
 conf/ivysettings.xml|12 +-
 data/conf/hive-site.xml | 5 +
 data/conf/llap/hive-site.xml| 5 +
 data/conf/llap/llap-daemon-site.xml | 5 +
 data/conf/perf-reg/hive-site.xml|   285 +
 data/conf/perf-reg/tez-site.xml | 6 +
 .../metastore_export/csv/TABLE_PARAMS.txt   |   102 +
 .../metastore_export/csv/TAB_COL_STATS.txt  |   259 +
 data/scripts/q_perf_test_init.sql   |   616 +
 .../hive/hbase/HiveHBaseTableInputFormat.java   |11 +-
 .../test/results/positive/hbase_queries.q.out   |38 +-
 .../src/test/templates/TestHBaseCliDriver.vm|63 +-
 .../templates/TestHBaseNegativeCliDriver.vm |64 +-
 .../hive/hcatalog/mapreduce/SpecialCases.java   | 2 +-
 .../hive/hcatalog/api/HCatClientHMSImpl.java|26 +-
 itests/custom-udfs/pom.xml  |62 +
 itests/custom-udfs/udf-classloader-udf1/pom.xml |43 +
 .../src/main/java/hive/it/custom/udfs/UDF1.java |58 +
 itests/custom-udfs/udf-classloader-udf2/pom.xml |43 +
 .../src/main/java/hive/it/custom/udfs/UDF2.java |60 +
 itests/custom-udfs/udf-classloader-util/pom.xml |35 +
 .../src/main/java/hive/it/custom/udfs/Util.java |25 +
 .../hive/thrift/TestHadoopAuthBridge23.java | 4 -
 .../hive/metastore/TestMetaStoreMetrics.java|   152 +-
 .../hbase/TestHBaseMetastoreMetrics.java|   128 +
 .../hadoop/hive/ql/TestLocationQueries.java | 2 +-
 .../plugin/TestHiveAuthorizerShowFilters.java   |36 +-
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 8 +
 .../hive/jdbc/miniHS2/TestHs2Metrics.java   |42 +-
 itests/pom.xml  | 1 +
 itests/qtest/pom.xml|21 +-
 .../test/resources/testconfiguration.properties | 1 +
 .../hadoop/hive/accumulo/AccumuloQTestUtil.java | 2 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java   | 2 +-
 .../hadoop/hive/hbase/HBaseTestSetup.java   | 9 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java|   337 +-
 .../org/apache/hive/jdbc/HiveConnection.java| 9 +-
 .../org/apache/hive/jdbc/HiveStatement.java |43 +-
 jdbc/src/java/org/apache/hive/jdbc/Utils.java   | 2 +
 .../hadoop/hive/llap/io/api/LlapIoProxy.java|78 -
 .../hadoop/hive/llap/io/api/LlapProxy.java  |   111 +
 .../hive/llap/registry/ServiceInstance.java | 7 +
 .../registry/impl/LlapFixedRegistryImpl.java| 7 +
 .../registry/impl/LlapYarnRegistryImpl.java |30 +-
 .../hive/llap/security/LlapTokenIdentifier.java |82 +
 .../hive/llap/security/LlapTokenProvider.java   |27 +
 .../daemon/rpc/LlapDaemonProtocolProtos.java|  1059 +-
 .../hadoop/hive/llap/cache/BuddyAllocator.java  | 1 -
 .../llap/cache/LowLevelCacheMemoryManager.java  |41 +-
 .../daemon/LlapDaemonProtocolBlockingPB.java| 6 +
 .../LlapManagementProtocolBlockingPB.java   |24 +
 .../hive/llap/daemon/impl/LlapDaemon.java   |41 +-
 .../impl/LlapDaemonProtocolClientImpl.java  | 1 -
 .../impl/LlapDaemonProtocolServerImpl.java  |   155 +-
 .../impl/LlapManagementProtocolClientImpl.java  |82 +
 .../hive/llap/daemon/impl/QueryFileCleaner.java |96 -
 .../hive/llap/daemon/impl/QueryTracker.java |   114 +-
 .../daemon/services/impl/LlapWebServices.java   |33 +-
 .../llap/io/decode/OrcEncodedDataConsumer.java  
