svn commit: r1640255 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java

Author: ekoifman
Date: Mon Nov 17 23:26:35 2014
New Revision: 1640255

URL: http://svn.apache.org/r1640255
Log:
HIVE-8881 when web client tries to fetch all jobs from webhcat where HDFS does 
not have the data

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java?rev=1640255&r1=1640254&r2=1640255&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SimpleWebException.java
 Mon Nov 17 23:26:35 2014
@@ -32,7 +32,7 @@ import org.codehaus.jackson.map.ObjectMa
  * instead map our own so that Jersey doesn't log our exceptions as
  * error in the output log.  See SimpleExceptionMapper.
  */
-public class SimpleWebException extends Throwable {
+public class SimpleWebException extends Exception {
   public int httpCode;
   public Map<String, Object> params;
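
The mapping class referenced in the comment, SimpleExceptionMapper, is not part of this diff. Purely as a hedged illustration, a JAX-RS mapper for this exception could look like the following sketch (class body and JSON serialization are assumptions, not the committed code):

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;

// Hypothetical sketch: turn SimpleWebException into a plain HTTP response so
// Jersey treats it as a handled error instead of logging a server fault.
@Provider
public class SimpleExceptionMapperSketch implements ExceptionMapper<SimpleWebException> {
  @Override
  public Response toResponse(SimpleWebException ex) {
    return Response.status(ex.httpCode)   // status code carried by the exception
      .entity(ex.params)                  // response body built from the params map
      .type(MediaType.APPLICATION_JSON)
      .build();
  }
}

The Throwable-to-Exception change above also helps such call sites: as a proper checked Exception, the class participates in normal throws clauses and catch blocks.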
 




svn commit: r1640981 - in /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton: AppConfig.java tool/TempletonUtils.java tool/TrivialExecService.java

Author: ekoifman
Date: Fri Nov 21 19:24:54 2014
New Revision: 1640981

URL: http://svn.apache.org/r1640981
Log:
HIVE-8877 improve context logging during job submission via WebHCat (reviewed 
by Thejas Nair)

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1640981&r1=1640980&r2=1640981&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 Fri Nov 21 19:24:54 2014
@@ -28,12 +28,14 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.StringTokenizer;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
@@ -224,14 +226,8 @@ public class AppConfig extends Configura
* support/debugging.  Later it may be worth adding a REST call which will 
return this data.
*/
   private String dumpEnvironent() {
-    StringBuilder sb = new StringBuilder("WebHCat environment:\n");
-    Map<String, String> env = System.getenv();
-    List<String> propKeys = new ArrayList<String>(env.keySet());
-    Collections.sort(propKeys);
-    for(String propKey : propKeys) {
-      sb.append(propKey).append('=').append(env.get(propKey)).append('\n');
-    }
-    sb.append("Configration properties: \n");
+    StringBuilder sb = TempletonUtils.dumpPropMap("WebHCat System.getenv()", System.getenv());
+    sb.append("START WebHCat AppConfig.iterator(): \n");
     Iterator<Map.Entry<String, String>> configIter = this.iterator();
     List<Map.Entry<String, String>> configVals = new ArrayList<Map.Entry<String, String>>();
     while(configIter.hasNext()) {
@@ -245,8 +241,19 @@ public class AppConfig extends Configura
     });
     for(Map.Entry<String, String> entry : configVals) {
       //use get() to make sure variable substitution works
-      sb.append(entry.getKey()).append('=').append(get(entry.getKey())).append('\n');
+      if(entry.getKey().toLowerCase().contains("path")) {
+        StringTokenizer st = new StringTokenizer(get(entry.getKey()), File.pathSeparator);
+        sb.append(entry.getKey()).append("=\n");
+        while(st.hasMoreTokens()) {
+          sb.append("  ").append(st.nextToken()).append(File.pathSeparator).append('\n');
+        }
+      }
+      else {
+        sb.append(entry.getKey()).append('=').append(get(entry.getKey())).append('\n');
+      }
     }
+    sb.append("END WebHCat AppConfig.iterator(): \n");
+    sb.append(TempletonUtils.dumpPropMap("WebHCat System.getProperties()", System.getProperties()));
 return sb.toString();
   }
   public void startCleanup() {

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java?rev=1640981&r1=1640980&r2=1640981&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
 Fri Nov 21 19:24:54 2014
@@ -18,6 +18,7 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
@@ -34,6 +35,8 @@ import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Properties;
+import java.util.StringTokenizer;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
@@ -408,4 +411,34 @@ public class TempletonUtils {
 }
 return null;
   }
+  public static StringBuilder dumpPropMap(String header, Properties props) {
+    Map<String, String> map = new HashMap<String, String>();
+    for(Map.Entry<Object, Object
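
The message is truncated above mid-method. Purely as a sketch of the idea (a sorted key=value dump; this is a guess at the shape, not the committed code), such a helper might read:

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public final class PropDumpSketch {
  // Hypothetical: copy Properties into a Map<String, String>, then delegate.
  public static StringBuilder dumpPropMap(String header, Properties props) {
    Map<String, String> map = new HashMap<String, String>();
    for (Map.Entry<Object, Object> e : props.entrySet()) {
      map.put(String.valueOf(e.getKey()), String.valueOf(e.getValue()));
    }
    return dumpPropMap(header, map);
  }
  // Hypothetical: render one sorted key=value pair per line under a header.
  public static StringBuilder dumpPropMap(String header, Map<String, String> map) {
    StringBuilder sb = new StringBuilder(header).append('\n');
    List<String> keys = new ArrayList<String>(map.keySet());
    Collections.sort(keys);
    for (String key : keys) {
      sb.append(key).append('=').append(map.get(key)).append('\n');
    }
    return sb;
  }
}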

svn commit: r1634244 - /hive/trunk/hcatalog/src/test/e2e/templeton/README.txt

Author: ekoifman
Date: Sat Oct 25 19:20:37 2014
New Revision: 1634244

URL: http://svn.apache.org/r1634244
Log:
HIVE-8588 sqoop REST endpoint fails to send appropriate JDBC driver to the 
cluster

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/README.txt

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/README.txt?rev=1634244&r1=1634243&r2=1634244&view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/README.txt Sat Oct 25 19:20:37 
2014
@@ -101,7 +101,7 @@ For Hadoop 2.x you would need to upload 
 Also see 
https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-HadoopDistributedCache
  for notes on additional JAR files to copy to HDFS.
 
-5. Make sure TEMPLETON_HOME evnironment variable is set
+5. Make sure TEMPLETON_HOME environment variable is set
 
 6. hadoop/conf/core-site.xml should have items described in
 
https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-Permissions
@@ -124,6 +124,9 @@ You may also need to adjust the followin
     <final>true</final>
   </property>
 
+8.Sqoop test require JDBC jar to be placed on HDFS for whichever DB the test 
is configured with,
+for example mysql-connector-java-5.1.30-bin.jar.
+
 
  See deployers/ for scripts that automate a lot of the set up.
 




svn commit: r1634242 - in /hive/trunk/hcatalog: src/test/e2e/templeton/deployers/ src/test/e2e/templeton/deployers/config/webhcat/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/apache/hi

Author: ekoifman
Date: Sat Oct 25 19:19:37 2014
New Revision: 1634242

URL: http://svn.apache.org/r1634242
Log:
HIVE-8588 sqoop REST endpoint fails to send appropriate JDBC driver to the 
cluster

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh?rev=1634242&r1=1634241&r2=1634242&view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh 
Sat Oct 25 19:19:37 2014
@@ -28,6 +28,6 @@
 echo Deleting artifacts from HDFS...
 
 ${HADOOP_HOME}/bin/hdfs dfs -rm -r   /user/hive/ /user/${USER}/ 
/user/templeton /apps /tmp /sqoopoutputdir
-${HADOOP_HOME}/bin/hdfs dfs -mkdir -p/tmp/hadoop-${USER} 
/user/hive/warehouse /user/${USER}/ /user/templeton /apps/templeton 
/tmp/hadoop-yarn /tmp/templeton_test_out
+${HADOOP_HOME}/bin/hdfs dfs -mkdir -p/tmp/hadoop-${USER} 
/user/hive/warehouse /user/${USER}/ /user/templeton /apps/templeton/jdbc 
/tmp/hadoop-yarn /tmp/templeton_test_out
 ${HADOOP_HOME}/bin/hdfs dfs -chmod -R a+rwx /user /tmp/
 ${HADOOP_HOME}/bin/hdfs dfs -chmod g+rwx   /user/hive/warehouse

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1634242&r1=1634241&r2=1634242&view=diff
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 (original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 Sat Oct 25 19:19:37 2014
@@ -78,10 +78,22 @@
         HCat, Hive query, etc.</description>
     </property>
     <property>
+        <name>templeton.sqoop.archive</name>
+        <value>hdfs:///apps/templeton/sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz</value>
+        <description>This should point to Sqoop tar that will be shipped to target node executing
+        the actual sqoop command.  If not set, Sqoop is expected to be installed on every node of the
+        cluster.</description>
+    </property>
+    <property>
         <name>templeton.sqoop.path</name>
-        <value>${env.SQOOP_HOME}/bin/sqoop</value>
+        <value>sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz/sqoop-1.4.5.bin__hadoop-2.0.4-alpha/bin/sqoop</value>
         <description>The path to the Sqoop executable.</description>
     </property>
+    <property>
+        <name>templeton.sqoop.home</name>
+        <value>sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz/sqoop-1.4.5.bin__hadoop-2.0.4-alpha</value>
+        <description>The path to the Sqoop home in the exploded archive.</description>
+    </property>
 
     <property>
         <name>templeton.controller.mr.child.opts</name>

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh?rev=1634242&r1=1634241&r2=1634242&view=diff
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
(original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
Sat Oct 25 19:19:37 2014
@@ -35,7 +35,6 @@ ${HADOOP_HOME}/bin/hdfs dfs -put ${HADOO
 ${HADOOP_HOME}/bin/hdfs dfs -put 
${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-${HADOOP_VERSION}.jar
 webhcate2e/hclient.jar
 ${HADOOP_HOME

svn commit: r1634245 - in /hive/branches/branch-0.14/hcatalog: src/test/e2e/templeton/ src/test/e2e/templeton/deployers/ src/test/e2e/templeton/deployers/config/webhcat/ src/test/e2e/templeton/tests/

Author: ekoifman
Date: Sat Oct 25 19:21:33 2014
New Revision: 1634245

URL: http://svn.apache.org/r1634245
Log:
HIVE-8588 sqoop REST endpoint fails to send appropriate JDBC driver to the 
cluster

Modified:
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/README.txt

hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh

hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml

hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/env.sh

hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JarDelegator.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/LauncherDelegator.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java

Modified: hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/README.txt
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/README.txt?rev=1634245&r1=1634244&r2=1634245&view=diff
==
--- hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/README.txt 
(original)
+++ hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/README.txt Sat 
Oct 25 19:21:33 2014
@@ -101,7 +101,7 @@ For Hadoop 2.x you would need to upload 
 Also see 
https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-HadoopDistributedCache
  for notes on additional JAR files to copy to HDFS.
 
-5. Make sure TEMPLETON_HOME evnironment variable is set
+5. Make sure TEMPLETON_HOME environment variable is set
 
 6. hadoop/conf/core-site.xml should have items described in
 
https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-Permissions
@@ -124,6 +124,9 @@ You may also need to adjust the followin
   finaltrue/final
 /property
 
+8.Sqoop test require JDBC jar to be placed on HDFS for whichever DB the test 
is configured with,
+for example mysql-connector-java-5.1.30-bin.jar.
+
 
  See deployers/ for scripts that automate a lot of the set up.
 

Modified: 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh?rev=1634245&r1=1634244&r2=1634245&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/clean_file_system.sh
 Sat Oct 25 19:21:33 2014
@@ -28,6 +28,6 @@
 echo Deleting artifacts from HDFS...
 
 ${HADOOP_HOME}/bin/hdfs dfs -rm -r   /user/hive/ /user/${USER}/ 
/user/templeton /apps /tmp /sqoopoutputdir
-${HADOOP_HOME}/bin/hdfs dfs -mkdir -p/tmp/hadoop-${USER} 
/user/hive/warehouse /user/${USER}/ /user/templeton /apps/templeton 
/tmp/hadoop-yarn /tmp/templeton_test_out
+${HADOOP_HOME}/bin/hdfs dfs -mkdir -p/tmp/hadoop-${USER} 
/user/hive/warehouse /user/${USER}/ /user/templeton /apps/templeton/jdbc 
/tmp/hadoop-yarn /tmp/templeton_test_out
 ${HADOOP_HOME}/bin/hdfs dfs -chmod -R a+rwx /user /tmp/
 ${HADOOP_HOME}/bin/hdfs dfs -chmod g+rwx   /user/hive/warehouse

Modified: 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1634245&r1=1634244&r2=1634245&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 Sat Oct 25 19:21:33 2014
@@ -78,10 +78,22 @@
         HCat, Hive query, etc.</description>
     </property>
     <property>

svn commit: r1635594 - in /hive/trunk: hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ ql/src/java/org/apache/hadoop/hive/ql/security/ shims/common-secure/src/main/java/org/apac

Author: ekoifman
Date: Thu Oct 30 19:16:51 2014
New Revision: 1635594

URL: http://svn.apache.org/r1635594
Log:
HIVE-8643 DDL operations via WebHCat with doAs parameter in secure cluster fail

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java

hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java?rev=1635594&r1=1635593&r2=1635594&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 Thu Oct 30 19:16:51 2014
@@ -121,9 +121,9 @@ public class SecureProxySupport {
 if (isEnabled) {
       args.add("-D");
       args.add("hive.metastore.token.signature=" + getHcatServiceStr());
-      args.add("-D");
-      args.add("proxy.user.name=" + user);
     }
+    args.add("-D");
+    args.add("proxy.user.name=" + user);
   }
 
   class TokenWrapper {
@@ -140,6 +140,7 @@ public class SecureProxySupport {
     ugi.doAs(new PrivilegedExceptionAction<Object>() {
   public Object run() throws IOException {
 FileSystem fs = FileSystem.get(conf);
+        //todo: according to JavaDoc this seems like private API: addDelegationToken should be used
 twrapper.token = fs.getDelegationToken(ugi.getShortUserName());
 return null;
   }
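
The public alternative the todo alludes to would be along these lines (a sketch assuming Hadoop 2.x's FileSystem.addDelegationTokens; the helper name is made up):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;

public final class TokenFetchSketch {
  // Collect HDFS delegation tokens through the public API; this also picks up
  // tokens for any chained file systems, unlike getDelegationToken().
  static Token<?>[] fetchTokens(Configuration conf, String renewer) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Credentials creds = new Credentials();
    return fs.addDelegationTokens(renewer, creds);
  }
}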

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java?rev=1635594&r1=1635593&r2=1635594&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
 Thu Oct 30 19:16:51 2014
@@ -30,6 +30,8 @@ import org.apache.hadoop.security.UserGr
  * but honours a proxy config setting proxy.user.name instead of the
  * current user if set. This allows server processes like webhcat which
  * proxy other users to easily specify an override if allowed.
+ *
+ * It is no longer necessary to use this class with WebHCat as of Hive 0.14.
  */
 public class ProxyUserAuthenticator extends HadoopDefaultAuthenticator {
 

Modified: 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1635594&r1=1635593&r2=1635594&view=diff
==
--- 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 (original)
+++ 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 Thu Oct 30 19:16:51 2014
@@ -463,6 +463,16 @@ public abstract class HadoopShimsSecure 
 
   @Override
   public UserGroupInformation getUGIForConf(Configuration conf) throws 
IOException {
+    String doAs = conf.get("proxy.user.name");
+    if(doAs != null && doAs.length() > 0) {
+     /*
+      * this allows doAs (proxy user) to be passed along across process boundary where
+      * delegation tokens are not supported.  For example, a DDL stmt via WebHCat with
+      * a doAs parameter, forks to 'hcat' which needs to start a Session that
+      * proxies the end user
+      */
+      return UserGroupInformation.createProxyUser(doAs, UserGroupInformation.getLoginUser());
+    }
 return UserGroupInformation.getCurrentUser();
   }
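
To illustrate what the proxy UGI created above enables: once the forked process (e.g. 'hcat') obtains it, work runs under the end user's identity. A minimal sketch (names other than the Hadoop API calls are invented):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

public final class ProxyRunSketch {
  static void runAs(String doAsUser) throws Exception {
    // proxy the end user on top of the service's login identity
    UserGroupInformation ugi = UserGroupInformation.createProxyUser(
        doAsUser, UserGroupInformation.getLoginUser());
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      public Void run() throws Exception {
        // DDL/metastore work executes as the proxied end user here
        return null;
      }
    });
  }
}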
 




svn commit: r1636024 - in /hive/branches/branch-0.14: hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/

Author: ekoifman
Date: Sat Nov  1 19:59:19 2014
New Revision: 1636024

URL: http://svn.apache.org/r1636024
Log:
HIVE-8685 DDL operations in WebHCat set proxy user to null in unsecure mode 
(Eugene Koifman, reviewed by Thejas M Nair)

Modified:

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java

hive/branches/branch-0.14/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

Modified: 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java?rev=1636024&r1=1636023&r2=1636024&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
 Sat Nov  1 19:59:19 2014
@@ -229,15 +229,17 @@ public class ExecServiceImpl implements 
      watchdog.checkException();
    }
    catch (Exception ex) {
-      LOG.error("Command: " + cmd + " failed:", ex);
+      LOG.error("Command: " + cmd + " failed. res=" + res, ex);
    }
    if(watchdog.killedProcess()) {
      String msg = " was terminated due to timeout(" + timeout + "ms).  See " + AppConfig
        .EXEC_TIMEOUT_NAME + " property";
-      LOG.warn("Command: " + cmd + msg);
+      LOG.warn("Command: " + cmd + msg + " res=" + res);
      res.stderr += " Command " + msg;
    }
-
+    if(res.exitcode != 0) {
+      LOG.info("Command: " + cmd + " failed. res=" + res);
+    }
    return res;
   }
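
The watchdog in this method is Apache commons-exec. A minimal sketch of the usual wiring for a kill-after-timeout guard (command string and timeout are placeholders, not the committed code):

import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.ExecuteWatchdog;

public final class WatchdogSketch {
  static int run(String command, long timeoutMs) throws Exception {
    DefaultExecutor executor = new DefaultExecutor();
    // the watchdog destroys the child process once timeoutMs elapses
    ExecuteWatchdog watchdog = new ExecuteWatchdog(timeoutMs);
    executor.setWatchdog(watchdog);
    try {
      return executor.execute(CommandLine.parse(command));
    } catch (ExecuteException ex) {
      // a watchdog kill surfaces as a failed execution; tell the two apart,
      // as the killedProcess() check in the method above does
      if (watchdog.killedProcess()) {
        throw new IllegalStateException("command timed out after " + timeoutMs + "ms", ex);
      }
      throw ex;
    }
  }
}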
 

Modified: 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java?rev=1636024&r1=1636023&r2=1636024&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 Sat Nov  1 19:59:19 2014
@@ -40,7 +40,7 @@ import org.apache.thrift.TException;
 
 /**
  * Helper class to run jobs using Kerberos security.  Always safe to
- * use these methods, it's a noop if security is not enabled.
+ * use these methods, it's a no-op if security is not enabled.
  */
 public class SecureProxySupport {
   private Path tokenPath;
@@ -121,9 +121,9 @@ public class SecureProxySupport {
 if (isEnabled) {
       args.add("-D");
       args.add("hive.metastore.token.signature=" + getHcatServiceStr());
+      args.add("-D");
+      args.add("proxy.user.name=" + user);
     }
-    args.add("-D");
-    args.add("proxy.user.name=" + user);
   }
 
   class TokenWrapper {

Modified: 
hive/branches/branch-0.14/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1636024&r1=1636023&r2=1636024&view=diff
==
--- 
hive/branches/branch-0.14/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 (original)
+++ 
hive/branches/branch-0.14/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 Sat Nov  1 19:59:19 2014
@@ -463,7 +463,7 @@ public abstract class HadoopShimsSecure 
 
   @Override
   public UserGroupInformation getUGIForConf(Configuration conf) throws 
IOException {
-    String doAs = conf.get("proxy.user.name");
+    String doAs = System.getenv("HADOOP_USER_NAME");
     if(doAs != null && doAs.length() > 0) {
      /*
       * this allows doAs (proxy user) to be passed along across process boundary where




svn commit: r1636023 - in /hive/trunk: hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/

Author: ekoifman
Date: Sat Nov  1 19:59:02 2014
New Revision: 1636023

URL: http://svn.apache.org/r1636023
Log:
HIVE-8685 DDL operations in WebHCat set proxy user to null in unsecure mode 
(Eugene Koifman, reviewed by Thejas M Nair)

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java

hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java?rev=1636023&r1=1636022&r2=1636023&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ExecServiceImpl.java
 Sat Nov  1 19:59:02 2014
@@ -229,15 +229,17 @@ public class ExecServiceImpl implements 
      watchdog.checkException();
    }
    catch (Exception ex) {
-      LOG.error("Command: " + cmd + " failed:", ex);
+      LOG.error("Command: " + cmd + " failed. res=" + res, ex);
    }
    if(watchdog.killedProcess()) {
      String msg = " was terminated due to timeout(" + timeout + "ms).  See " + AppConfig
        .EXEC_TIMEOUT_NAME + " property";
-      LOG.warn("Command: " + cmd + msg);
+      LOG.warn("Command: " + cmd + msg + " res=" + res);
      res.stderr += " Command " + msg;
    }
-
+    if(res.exitcode != 0) {
+      LOG.info("Command: " + cmd + " failed. res=" + res);
+    }
    return res;
   }
 

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java?rev=1636023&r1=1636022&r2=1636023&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 Sat Nov  1 19:59:02 2014
@@ -40,7 +40,7 @@ import org.apache.thrift.TException;
 
 /**
  * Helper class to run jobs using Kerberos security.  Always safe to
- * use these methods, it's a noop if security is not enabled.
+ * use these methods, it's a no-op if security is not enabled.
  */
 public class SecureProxySupport {
   private Path tokenPath;
@@ -121,9 +121,9 @@ public class SecureProxySupport {
 if (isEnabled) {
       args.add("-D");
       args.add("hive.metastore.token.signature=" + getHcatServiceStr());
+      args.add("-D");
+      args.add("proxy.user.name=" + user);
     }
-    args.add("-D");
-    args.add("proxy.user.name=" + user);
   }
 
   class TokenWrapper {

Modified: 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1636023&r1=1636022&r2=1636023&view=diff
==
--- 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 (original)
+++ 
hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 Sat Nov  1 19:59:02 2014
@@ -463,7 +463,7 @@ public abstract class HadoopShimsSecure 
 
   @Override
   public UserGroupInformation getUGIForConf(Configuration conf) throws 
IOException {
-    String doAs = conf.get("proxy.user.name");
+    String doAs = System.getenv("HADOOP_USER_NAME");
     if(doAs != null && doAs.length() > 0) {
      /*
       * this allows doAs (proxy user) to be passed along across process boundary where
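
HADOOP_USER_NAME is the environment variable Hadoop's UserGroupInformation honors in unsecure mode, so the parent process only needs to export it when forking. A sketch of that hand-off (class and method names are invented; the command array is a placeholder):

import java.io.IOException;
import java.util.Map;

public final class ForkAsUserSketch {
  // In unsecure mode, export HADOOP_USER_NAME so the forked process (e.g.
  // 'hcat') resolves its UGI to the end user, per the shim change above.
  static Process fork(String endUser, String... cmd) throws IOException {
    ProcessBuilder pb = new ProcessBuilder(cmd);
    Map<String, String> env = pb.environment();
    env.put("HADOOP_USER_NAME", endUser);
    return pb.start();
  }
}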




svn commit: r1637207 - in /hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton: SqoopDelegator.java tool/LaunchMapper.java

Author: ekoifman
Date: Thu Nov  6 20:20:31 2014
New Revision: 1637207

URL: http://svn.apache.org/r1637207
Log:
HIVE-8754 Sqoop job submission via WebHCat doesn't properly localize required 
jdbc jars in secure cluster (Eugene Koifman, reviewed by Thejas Nair)

Modified:

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java

hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Modified: 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java?rev=1637207&r1=1637206&r2=1637207&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SqoopDelegator.java
 Thu Nov  6 20:20:31 2014
@@ -83,6 +83,20 @@ public class SqoopDelegator extends Laun
         args.add("-D" + TempletonControllerJob.TOKEN_FILE_ARG_PLACEHOLDER);
         args.add("-D" + TempletonControllerJob.MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER);
       }
+      if(i == 0 && TempletonUtils.isset(libdir) && TempletonUtils.isset(appConf.sqoopArchive())) {
+        //http://sqoop.apache.org/docs/1.4.5/SqoopUserGuide.html#_using_generic_and_specific_arguments
+        String libJars = null;
+        for(String s : args) {
+          if(s.startsWith(JobSubmissionConstants.Sqoop.LIB_JARS)) {
+            libJars = s.substring(s.indexOf("=") + 1);
+            break;
+          }
+        }
+        //the jars in libJars will be localized to CWD of the launcher task; then -libjars will
+        //cause them to be localized for the Sqoop MR job tasks
+        args.add(TempletonUtils.quoteForWindows("-libjars"));
+        args.add(TempletonUtils.quoteForWindows(libJars));
+      }
     }
   } else if (TempletonUtils.isset(optionsFile)) {
     args.add("--options-file");
@@ -114,11 +128,13 @@ public class SqoopDelegator extends Laun
   /**Sqoop accesses databases via JDBC.  This means it needs to have appropriate JDBC
   drivers available.  Normally, the user would install Sqoop and place these jars
   into SQOOP_HOME/lib.  When WebHCat is configured to auto-ship the Sqoop tar file, we
-  need to make sure that relevant JDBC jars are available on target node.
+  need to make sure that relevant JDBC jars are available on target node but we cannot modify
+  lib/ of exploded tar because Dist Cache intentionally prevents this.
   The user is expected to place any JDBC jars into an HDFS directory and specify this
-  dir in libdir parameter.  All the files in this dir will be copied to lib/ of the
-  exploded Sqoop tar ball on target node.
+  dir in libdir parameter.  WebHCat then ensures that these jars are localized for the launcher task
+  and made available to Sqoop.
   {@link org.apache.hive.hcatalog.templeton.tool.LaunchMapper#handleSqoop(org.apache.hadoop.conf.Configuration, java.util.Map)}
+  {@link #makeArgs(String, String, String, String, String, boolean, String)}
   */
   LOG.debug("libdir=" + libdir);
   List<Path> jarList = TempletonUtils.hadoopFsListChildren(libdir, appConf, runAs);
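
As the javadoc above describes, the staged jars become a -libjars value. A small sketch of building that comma-separated list from an HDFS directory (a stand-in for the TempletonUtils.hadoopFsListChildren call, not the committed code):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public final class LibJarsSketch {
  // List the JDBC jars the user staged under libdir and join them the way a
  // -libjars argument expects: comma separated, no spaces.
  static String buildLibJars(Configuration conf, String libdir) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    StringBuilder sb = new StringBuilder();
    for (FileStatus stat : fs.listStatus(new Path(libdir))) {
      if (!stat.isDirectory()) {
        if (sb.length() > 0) {
          sb.append(',');
        }
        sb.append(stat.getPath().toString());
      }
    }
    return sb.toString();
  }
}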

Modified: 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1637207&r1=1637206&r2=1637207&view=diff
==
--- 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 (original)
+++ 
hive/branches/branch-0.14/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 Thu Nov  6 20:20:31 2014
@@ -18,7 +18,6 @@
  */
 package org.apache.hive.hcatalog.templeton.tool;
 
-import com.google.common.io.Files;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -101,11 +100,18 @@ public class LaunchMapper extends Mapper
 if(TempletonUtils.isset(conf.get(Sqoop.LIB_JARS))) {
   //LIB_JARS should only be set if Sqoop is auto-shipped
       LOG.debug(Sqoop.LIB_JARS + "=" + conf.get(Sqoop.LIB_JARS));
-      //copy these (which have now been localized) jars to sqoop/lib
-      String destDir = conf.get(AppConfig.SQOOP_HOME_PATH) + File.separator + "lib";
   String[] files

svn commit: r924235 - in /websites/production/hive/content: ./ javadocs/

Author: ekoifman
Date: Tue Sep 30 23:57:26 2014
New Revision: 924235

Log:
updated committer list

Added:
websites/production/hive/content/
  - copied from r924234, websites/staging/hive/trunk/content/
websites/production/hive/content/javadocs/
  - copied from r924234, websites/production/hive/content/javadocs/



svn commit: r1633197 - in /hive/trunk: ./ hcatalog/webhcat/svr/ hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/

Author: ekoifman
Date: Mon Oct 20 18:44:12 2014
New Revision: 1633197

URL: http://svn.apache.org/r1633197
Log:
HIVE-8387 add retry logic to ZooKeeperStorage in WebHCat

Modified:
hive/trunk/hcatalog/webhcat/svr/pom.xml

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobStateTracker.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonStorage.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperCleanup.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/ZooKeeperStorage.java
hive/trunk/pom.xml

Modified: hive/trunk/hcatalog/webhcat/svr/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/pom.xml?rev=1633197&r1=1633196&r2=1633197&view=diff
==
--- hive/trunk/hcatalog/webhcat/svr/pom.xml (original)
+++ hive/trunk/hcatalog/webhcat/svr/pom.xml Mon Oct 20 18:44:12 2014
@@ -38,7 +38,7 @@
   </properties>
 
   <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <!-- dependencies are always listed in sorted order by groupId, artifactId -->
     <!-- intra-project -->
     <dependency>
       <groupId>org.apache.hive.hcatalog</groupId>
@@ -72,6 +72,14 @@
       <artifactId>commons-exec</artifactId>
       <version>${commons-exec.version}</version>
     </dependency>
+
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-framework</artifactId>
+      <version>${curator.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.zookeeper</groupId>
       <artifactId>zookeeper</artifactId>
@@ -195,6 +203,37 @@
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-shade-plugin</artifactId>
+      <executions>
+        <execution>
+          <id>include-curator</id>
+          <!-- WebHCat uses Curator library to work with ZooKeeper.  Thus it must be available
+          on a random node in the cluster where LaunchMapper runs to actually execute the job.
+          The simplest way is to include it in webhcat jar which is shipped to target node since
+          it contains LaunchMapper.java. -->
+          <phase>package</phase>
+          <goals>
+            <goal>shade</goal>
+          </goals>
+          <configuration>
+            <minimizeJar>true</minimizeJar>
+            <artifactSet>
+              <includes>
+                <include>org.apache.curator</include>
+              </includes>
+            </artifactSet>
+            <relocations>
+              <relocation>
+                <pattern>org.apache.curator</pattern>
+                <shadedPattern>webhcat.org.apache.curator</shadedPattern>
+              </relocation>
+            </relocations>
+          </configuration>
+        </execution>
+      </executions>
+    </plugin>
   </plugins>
   </build>
 </project>
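
For reference, HIVE-8387's retry behavior comes from the Curator client added above. A minimal sketch of creating a retrying ZooKeeper client with Curator (connect string and retry numbers are placeholders):

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.ExponentialBackoffRetry;

public final class CuratorClientSketch {
  // Curator wraps ZooKeeper calls in a retry policy, so a transient connection
  // loss no longer fails a WebHCat job-state read/write outright.
  static CuratorFramework newClient(String zkConnectString) {
    CuratorFramework client = CuratorFrameworkFactory.newClient(
        zkConnectString, new ExponentialBackoffRetry(1000 /* base sleep ms */, 3 /* max retries */));
    client.start();  // must be started before use
    return client;
  }
}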

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java?rev=1633197&r1=1633196&r2=1633197&view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSStorage.java
 Mon Oct 20 18:44:12 2014
@@ -25,9 +25,7 @@ import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -116,32 +114,6 @@ public class HDFSStorage implements Temp
   }
 
   @Override
-  public Map<String, String> getFields(Type type, String id) {
-    HashMap<String, String> map = new HashMap<String, String>();
-    BufferedReader in = null;
-    Path p = new Path(getPath(type) + "/" + id);
-    try {
-      for (FileStatus status : fs.listStatus(p)) {
-        in = new BufferedReader(new InputStreamReader(fs.open(status.getPath())));
-        String line = null;
-        String val = "";
-        while ((line = in.readLine()) != null) {
-          if (!val.equals("")) {
-            val += "\n";
-          }
-          val += line;
-        }
-        map.put(status.getPath().getName(), val);
-      }
-    } catch (IOException e) {
-      LOG.trace("Couldn't find " + p);
-    } finally {
-      close(in);
-    }
-    return map;
-  }
-
-  @Override
   public boolean delete(Type type

svn commit: r1647379 - /hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql

Author: ekoifman
Date: Mon Dec 22 19:06:57 2014
New Revision: 1647379

URL: http://svn.apache.org/r1647379
Log:
HIVE-9155 HIVE_LOCKS uses int instead of bigint 
hive-txn-schema-0.14.0.mssql.sql (Reviewed by Alan Gates)

Modified:
hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql

Modified: 
hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql?rev=1647379&r1=1647378&r2=1647379&view=diff
==
--- hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql 
(original)
+++ hive/trunk/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql 
Mon Dec 22 19:06:57 2014
@@ -1 +1,101 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements.  See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License.  You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

--
-- Tables for transaction management
-- 

CREATE TABLE COMPACTION_QUEUE(
CQ_ID int NOT NULL,
CQ_DATABASE varchar(128) NOT NULL,
CQ_TABLE varchar(128) NOT NULL,
CQ_PARTITION varchar(767) NULL,
CQ_STATE char(1) NOT NULL,
CQ_TYPE char(1) NOT NULL,
CQ_WORKER_ID varchar(128) NULL,
CQ_START int NULL,
CQ_RUN_AS varchar(128) NULL,
PRIMARY KEY CLUSTERED 
(
CQ_ID ASC
)
);

CREATE TABLE COMPLETED_TXN_COMPONENTS(
CTC_TXNID int NULL,
CTC_DATABASE varchar(128) NOT NULL,
CTC_TABLE varchar(128) NULL,
CTC_PARTITION varchar(767) NULL
);

CREATE TABLE HIVE_LOCKS(
HL_LOCK_EXT_ID int NOT NULL,
HL_LOCK_INT_ID int NOT NULL,
HL_TXNID int NULL,
HL_DB varchar(128) NOT NULL,
HL_TABLE varchar(128) NULL,
HL_PARTITION varchar(767) NULL,
HL_LOCK_STATE char(1) NOT NULL,
HL_LOCK_TYPE char(1) NOT NULL,
HL_LAST_HEARTBEAT int NOT NULL,
HL_ACQUIRED_AT int NULL,
HL_USER varchar(128) NOT NULL,
HL_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED 
(
HL_LOCK_EXT_ID ASC,
HL_LOCK_INT_ID ASC
)
);

CREATE TABLE NEXT_COMPACTION_QUEUE_ID(
NCQ_NEXT int NOT NULL
);

INSERT INTO NEXT_COMPACTION_QUEUE_ID VALUES(1);

CREATE TABLE NEXT_LOCK_ID(
NL_NEXT int NOT NULL
);

INSERT INTO NEXT_LOCK_ID VALUES(1);

CREATE TABLE NEXT_TXN_ID(
NTXN_NEXT int NOT NULL
);

INSERT INTO NEXT_TXN_ID VALUES(1);

CREATE TABLE TXNS(
TXN_ID int NOT NULL,
TXN_STATE char(1) NOT NULL,
TXN_STARTED int NOT NULL,
TXN_LAST_HEARTBEAT int NOT NULL,
TXN_USER varchar(128) NOT NULL,
TXN_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED 
(
TXN_ID ASC
)
);

CREATE TABLE TXN_COMPONENTS(
TC_TXNID int NULL,
TC_DATABASE varchar(128) NOT NULL,
TC_TABLE varchar(128) NULL,
TC_PARTITION varchar(767) NULL
);

ALTER TABLE TXN_COMPONENTS  WITH CHECK ADD FOREIGN KEY(TC_TXNID) REFERENCES 
TXNS (TXN_ID);

\ No newline at end of file
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+
+--
+-- Tables for transaction management
+--
+
+CREATE TABLE COMPACTION_QUEUE(
+   CQ_ID bigint NOT NULL,
+   CQ_DATABASE varchar(128) NOT NULL,
+   CQ_TABLE varchar(128) NOT NULL,
+   CQ_PARTITION varchar(767) NULL,
+   CQ_STATE char(1) NOT NULL,
+   CQ_TYPE char(1) NOT NULL,
+   CQ_WORKER_ID varchar(128) NULL,
+   CQ_START bigint NULL,
+   CQ_RUN_AS varchar(128) NULL,
+PRIMARY KEY CLUSTERED 
+(
+   CQ_ID ASC

svn commit: r1647642 - /hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql

Author: ekoifman
Date: Tue Dec 23 19:00:43 2014
New Revision: 1647642

URL: http://svn.apache.org/r1647642
Log:
HIVE-9155 HIVE_LOCKS uses int instead of bigint 
hive-txn-schema-0.14.0.mssql.sql (Reviewed by Alan Gates)

Modified:

hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql

Modified: 
hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql?rev=1647642&r1=1647641&r2=1647642&view=diff
==
--- 
hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql
 (original)
+++ 
hive/branches/branch-0.14/metastore/scripts/upgrade/mssql/hive-txn-schema-0.14.0.mssql.sql
 Tue Dec 23 19:00:43 2014
@@ -1 +1,101 @@
--- Licensed to the Apache Software Foundation (ASF) under one or more
-- contributor license agreements.  See the NOTICE file distributed with
-- this work for additional information regarding copyright ownership.
-- The ASF licenses this file to You under the Apache License, Version 2.0
-- (the "License"); you may not use this file except in compliance with
-- the License.  You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

--
-- Tables for transaction management
-- 

CREATE TABLE COMPACTION_QUEUE(
CQ_ID int NOT NULL,
CQ_DATABASE varchar(128) NOT NULL,
CQ_TABLE varchar(128) NOT NULL,
CQ_PARTITION varchar(767) NULL,
CQ_STATE char(1) NOT NULL,
CQ_TYPE char(1) NOT NULL,
CQ_WORKER_ID varchar(128) NULL,
CQ_START int NULL,
CQ_RUN_AS varchar(128) NULL,
PRIMARY KEY CLUSTERED 
(
CQ_ID ASC
)
);

CREATE TABLE COMPLETED_TXN_COMPONENTS(
CTC_TXNID int NULL,
CTC_DATABASE varchar(128) NOT NULL,
CTC_TABLE varchar(128) NULL,
CTC_PARTITION varchar(767) NULL
);

CREATE TABLE HIVE_LOCKS(
HL_LOCK_EXT_ID int NOT NULL,
HL_LOCK_INT_ID int NOT NULL,
HL_TXNID int NULL,
HL_DB varchar(128) NOT NULL,
HL_TABLE varchar(128) NULL,
HL_PARTITION varchar(767) NULL,
HL_LOCK_STATE char(1) NOT NULL,
HL_LOCK_TYPE char(1) NOT NULL,
HL_LAST_HEARTBEAT int NOT NULL,
HL_ACQUIRED_AT int NULL,
HL_USER varchar(128) NOT NULL,
HL_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED 
(
HL_LOCK_EXT_ID ASC,
HL_LOCK_INT_ID ASC
)
);

CREATE TABLE NEXT_COMPACTION_QUEUE_ID(
NCQ_NEXT int NOT NULL
);

INSERT INTO NEXT_COMPACTION_QUEUE_ID VALUES(1);

CREATE TABLE NEXT_LOCK_ID(
NL_NEXT int NOT NULL
);

INSERT INTO NEXT_LOCK_ID VALUES(1);

CREATE TABLE NEXT_TXN_ID(
NTXN_NEXT int NOT NULL
);

INSERT INTO NEXT_TXN_ID VALUES(1);

CREATE TABLE TXNS(
TXN_ID int NOT NULL,
TXN_STATE char(1) NOT NULL,
TXN_STARTED int NOT NULL,
TXN_LAST_HEARTBEAT int NOT NULL,
TXN_USER varchar(128) NOT NULL,
TXN_HOST varchar(128) NOT NULL,
PRIMARY KEY CLUSTERED 
(
TXN_ID ASC
)
);

CREATE TABLE TXN_COMPONENTS(
TC_TXNID int NULL,
TC_DATABASE varchar(128) NOT NULL,
TC_TABLE varchar(128) NULL,
TC_PARTITION varchar(767) NULL
);

ALTER TABLE TXN_COMPONENTS  WITH CHECK ADD FOREIGN KEY(TC_TXNID) REFERENCES 
TXNS (TXN_ID);

\ No newline at end of file
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements.  See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License.  You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+
+--
+-- Tables for transaction management
+--
+
+CREATE TABLE COMPACTION_QUEUE(
+   CQ_ID bigint NOT NULL,
+   CQ_DATABASE varchar(128) NOT NULL,
+   CQ_TABLE varchar(128) NOT NULL,
+   CQ_PARTITION varchar(767) NULL,
+   CQ_STATE char(1) NOT NULL,
+   CQ_TYPE char(1) NOT NULL,
+   CQ_WORKER_ID varchar(128) NULL,
+   CQ_START bigint NULL

svn commit: r1654856 - /hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Author: ekoifman
Date: Mon Jan 26 18:22:17 2015
New Revision: 1654856

URL: http://svn.apache.org/r1654856
Log:
HIVE-9361 - Intermittent NPE in SessionHiveMetaStoreClient.alterTempTable

Modified:

hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Modified: 
hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1654856&r1=1654855&r2=1654856&view=diff
==
--- 
hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 (original)
+++ 
hive/branches/branch-1.0/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 Mon Jan 26 18:22:17 2015
@@ -361,7 +361,7 @@ public class SessionHiveMetaStoreClient
 
 org.apache.hadoop.hive.metastore.api.Table newtCopy = 
deepCopyAndLowerCaseTable(newt);
 MetaStoreUtils.updateUnpartitionedTableStatsFast(newtCopy,
-wh.getFileStatusesForSD(newtCopy.getSd()), false, true);
+getWh().getFileStatusesForSD(newtCopy.getSd()), false, true);
 Table newTable = new Table(newtCopy);
 String newDbName = newTable.getDbName();
 String newTableName = newTable.getTableName();




svn commit: r1654854 - /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Author: ekoifman
Date: Mon Jan 26 18:22:03 2015
New Revision: 1654854

URL: http://svn.apache.org/r1654854
Log:
HIVE-9361 - Intermittent NPE in SessionHiveMetaStoreClient.alterTempTable

Modified:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1654854&r1=1654853&r2=1654854&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 Mon Jan 26 18:22:03 2015
@@ -387,7 +387,7 @@ public class SessionHiveMetaStoreClient
 
 org.apache.hadoop.hive.metastore.api.Table newtCopy = 
deepCopyAndLowerCaseTable(newt);
 MetaStoreUtils.updateUnpartitionedTableStatsFast(newtCopy,
-wh.getFileStatusesForSD(newtCopy.getSd()), false, true);
+getWh().getFileStatusesForSD(newtCopy.getSd()), false, true);
 Table newTable = new Table(newtCopy);
 String newDbName = newTable.getDbName();
 String newTableName = newTable.getTableName();




svn commit: r1654855 - /hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Author: ekoifman
Date: Mon Jan 26 18:22:13 2015
New Revision: 1654855

URL: http://svn.apache.org/r1654855
Log:
HIVE-9361 - Intermittent NPE in SessionHiveMetaStoreClient.alterTempTable

Modified:

hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java

Modified: 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1654855&r1=1654854&r2=1654855&view=diff
==
--- 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 (original)
+++ 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 Mon Jan 26 18:22:13 2015
@@ -361,7 +361,7 @@ public class SessionHiveMetaStoreClient
 
 org.apache.hadoop.hive.metastore.api.Table newtCopy = 
deepCopyAndLowerCaseTable(newt);
 MetaStoreUtils.updateUnpartitionedTableStatsFast(newtCopy,
-wh.getFileStatusesForSD(newtCopy.getSd()), false, true);
+getWh().getFileStatusesForSD(newtCopy.getSd()), false, true);
 Table newTable = new Table(newtCopy);
 String newDbName = newTable.getDbName();
 String newTableName = newTable.getTableName();




svn commit: r1653337 - /hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java

Author: ekoifman
Date: Tue Jan 20 19:05:14 2015
New Revision: 1653337

URL: http://svn.apache.org/r1653337
Log:
HIVE-9404 NPE in 
org.apache.hadoop.hive.metastore.txn.TxnHandler.determineDatabaseProduct() 
(Eugene Koifman, reviewed by Alan Gates)

Added:

hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java

Added: 
hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java?rev=1653337&view=auto
==
--- 
hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
 (added)
+++ 
hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
 Tue Jan 20 19:05:14 2015
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.txn;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.junit.Test;
+
+public class TestTxnHandlerNegative {
+  static final private Log LOG = LogFactory.getLog(TestTxnHandlerNegative.class);
+
+  /**
+   * this intentionally sets a bad URL for connection to test error handling 
logic
+   * in TxnHandler
+   * @throws Exception
+   */
+  @Test
+  public void testBadConnection() throws Exception {
+HiveConf conf = new HiveConf();
+    conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, "blah");
+TxnHandler txnHandler1 = new TxnHandler(conf);
+MetaException e = null;
+try {
+  txnHandler1.getOpenTxns();
+}
+catch(MetaException ex) {
+      LOG.info("Expected error: " + ex.getMessage(), ex);
+  e = ex;
+}
+    assert e != null : "did not get exception";
+  }
+}




svn commit: r1653336 - /hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

Author: ekoifman
Date: Tue Jan 20 19:04:56 2015
New Revision: 1653336

URL: http://svn.apache.org/r1653336
Log:
HIVE-9404 NPE in 
org.apache.hadoop.hive.metastore.txn.TxnHandler.determineDatabaseProduct() 
(Eugene Koifman, reviewed by Alan Gates)

Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1653336&r1=1653335&r2=1653336&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Tue Jan 20 19:04:56 2015
@@ -902,7 +902,7 @@ public class TxnHandler {
 // so I've tried to capture the different error messages (there appear to 
be fewer different
 // error messages than SQL states).
 // Derby and newer MySQL driver use the new SQLTransactionRollbackException
-    if (dbProduct == null) {
+    if (dbProduct == null && conn != null) {
       determineDatabaseProduct(conn);
     }
 if (e instanceof SQLTransactionRollbackException ||
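
The guard matters because this classification code runs on the error path: if opening the connection itself failed, the caller never obtained a Connection. A sketch of the calling shape (hypothetical names; the surrounding TxnHandler code is not shown in this diff):

  Connection dbConn = null;
  try {
    dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
    // ... run the query ...
  } catch (SQLException e) {
    // If getDbConn() threw, dbConn is still null here, so any helper that
    // inspects the connection must tolerate null, hence the conn != null check.
    checkRetryable(dbConn, e, "getOpenTxns");  // hypothetical call site
  }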




svn commit: r1653406 - in /hive/trunk/hcatalog/src/test/e2e/templeton: ./ deployers/ drivers/ inpdir/ tests/

2015-01-20 Thread ekoifman
Author: ekoifman
Date: Tue Jan 20 23:26:26 2015
New Revision: 1653406

URL: http://svn.apache.org/r1653406
Log:
HIVE-9272 Tests for utf-8 support (Aswathy Chellammal Sreekumar via Eugene 
Koifman)

Added:
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/PigJoin䶴ㄩ鼾丄狜〇work.pig
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/artof䶴ㄩ鼾丄狜〇war.txt
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/table1.txt
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/table3.txt
hive/trunk/hcatalog/src/test/e2e/templeton/inpdir/table3ToJoin.txt
hive/trunk/hcatalog/src/test/e2e/templeton/tests/utf8.conf
Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/build.xml
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/build.xml?rev=1653406r1=1653405r2=1653406view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/build.xml (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/build.xml Tue Jan 20 23:26:26 
2015
@@ -120,6 +120,7 @@
     <arg value="${basedir}/tests/ddl.conf"/>
     <arg value="${basedir}/tests/jobsubmission.conf"/>
     <arg value="${basedir}/tests/jobsubmission2.conf"/>
+    <arg value="${basedir}/tests/utf8.conf"/>
   </exec>
 </target>
 

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh?rev=1653406r1=1653405r2=1653406view=diff
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
(original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
Tue Jan 20 23:26:26 2015
@@ -32,6 +32,8 @@ ${HADOOP_HOME}/bin/hdfs dfs -put ${PROJ_
 
 #For hadoop2 there are 2 separate jars
 ${HADOOP_HOME}/bin/hdfs dfs -put 
${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-examples-${HADOOP_VERSION}.jar
  webhcate2e/hexamples.jar
+#For utf8 test(for mapreduce) we need a jar with utf-8 characters in the name
+${HADOOP_HOME}/bin/hdfs dfs -put 
${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-examples-${HADOOP_VERSION}.jar
  webhcate2e/hadoop_examples_䶴ㄩ鼾丄狜〇_2_2_0.jar
 ${HADOOP_HOME}/bin/hdfs dfs -put 
${HADOOP_HOME}/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-${HADOOP_VERSION}.jar
 webhcate2e/hclient.jar
 ${HADOOP_HOME}/bin/hdfs dfs -put 
${HADOOP_HOME}/share/hadoop/tools/lib/hadoop-streaming-${HADOOP_VERSION}.jar  
/user/templeton/hadoop-streaming.jar
 

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1653406r1=1653405r2=1653406view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Tue 
Jan 20 23:26:26 2015
@@ -37,6 +37,7 @@ use English;
 use Storable qw(dclone);
 use File::Glob ':glob';
 use JSON::Path;
+use utf8;
 
 my $passedStr = 'passed';
 my $failedStr = 'failed';
@@ -922,12 +923,38 @@ sub compare
   $result = 0;
   next;
 }
-  
+my $exp_userargsvalue;
+my $r_userargsvalue;
+if(ref($exp_userargs{$key}) eq "ARRAY"){
+  my @values = $exp_userargs{$key};
+  my $num_values = @values;
+
+  for(my $i=0;$i<=$num_values;$i++){
+    if (utf8::is_utf8($exp_userargs{$key}[$i])){
+      $exp_userargs{$key}[$i] = utf8::decode($exp_userargs{$key}[$i]);
+      $r_userargs{$key}[$i] = utf8::decode($r_userargs{$key}[$i]);
+    }
+  }
+  $exp_userargsvalue = $exp_userargs{$key};
+  $r_userargsvalue = $r_userargs{$key};
+}
+else {
+  if (utf8::is_utf8($exp_userargs{$key}))
+  {
+    $exp_userargsvalue = utf8::decode($exp_userargs{$key});
+    $r_userargsvalue = utf8::decode($r_userargs{$key});
+  }
+  else
+  {
+    $exp_userargsvalue = $exp_userargs{$key};
+    $r_userargsvalue = $r_userargs{$key};
+  }
+}
     print $log "$0::$subName DEBUG comparing expected "
     .  "$key -> " . dump($exp_userargs{$key})
     .  " With result $key -> " . dump($r_userargs{$key}) . "\n";
 
-if (!Compare($exp_userargs{$key}, $r_userargs{$key
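
The change above decodes expected and result values to the same representation before comparing, so multi-byte names such as hadoop_examples_䶴ㄩ鼾丄狜〇_2_2_0.jar compare equal. The analogous step in Java is to decode bytes with an explicit charset instead of the platform default; a generic sketch, not code from this commit:

  import java.nio.charset.StandardCharsets;

  public class Utf8Compare {
    // Decode both sides as UTF-8 so the comparison does not depend on the
    // JVM's default charset.
    static boolean sameUtf8(byte[] expected, byte[] actual) {
      return new String(expected, StandardCharsets.UTF_8)
          .equals(new String(actual, StandardCharsets.UTF_8));
    }
  }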

svn commit: r1652558 [1/2] - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-16 Thread ekoifman
Author: ekoifman
Date: Sat Jan 17 02:54:40 2015
New Revision: 1652558

URL: http://svn.apache.org/r1652558
Log:
HIVE-9390 Enhance retry logic wrt DB access in TxnHandler (Eugene Koifman 
reviewed by Alan Gates)

Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1652558r1=1652557r2=1652558view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Sat Jan 17 02:54:40 2015
@@ -5377,126 +5377,74 @@ public class HiveMetaStore extends Thrif
 // Transaction and locking methods
 @Override
 public GetOpenTxnsResponse get_open_txns() throws TException {
-  try {
-return getTxnHandler().getOpenTxns();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxns();
 }
 
 // Transaction and locking methods
 @Override
 public GetOpenTxnsInfoResponse get_open_txns_info() throws TException {
-  try {
-return getTxnHandler().getOpenTxnsInfo();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxnsInfo();
 }
 
 @Override
 public OpenTxnsResponse open_txns(OpenTxnRequest rqst) throws TException {
-  try {
-return getTxnHandler().openTxns(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().openTxns(rqst);
 }
 
 @Override
 public void abort_txn(AbortTxnRequest rqst) throws NoSuchTxnException, 
TException {
-  try {
-getTxnHandler().abortTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().abortTxn(rqst);
 }
 
 @Override
 public void commit_txn(CommitTxnRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-getTxnHandler().commitTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().commitTxn(rqst);
 }
 
 @Override
 public LockResponse lock(LockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-return getTxnHandler().lock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().lock(rqst);
 }
 
 @Override
 public LockResponse check_lock(CheckLockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, NoSuchLockException, 
TException {
-  try {
-return getTxnHandler().checkLock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().checkLock(rqst);
 }
 
 @Override
 public void unlock(UnlockRequest rqst)
 throws NoSuchLockException, TxnOpenException, TException {
-  try {
-getTxnHandler().unlock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().unlock(rqst);
 }
 
 @Override
 public ShowLocksResponse show_locks(ShowLocksRequest rqst) throws 
TException {
-  try {
-return getTxnHandler().showLocks(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().showLocks(rqst);
 }
 
 @Override
 public void heartbeat(HeartbeatRequest ids)
 throws NoSuchLockException, NoSuchTxnException, TxnAbortedException, 
TException {
-  try {
-getTxnHandler().heartbeat(ids);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().heartbeat(ids);
 }
 
 @Override
 public HeartbeatTxnRangeResponse 
heartbeat_txn_range(HeartbeatTxnRangeRequest rqst)
   throws TException {
-  try {
-return getTxnHandler().heartbeatTxnRange(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().heartbeatTxnRange(rqst);
 }
 
 @Override
 public void compact(CompactionRequest rqst) throws TException {
-  try {
-getTxnHandler().compact(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().compact(rqst);
 }
 
 @Override
 public
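
The pattern of this change is uniform: each Thrift method stops wrapping MetaException in TException by hand. MetaException is itself declared in the metastore Thrift IDL, so it can propagate to the client directly; the retry handling that used to motivate the wrapper now lives inside TxnHandler (see the [2/2] part of this commit). A minimal sketch of that retry shape, with hypothetical helper names rather than the actual TxnHandler code:

  public GetOpenTxnsResponse getOpenTxns() throws MetaException {
    int attempt = 0;
    while (true) {
      try {
        return getOpenTxnsOnce();           // hypothetical: one DB round trip
      } catch (RetryException e) {          // hypothetical: transient SQL failure
        if (++attempt > retryLimit) {       // retryLimit / retryInterval come from
          throw new MetaException(e.getMessage());  // HMSHANDLERATTEMPTS / HMSHANDLERINTERVAL
        }
        try {
          Thread.sleep(retryInterval);      // back off before the next attempt
        } catch (InterruptedException ie) {
          Thread.currentThread().interrupt();
        }
      }
    }
  }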

svn commit: r1652558 [2/2] - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-16 Thread ekoifman
Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1652558r1=1652557r2=1652558view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Sat Jan 17 02:54:40 2015
@@ -77,7 +77,7 @@ public class TxnHandler {
   static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
 
   static private DataSource connPool;
-  private static Boolean lockLock = new Boolean(true); // Random object to 
lock on for the lock
+  private final static Object lockLock = new Object(); // Random object to 
lock on for the lock
   // method
 
   /**
@@ -87,10 +87,13 @@ public class TxnHandler {
   protected HiveConf conf;
   protected DatabaseProduct dbProduct;
 
-  // Transaction timeout, in milliseconds.
+  // (End user) Transaction timeout, in milliseconds.
   private long timeout;
 
   private String identifierQuoteString; // quotes to use for quoting tables, 
where necessary
+  private final long retryInterval;
+  private final int retryLimit;
+  private int retryNum;
 
   // DEADLOCK DETECTION AND HANDLING
   // A note to developers of this class.  ALWAYS access HIVE_LOCKS before TXNS 
to avoid deadlock
@@ -125,113 +128,122 @@ public class TxnHandler {
 timeout = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 
TimeUnit.MILLISECONDS);
 deadlockCnt = 0;
 buildJumpTable();
+retryInterval = HiveConf.getTimeVar(conf, 
HiveConf.ConfVars.HMSHANDLERINTERVAL, TimeUnit.MILLISECONDS);
+retryLimit = HiveConf.getIntVar(conf, 
HiveConf.ConfVars.HMSHANDLERATTEMPTS);
+
   }
 
   public GetOpenTxnsInfoResponse getOpenTxnsInfo() throws MetaException {
-// We need to figure out the current transaction number and the list of
-// open transactions.  To avoid needing a transaction on the underlying
-// database we'll look at the current transaction number first.  If it
-// subsequently shows up in the open list that's ok.
-Connection dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
-Statement stmt = null;
 try {
-  stmt = dbConn.createStatement();
-  String s = "select ntxn_next - 1 from NEXT_TXN_ID";
-  LOG.debug("Going to execute query <" + s + ">");
-  ResultSet rs = stmt.executeQuery(s);
-  if (!rs.next()) {
-throw new MetaException("Transaction tables not properly " +
+  // We need to figure out the current transaction number and the list of
+  // open transactions.  To avoid needing a transaction on the underlying
+  // database we'll look at the current transaction number first.  If it
+  // subsequently shows up in the open list that's ok.
+  Connection dbConn = null;
+  Statement stmt = null;
+  try {
+dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
+stmt = dbConn.createStatement();
+String s = "select ntxn_next - 1 from NEXT_TXN_ID";
+LOG.debug("Going to execute query <" + s + ">");
+ResultSet rs = stmt.executeQuery(s);
+if (!rs.next()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, no record found in next_txn_id");
-  }
-  long hwm = rs.getLong(1);
-  if (rs.wasNull()) {
-throw new MetaException("Transaction tables not properly " +
+}
+long hwm = rs.getLong(1);
+if (rs.wasNull()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, null record found in next_txn_id");
-  }
-
-  List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
-  s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
-  LOG.debug("Going to execute query<" + s + ">");
-  rs = stmt.executeQuery(s);
-  while (rs.next()) {
-char c = rs.getString(2).charAt(0);
-TxnState state;
-switch (c) {
-  case TXN_ABORTED:
-state = TxnState.ABORTED;
-break;
+}
 
-  case TXN_OPEN:
-state = TxnState.OPEN;
-break;
+List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
+s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
+LOG.debug("Going to execute query<" + s + ">");
+rs = stmt.executeQuery(s);
+while (rs.next()) {
+  char c = rs.getString(2).charAt(0);
+  TxnState state;
+  switch (c) {
+case TXN_ABORTED:
+  state = TxnState.ABORTED;
+  break;
+
+case TXN_OPEN:
+  state = TxnState.OPEN;
+  break;
 
-  default:
-throw new MetaException("Unexpected transaction state " + c +
+default:
+  throw new 
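
The structural point of this hunk is that the connection is now acquired inside the try block, so a fresh Connection is obtained on every retry attempt and released when the attempt ends. A sketch of the per-attempt JDBC cleanup idiom this enables (hypothetical helper, assuming java.sql imports; the actual cleanup code of the commit is not shown in this excerpt):

  static void closeQuietly(ResultSet rs, Statement stmt, Connection conn) {
    try { if (rs != null) rs.close(); } catch (SQLException e) { /* log and continue */ }
    try { if (stmt != null) stmt.close(); } catch (SQLException e) { /* log and continue */ }
    try { if (conn != null) conn.close(); } catch (SQLException e) { /* log and continue */ }
  }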

svn commit: r1651737 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/HiveParser.g test/org/apache/hadoop/hive/ql/parse/TestIUD.java

2015-01-14 Thread ekoifman
Author: ekoifman
Date: Wed Jan 14 17:26:22 2015
New Revision: 1651737

URL: http://svn.apache.org/r1651737
Log:
HIVE-9353 - make TABLE keyword optional in INSERT INTO TABLE foo...

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1651737r1=1651736r2=1651737view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Wed Jan 
14 17:26:22 2015
@@ -2216,7 +2216,7 @@ insertClause
 @after { popMsg(state); }
:
     KW_INSERT KW_OVERWRITE destination ifNotExists? -> ^(TOK_DESTINATION destination ifNotExists?)
-   | KW_INSERT KW_INTO KW_TABLE tableOrPartition
+   | KW_INSERT KW_INTO KW_TABLE? tableOrPartition
    -> ^(TOK_INSERT_INTO tableOrPartition)
    ;
 

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java?rev=1651737r1=1651736r2=1651737view=diff
==
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java 
(original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java Wed Jan 
14 17:26:22 2015
@@ -193,7 +193,7 @@ public class TestIUD {
   }
   @Test
   public void testInsertIntoTableAsSelectFromNamedVirtTable() throws 
ParseException {
-    ASTNode ast = parse("insert into table page_view select a,b as c from (values (1,2),(3,4)) as VC(a,b) where b = 9");
+    ASTNode ast = parse("insert into page_view select a,b as c from (values (1,2),(3,4)) as VC(a,b) where b = 9");
     Assert.assertEquals("AST doesn't match",
       "(TOK_QUERY " +
         "(TOK_FROM " +
@@ -209,7 +209,7 @@ public class TestIUD {
   }
   @Test
   public void testInsertIntoTableFromAnonymousTable1Row() throws 
ParseException {
-    ASTNode ast = parse("insert into table page_view values(1,2)");
+    ASTNode ast = parse("insert into page_view values(1,2)");
     Assert.assertEquals("AST doesn't match",
       "(TOK_QUERY " +
         "(TOK_FROM " +
@@ -232,5 +232,16 @@ public class TestIUD {
         "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) " +
           "(TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))",
           ast.toStringTree());
+    //same query as above less the table keyword KW_TABLE
+    ast = parse("insert into page_view values(-1,2),(3,+4)");
+    Assert.assertEquals("AST doesn't match",
+      "(TOK_QUERY " +
+        "(TOK_FROM " +
+        "(TOK_VIRTUAL_TABLE " +
+        "(TOK_VIRTUAL_TABREF TOK_ANONYMOUS) " +
+        "(TOK_VALUES_TABLE (TOK_VALUE_ROW (- 1) 2) (TOK_VALUE_ROW 3 (+ 4))))) " +
+        "(TOK_INSERT (TOK_INSERT_INTO (TOK_TAB (TOK_TABNAME page_view))) " +
+        "(TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))",
+      ast.toStringTree());
   }
 }
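
With KW_TABLE made optional, both spellings parse to the same tree; a quick sketch in the style of the tests above (using the same parse() helper and org.junit.Assert; not part of the commit):

  ASTNode withKeyword    = parse("insert into table page_view values(1,2)");
  ASTNode withoutKeyword = parse("insert into page_view values(1,2)");
  // Both yield TOK_INSERT_INTO over the same TOK_TAB, so the trees match:
  Assert.assertEquals(withKeyword.toStringTree(), withoutKeyword.toStringTree());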




svn commit: r1654447 - in /hive/branches/branch-0.14/metastore/src: java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java

2015-01-23 Thread ekoifman
Author: ekoifman
Date: Sat Jan 24 01:21:47 2015
New Revision: 1654447

URL: http://svn.apache.org/r1654447
Log:
HIVE-9404 NPE in 
org.apache.hadoop.hive.metastore.txn.TxnHandler.determineDatabaseProduct()

Added:

hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
Modified:

hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

Modified: 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1654447r1=1654446r2=1654447view=diff
==
--- 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Sat Jan 24 01:21:47 2015
@@ -902,7 +902,7 @@ public class TxnHandler {
 // so I've tried to capture the different error messages (there appear to 
be fewer different
 // error messages than SQL states).
 // Derby and newer MySQL driver use the new SQLTransactionRollbackException
-    if (dbProduct == null) {
+    if (dbProduct == null && conn != null) {
       determineDatabaseProduct(conn);
     }
 if (e instanceof SQLTransactionRollbackException ||

Added: 
hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java?rev=1654447view=auto
==
--- 
hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
 (added)
+++ 
hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandlerNegative.java
 Sat Jan 24 01:21:47 2015
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.txn;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.junit.Test;
+
+public class TestTxnHandlerNegative {
+  static final private Log LOG = LogFactory.getLog(TestTxnHandlerNegative.class);
+
+  /**
+   * this intentionally sets a bad URL for connection to test error handling logic
+   * in TxnHandler
+   * @throws Exception
+   */
+  @Test
+  public void testBadConnection() throws Exception {
+    HiveConf conf = new HiveConf();
+    conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY, "blah");
+    TxnHandler txnHandler1 = new TxnHandler(conf);
+    MetaException e = null;
+    try {
+      txnHandler1.getOpenTxns();
+    }
+    catch(MetaException ex) {
+      LOG.info("Expected error: " + ex.getMessage(), ex);
+      e = ex;
+    }
+    assert e != null : "did not get exception";
+  }
+}




svn commit: r1654443 [2/2] - in /hive/branches/branch-1.0/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-23 Thread ekoifman
Modified: 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1654443r1=1654442r2=1654443view=diff
==
--- 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Sat Jan 24 01:05:22 2015
@@ -77,7 +77,7 @@ public class TxnHandler {
   static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
 
   static private DataSource connPool;
-  private static Boolean lockLock = new Boolean(true); // Random object to 
lock on for the lock
+  private final static Object lockLock = new Object(); // Random object to 
lock on for the lock
   // method
 
   /**
@@ -87,10 +87,13 @@ public class TxnHandler {
   protected HiveConf conf;
   protected DatabaseProduct dbProduct;
 
-  // Transaction timeout, in milliseconds.
+  // (End user) Transaction timeout, in milliseconds.
   private long timeout;
 
   private String identifierQuoteString; // quotes to use for quoting tables, 
where necessary
+  private final long retryInterval;
+  private final int retryLimit;
+  private int retryNum;
 
   // DEADLOCK DETECTION AND HANDLING
   // A note to developers of this class.  ALWAYS access HIVE_LOCKS before TXNS 
to avoid deadlock
@@ -125,113 +128,122 @@ public class TxnHandler {
 timeout = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 
TimeUnit.MILLISECONDS);
 deadlockCnt = 0;
 buildJumpTable();
+retryInterval = HiveConf.getTimeVar(conf, 
HiveConf.ConfVars.HMSHANDLERINTERVAL, TimeUnit.MILLISECONDS);
+retryLimit = HiveConf.getIntVar(conf, 
HiveConf.ConfVars.HMSHANDLERATTEMPTS);
+
   }
 
   public GetOpenTxnsInfoResponse getOpenTxnsInfo() throws MetaException {
-// We need to figure out the current transaction number and the list of
-// open transactions.  To avoid needing a transaction on the underlying
-// database we'll look at the current transaction number first.  If it
-// subsequently shows up in the open list that's ok.
-Connection dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
-Statement stmt = null;
 try {
-  stmt = dbConn.createStatement();
-  String s = "select ntxn_next - 1 from NEXT_TXN_ID";
-  LOG.debug("Going to execute query <" + s + ">");
-  ResultSet rs = stmt.executeQuery(s);
-  if (!rs.next()) {
-throw new MetaException("Transaction tables not properly " +
+  // We need to figure out the current transaction number and the list of
+  // open transactions.  To avoid needing a transaction on the underlying
+  // database we'll look at the current transaction number first.  If it
+  // subsequently shows up in the open list that's ok.
+  Connection dbConn = null;
+  Statement stmt = null;
+  try {
+dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
+stmt = dbConn.createStatement();
+String s = "select ntxn_next - 1 from NEXT_TXN_ID";
+LOG.debug("Going to execute query <" + s + ">");
+ResultSet rs = stmt.executeQuery(s);
+if (!rs.next()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, no record found in next_txn_id");
-  }
-  long hwm = rs.getLong(1);
-  if (rs.wasNull()) {
-throw new MetaException("Transaction tables not properly " +
+}
+long hwm = rs.getLong(1);
+if (rs.wasNull()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, null record found in next_txn_id");
-  }
-
-  List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
-  s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
-  LOG.debug("Going to execute query<" + s + ">");
-  rs = stmt.executeQuery(s);
-  while (rs.next()) {
-char c = rs.getString(2).charAt(0);
-TxnState state;
-switch (c) {
-  case TXN_ABORTED:
-state = TxnState.ABORTED;
-break;
+}
 
-  case TXN_OPEN:
-state = TxnState.OPEN;
-break;
+List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
+s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
+LOG.debug("Going to execute query<" + s + ">");
+rs = stmt.executeQuery(s);
+while (rs.next()) {
+  char c = rs.getString(2).charAt(0);
+  TxnState state;
+  switch (c) {
+case TXN_ABORTED:
+  state = TxnState.ABORTED;
+  break;
+
+case TXN_OPEN:
+  state = TxnState.OPEN;
+  break;
 
-  default:
-throw new MetaException("Unexpected transaction state " + c +
+  

svn commit: r1654442 [2/2] - in /hive/branches/branch-0.14/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-23 Thread ekoifman
Modified: 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1654442r1=1654441r2=1654442view=diff
==
--- 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Sat Jan 24 01:05:12 2015
@@ -77,7 +77,7 @@ public class TxnHandler {
   static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
 
   static private DataSource connPool;
-  private static Boolean lockLock = new Boolean(true); // Random object to 
lock on for the lock
+  private final static Object lockLock = new Object(); // Random object to 
lock on for the lock
   // method
 
   /**
@@ -87,10 +87,13 @@ public class TxnHandler {
   protected HiveConf conf;
   protected DatabaseProduct dbProduct;
 
-  // Transaction timeout, in milliseconds.
+  // (End user) Transaction timeout, in milliseconds.
   private long timeout;
 
   private String identifierQuoteString; // quotes to use for quoting tables, 
where necessary
+  private final long retryInterval;
+  private final int retryLimit;
+  private int retryNum;
 
   // DEADLOCK DETECTION AND HANDLING
   // A note to developers of this class.  ALWAYS access HIVE_LOCKS before TXNS 
to avoid deadlock
@@ -125,113 +128,122 @@ public class TxnHandler {
 timeout = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 
TimeUnit.MILLISECONDS);
 deadlockCnt = 0;
 buildJumpTable();
+retryInterval = HiveConf.getTimeVar(conf, 
HiveConf.ConfVars.HMSHANDLERINTERVAL, TimeUnit.MILLISECONDS);
+retryLimit = HiveConf.getIntVar(conf, 
HiveConf.ConfVars.HMSHANDLERATTEMPTS);
+
   }
 
   public GetOpenTxnsInfoResponse getOpenTxnsInfo() throws MetaException {
-// We need to figure out the current transaction number and the list of
-// open transactions.  To avoid needing a transaction on the underlying
-// database we'll look at the current transaction number first.  If it
-// subsequently shows up in the open list that's ok.
-Connection dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
-Statement stmt = null;
 try {
-  stmt = dbConn.createStatement();
-  String s = "select ntxn_next - 1 from NEXT_TXN_ID";
-  LOG.debug("Going to execute query <" + s + ">");
-  ResultSet rs = stmt.executeQuery(s);
-  if (!rs.next()) {
-throw new MetaException("Transaction tables not properly " +
+  // We need to figure out the current transaction number and the list of
+  // open transactions.  To avoid needing a transaction on the underlying
+  // database we'll look at the current transaction number first.  If it
+  // subsequently shows up in the open list that's ok.
+  Connection dbConn = null;
+  Statement stmt = null;
+  try {
+dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
+stmt = dbConn.createStatement();
+String s = "select ntxn_next - 1 from NEXT_TXN_ID";
+LOG.debug("Going to execute query <" + s + ">");
+ResultSet rs = stmt.executeQuery(s);
+if (!rs.next()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, no record found in next_txn_id");
-  }
-  long hwm = rs.getLong(1);
-  if (rs.wasNull()) {
-throw new MetaException("Transaction tables not properly " +
+}
+long hwm = rs.getLong(1);
+if (rs.wasNull()) {
+  throw new MetaException("Transaction tables not properly " +
 "initialized, null record found in next_txn_id");
-  }
-
-  List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
-  s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
-  LOG.debug("Going to execute query<" + s + ">");
-  rs = stmt.executeQuery(s);
-  while (rs.next()) {
-char c = rs.getString(2).charAt(0);
-TxnState state;
-switch (c) {
-  case TXN_ABORTED:
-state = TxnState.ABORTED;
-break;
+}
 
-  case TXN_OPEN:
-state = TxnState.OPEN;
-break;
+List<TxnInfo> txnInfo = new ArrayList<TxnInfo>();
+s = "select txn_id, txn_state, txn_user, txn_host from TXNS";
+LOG.debug("Going to execute query<" + s + ">");
+rs = stmt.executeQuery(s);
+while (rs.next()) {
+  char c = rs.getString(2).charAt(0);
+  TxnState state;
+  switch (c) {
+case TXN_ABORTED:
+  state = TxnState.ABORTED;
+  break;
+
+case TXN_OPEN:
+  state = TxnState.OPEN;
+  break;
 
-  default:
-throw new MetaException("Unexpected transaction state " + c +
+  

svn commit: r1654442 [1/2] - in /hive/branches/branch-0.14/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-23 Thread ekoifman
Author: ekoifman
Date: Sat Jan 24 01:05:12 2015
New Revision: 1654442

URL: http://svn.apache.org/r1654442
Log:
HIVE-9390 Enhance retry logic wrt DB access in TxnHandler

Modified:

hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java

hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

hive/branches/branch-0.14/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java

Modified: 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1654442r1=1654441r2=1654442view=diff
==
--- 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/branches/branch-0.14/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Sat Jan 24 01:05:12 2015
@@ -5318,126 +5318,74 @@ public class HiveMetaStore extends Thrif
 // Transaction and locking methods
 @Override
 public GetOpenTxnsResponse get_open_txns() throws TException {
-  try {
-return getTxnHandler().getOpenTxns();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxns();
 }
 
 // Transaction and locking methods
 @Override
 public GetOpenTxnsInfoResponse get_open_txns_info() throws TException {
-  try {
-return getTxnHandler().getOpenTxnsInfo();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxnsInfo();
 }
 
 @Override
 public OpenTxnsResponse open_txns(OpenTxnRequest rqst) throws TException {
-  try {
-return getTxnHandler().openTxns(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().openTxns(rqst);
 }
 
 @Override
 public void abort_txn(AbortTxnRequest rqst) throws NoSuchTxnException, 
TException {
-  try {
-getTxnHandler().abortTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().abortTxn(rqst);
 }
 
 @Override
 public void commit_txn(CommitTxnRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-getTxnHandler().commitTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().commitTxn(rqst);
 }
 
 @Override
 public LockResponse lock(LockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-return getTxnHandler().lock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().lock(rqst);
 }
 
 @Override
 public LockResponse check_lock(CheckLockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, NoSuchLockException, 
TException {
-  try {
-return getTxnHandler().checkLock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().checkLock(rqst);
 }
 
 @Override
 public void unlock(UnlockRequest rqst)
 throws NoSuchLockException, TxnOpenException, TException {
-  try {
-getTxnHandler().unlock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().unlock(rqst);
 }
 
 @Override
 public ShowLocksResponse show_locks(ShowLocksRequest rqst) throws 
TException {
-  try {
-return getTxnHandler().showLocks(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().showLocks(rqst);
 }
 
 @Override
 public void heartbeat(HeartbeatRequest ids)
 throws NoSuchLockException, NoSuchTxnException, TxnAbortedException, 
TException {
-  try {
-getTxnHandler().heartbeat(ids);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().heartbeat(ids);
 }
 
 @Override
 public HeartbeatTxnRangeResponse 
heartbeat_txn_range(HeartbeatTxnRangeRequest rqst)
   throws TException {
-  try {
-return getTxnHandler().heartbeatTxnRange(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().heartbeatTxnRange(rqst);
 }
 
 @Override
 public void compact(CompactionRequest rqst) throws TException {
-  try {
-getTxnHandler().compact(rqst);
-  } catch (MetaException e) {
-throw new TException(e

svn commit: r1654443 [1/2] - in /hive/branches/branch-1.0/metastore/src: java/org/apache/hadoop/hive/metastore/ java/org/apache/hadoop/hive/metastore/txn/ test/org/apache/hadoop/hive/metastore/txn/

2015-01-23 Thread ekoifman
Author: ekoifman
Date: Sat Jan 24 01:05:22 2015
New Revision: 1654443

URL: http://svn.apache.org/r1654443
Log:
HIVE-9390 Enhance retry logic wrt DB access in TxnHandler

Modified:

hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java

hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

hive/branches/branch-1.0/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestTxnHandler.java

Modified: 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1654443r1=1654442r2=1654443view=diff
==
--- 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/branches/branch-1.0/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Sat Jan 24 01:05:22 2015
@@ -5318,126 +5318,74 @@ public class HiveMetaStore extends Thrif
 // Transaction and locking methods
 @Override
 public GetOpenTxnsResponse get_open_txns() throws TException {
-  try {
-return getTxnHandler().getOpenTxns();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxns();
 }
 
 // Transaction and locking methods
 @Override
 public GetOpenTxnsInfoResponse get_open_txns_info() throws TException {
-  try {
-return getTxnHandler().getOpenTxnsInfo();
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().getOpenTxnsInfo();
 }
 
 @Override
 public OpenTxnsResponse open_txns(OpenTxnRequest rqst) throws TException {
-  try {
-return getTxnHandler().openTxns(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().openTxns(rqst);
 }
 
 @Override
 public void abort_txn(AbortTxnRequest rqst) throws NoSuchTxnException, 
TException {
-  try {
-getTxnHandler().abortTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().abortTxn(rqst);
 }
 
 @Override
 public void commit_txn(CommitTxnRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-getTxnHandler().commitTxn(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().commitTxn(rqst);
 }
 
 @Override
 public LockResponse lock(LockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, TException {
-  try {
-return getTxnHandler().lock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().lock(rqst);
 }
 
 @Override
 public LockResponse check_lock(CheckLockRequest rqst)
 throws NoSuchTxnException, TxnAbortedException, NoSuchLockException, 
TException {
-  try {
-return getTxnHandler().checkLock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().checkLock(rqst);
 }
 
 @Override
 public void unlock(UnlockRequest rqst)
 throws NoSuchLockException, TxnOpenException, TException {
-  try {
-getTxnHandler().unlock(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().unlock(rqst);
 }
 
 @Override
 public ShowLocksResponse show_locks(ShowLocksRequest rqst) throws 
TException {
-  try {
-return getTxnHandler().showLocks(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().showLocks(rqst);
 }
 
 @Override
 public void heartbeat(HeartbeatRequest ids)
 throws NoSuchLockException, NoSuchTxnException, TxnAbortedException, 
TException {
-  try {
-getTxnHandler().heartbeat(ids);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  getTxnHandler().heartbeat(ids);
 }
 
 @Override
 public HeartbeatTxnRangeResponse 
heartbeat_txn_range(HeartbeatTxnRangeRequest rqst)
   throws TException {
-  try {
-return getTxnHandler().heartbeatTxnRange(rqst);
-  } catch (MetaException e) {
-throw new TException(e);
-  }
+  return getTxnHandler().heartbeatTxnRange(rqst);
 }
 
 @Override
 public void compact(CompactionRequest rqst) throws TException {
-  try {
-getTxnHandler().compact(rqst);
-  } catch (MetaException e) {
-throw new TException(e

svn commit: r1650647 - /hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

2015-01-09 Thread ekoifman
Author: ekoifman
Date: Fri Jan  9 20:24:27 2015
New Revision: 1650647

URL: http://svn.apache.org/r1650647
Log:
HIVE-9316 TestSqoop tests in WebHCat testsuite hardcode libdir path to hdfs 
(Deepesh Khandelwal via Eugene Koifman)

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf?rev=1650647r1=1650646r2=1650647view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf Fri Jan 
 9 20:24:27 2015
@@ -524,7 +524,7 @@ $cfg =
  'num' => 1,
  'method' => 'POST',
  'url' => ':TEMPLETON_URL:/templeton/v1/sqoop?user.name=:UNAME:',
- 'post_options' => ['libdir=hdfs:///apps/templeton/jdbc', 'command=export --connect :DB_CONNECTION_STRING: --username :DB_USER_NAME: --password :DB_PASSWORD: --export-dir :INPDIR_HDFS:/sqoop --table person','statusdir=TestSqoop_:TNUM:' ],
+ 'post_options' => ['libdir=/apps/templeton/jdbc', 'command=export --connect :DB_CONNECTION_STRING: --username :DB_USER_NAME: --password :DB_PASSWORD: --export-dir :INPDIR_HDFS:/sqoop --table person','statusdir=TestSqoop_:TNUM:' ],
  'json_field_substr_match' => { 'id' => '\d+'},
#results
  'status_code' => 200,
@@ -539,7 +539,7 @@ $cfg =
  'num' => 2,
  'method' => 'POST',
  'url' => ':TEMPLETON_URL:/templeton/v1/sqoop?user.name=:UNAME:',
- 'post_options' => ['libdir=hdfs:///apps/templeton/jdbc', 'files=:INPDIR_HDFS:/sqoopcommand.txt','command=import --connect :DB_CONNECTION_STRING: --username :DB_USER_NAME: --password :DB_PASSWORD: --options-file sqoopcommand.txt','statusdir=TestSqoop_:TNUM:' ],
+ 'post_options' => ['libdir=/apps/templeton/jdbc', 'files=:INPDIR_HDFS:/sqoopcommand.txt','command=import --connect :DB_CONNECTION_STRING: --username :DB_USER_NAME: --password :DB_PASSWORD: --options-file sqoopcommand.txt','statusdir=TestSqoop_:TNUM:' ],
  'json_field_substr_match' => { 'id' => '\d+'},
#results
  'status_code' => 200,
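
Dropping the explicit hdfs:// scheme makes the libdir resolve against whatever fs.defaultFS the cluster is configured with, so the same test conf also works on clusters whose default file system is not HDFS. In HDFS API terms (a sketch, not code from the commit; assumes the usual org.apache.hadoop.conf and org.apache.hadoop.fs imports):

  Configuration conf = new Configuration();
  Path libdir = new Path("/apps/templeton/jdbc");   // scheme-less: portable
  FileSystem fs = libdir.getFileSystem(conf);       // resolved via fs.defaultFS
  Path qualified = fs.makeQualified(libdir);        // e.g. hdfs://nn:8020/apps/templeton/jdbc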




svn commit: r1651558 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java

2015-01-13 Thread ekoifman
Author: ekoifman
Date: Wed Jan 14 02:46:32 2015
New Revision: 1651558

URL: http://svn.apache.org/r1651558
Log:
HIVE-8914 HDFSCleanup thread holds reference to FileSystem (shanyu zhao via 
Eugene Koifman)

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java?rev=1651558r1=1651557r2=1651558view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
 Wed Jan 14 02:46:32 2015
@@ -91,18 +91,25 @@ public class HDFSCleanup extends Thread
*
*/
   public void run() {
-    FileSystem fs = null;
     while (!stop) {
       try {
         // Put each check in a separate try/catch, so if that particular
         // cycle fails, it'll try again on the next cycle.
+        FileSystem fs=null;
         try {
-          if (fs == null) {
-            fs = new Path(storage_root).getFileSystem(appConf);
-          }
+          fs = new Path(storage_root).getFileSystem(appConf);
           checkFiles(fs);
         } catch (Exception e) {
           LOG.error("Cleanup cycle failed: " + e.getMessage());
+        } finally {
+          if(fs != null) {
+            try {
+              fs.close();
+            }
+            catch (Exception e) {
+              LOG.error("Closing file system failed: " + e.getMessage());
+            }
+          }
         }
 
         long sleepMillis = (long) (Math.random() * interval);
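
The fix scopes the FileSystem instance to a single cleanup cycle instead of caching it for the thread's lifetime. On Java 7+ the same per-cycle lifetime could be written with try-with-resources, since FileSystem implements Closeable; a sketch of the equivalent, not the committed code (note that depending on the FileSystem cache configuration, close() may affect other holders of the same cached instance):

  try (FileSystem fs = new Path(storage_root).getFileSystem(appConf)) {
    checkFiles(fs);                    // fs is closed when the block exits
  } catch (Exception e) {
    LOG.error("Cleanup cycle failed: " + e.getMessage());
  }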




svn commit: r1659640 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ test/org/apache/hadoop/hive/ql/parse/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientn

2015-02-13 Thread ekoifman
Author: ekoifman
Date: Fri Feb 13 18:27:13 2015
New Revision: 1659640

URL: http://svn.apache.org/r1659640
Log:
HIVE-9481 allow column list specification in INSERT statement

Added:
hive/trunk/ql/src/test/queries/clientnegative/insert_into_with_schema.q
hive/trunk/ql/src/test/queries/clientnegative/insert_into_with_schema1.q
hive/trunk/ql/src/test/queries/clientnegative/insert_into_with_schema2.q
hive/trunk/ql/src/test/queries/clientnegative/insert_into_with_schema3.q
hive/trunk/ql/src/test/queries/clientnegative/insert_into_with_schema4.q
hive/trunk/ql/src/test/queries/clientpositive/insert_into_with_schema.q
hive/trunk/ql/src/test/results/clientnegative/insert_into_with_schema.q.out
hive/trunk/ql/src/test/results/clientnegative/insert_into_with_schema1.q.out
hive/trunk/ql/src/test/results/clientnegative/insert_into_with_schema2.q.out
hive/trunk/ql/src/test/results/clientnegative/insert_into_with_schema3.q.out
hive/trunk/ql/src/test/results/clientnegative/insert_into_with_schema4.q.out
hive/trunk/ql/src/test/results/clientpositive/insert_into_with_schema.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestIUD.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1659640r1=1659639r2=1659640view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Fri Feb 
13 18:27:13 2015
@@ -2284,8 +2284,8 @@ insertClause
 @after { popMsg(state); }
:
     KW_INSERT KW_OVERWRITE destination ifNotExists? -> ^(TOK_DESTINATION destination ifNotExists?)
-   | KW_INSERT KW_INTO KW_TABLE? tableOrPartition
-   -> ^(TOK_INSERT_INTO tableOrPartition)
+   | KW_INSERT KW_INTO KW_TABLE? tableOrPartition (LPAREN targetCols=columnNameList RPAREN)?
+   -> ^(TOK_INSERT_INTO tableOrPartition $targetCols?)
;
 
 destination

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=1659640r1=1659639r2=1659640view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Fri 
Feb 13 18:27:13 2015
@@ -103,6 +103,9 @@ public class QBMetaData {
 return nameToDestType.get(alias.toLowerCase());
   }
 
+  /**
+   * @param alias this is actually dest name, like "insclause-0"
+   */
   public Table getDestTableForAlias(String alias) {
 return nameToDestTable.get(alias.toLowerCase());
   }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java?rev=1659640r1=1659639r2=1659640view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java Fri 
Feb 13 18:27:13 2015
@@ -43,7 +43,15 @@ public class QBParseInfo {
   private ASTNode joinExpr;
   private ASTNode hints;
   private final HashMap<String, ASTNode> aliasToSrc;
+  /**
+   * insclause-0 -> TOK_TAB ASTNode
+   */
   private final HashMap<String, ASTNode> nameToDest;
+  /**
+   * For 'insert into FOO(x,y) select ...' this stores the
+   * insclause-0 -> x,y mapping
+   */
+  private final Map<String, List<String>> nameToDestSchema;
   private final HashMap<String, TableSample> nameToSample;
   private final Map<ASTNode, String> exprToColumnAlias;
   private final Map<String, ASTNode> destToSelExpr;
@@ -111,6 +119,7 @@ public class QBParseInfo {
   public QBParseInfo(String alias, boolean isSubQ) {
     aliasToSrc = new HashMap<String, ASTNode>();
     nameToDest = new HashMap<String, ASTNode>();
+    nameToDestSchema = new HashMap<String, List<String>>();
     nameToSample = new HashMap<String, TableSample>();
     exprToColumnAlias = new HashMap<ASTNode, String>();
     destToLateralView = new HashMap<String, ASTNode>();
@@ -234,6 +243,13 @@ public class QBParseInfo {
     nameToDest.put(clause, ast);
   }
 
+  List<String> setDestSchemaForClause(String clause, List<String> columnList) {
+    return nameToDestSchema.put(clause, columnList);
+  }
+  List<String> getDestSchemaForClause(String
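
Statements the extended grammar accepts, in the style of the TestIUD cases above (a sketch; the table and column names are illustrative only):

  // explicit target column list, with and without the TABLE keyword:
  parse("insert into page_view (viewtime, userid) values('2015-01-01', 'u1')");
  parse("insert into table page_view (viewtime, userid) select t, u from staging");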

svn propchange: r1659640 - svn:log

2015-02-13 Thread ekoifman
Author: ekoifman
Revision: 1659640
Modified property: svn:log

Modified: svn:log at Fri Feb 13 18:46:41 2015
--
--- svn:log (original)
+++ svn:log Fri Feb 13 18:46:41 2015
@@ -1 +1 @@
-HIVE-9481 allow column list specification in INSERT statement
+HIVE-9481 allow column list specification in INSERT statement (Eugene Koifman, 
reviewed by Alan Gates)



svn commit: r1670162 - in /hive/trunk/hcatalog: src/test/e2e/templeton/deployers/ src/test/e2e/templeton/deployers/config/webhcat/ webhcat/svr/src/main/config/ webhcat/svr/src/main/java/org/apache/hiv

2015-03-30 Thread ekoifman
Author: ekoifman
Date: Mon Mar 30 18:10:29 2015
New Revision: 1670162

URL: http://svn.apache.org/r1670162
Log:
HIVE-10066 Hive on Tez job submission through WebHCat doesn't ship Tez 
artifacts (Eugene Koifman, reviewed by Thejas Nair)

Modified:

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/TempletonDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TrivialExecService.java

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml?rev=1670162r1=1670161r2=1670162view=diff
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 (original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 Mon Mar 30 18:10:29 2015
@@ -35,7 +35,7 @@
 
 property
 nametempleton.libjars/name
-value${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.5.jar/value
+
value${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar/value
 descriptionJars to add to the classpath./description
 /property
 
@@ -69,6 +69,11 @@
 shipped to the target node in the cluster to execute Pig job which 
uses 
 HCat, Hive query, etc./description
 /property
+
+property
+  nametempleton.hive.extra.files/name
+  
value${env.TEZ_CLIENT_HOME}/conf/tez-site.xml,${env.TEZ_CLIENT_HOME}/,${env.TEZ_CLIENT_HOME}/lib/value
+/property
 property
 nametempleton.hcat.home/name
 
valueapache-hive-${env.HIVE_VERSION}-bin.tar.gz/apache-hive-${env.HIVE_VERSION}-bin/hcatalog/value
@@ -101,7 +106,7 @@
 /property
 
 property
-!--\,thrift://127.0.0.1:9933--
+!--\,thrift://127.0.0.1:9933,,hive.execution.engine=tez--
 nametempleton.hive.properties/name
 
valuehive.metastore.uris=thrift://localhost:9933,hive.metastore.sasl.enabled=false/value
 /property

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1670162r1=1670161r2=1670162view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh Mon Mar 30 
18:10:29 2015
@@ -36,6 +36,10 @@ if [ -z ${PIG_VERSION} ]; then
   export PIG_VERSION=0.12.2-SNAPSHOT
 fi
 
+if [ -z ${TEZ_VERSION} ]; then
+  export TEZ_VERSION=0.5.3
+fi
+
 #Root of project source tree
 if [ -z ${PROJ_HOME} ]; then
   export PROJ_HOME=/Users/${USER}/dev/hive
@@ -46,6 +50,7 @@ if [ -z ${HADOOP_HOME} ]; then
   export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
 fi
 
+export TEZ_CLIENT_HOME=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build
 #this is part of Pig distribution

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1670162r1=1670161r2=1670162view=diff
==
--- hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml 
(original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Mon Mar 
30 18:10:29 2015
@@ -39,7 +39,7 @@
 
   <property>
     <name>templeton.libjars</name>
-    <value>${env.TEMPLETON_HOME}/share/webhcat/svr/lib/zookeeper-3.4.3.jar</value>
+    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
     <description>Jars to add to the classpath.</description>
   </property>
 
@@ -106,7 +106,20 @@
   <property>
     <name>templeton.hive.path</name>
     <value>hive-0.11.0.tar.gz/hive-0.11.0/bin/hive</value>
-    <description>The path to the Hive executable.</description>

svn commit: r1670160 - in /hive/trunk/hcatalog/webhcat/svr/src/main: config/webhcat-default.xml java/org/apache/hive/hcatalog/templeton/AppConfig.java java/org/apache/hive/hcatalog/templeton/tool/Temp

2015-03-30 Thread ekoifman
Author: ekoifman
Date: Mon Mar 30 18:00:26 2015
New Revision: 1670160

URL: http://svn.apache.org/r1670160
Log:
HIVE-10050 Support overriding memory configuration for AM launched for 
TempletonControllerJob (Hitesh Shah via Eugene Koifman)

Modified:
hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1670160r1=1670159r2=1670160view=diff
==
--- hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml 
(original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Mon Mar 
30 18:00:26 2015
@@ -197,6 +197,32 @@
     </description>
   </property>
 
+  <!--
+  <property>
+    <name>templeton.controller.mr.am.java.opts</name>
+    <value></value>
+    <description>Java options to be set for the templeton controller job's
+    MapReduce application master. When submitting the controller job,
+    Templeton will override yarn.app.mapreduce.am.command-opts with
+    this value.  If this is not specified, Templeton will not set the
+    property and therefore the value will be picked up from
+    mapred-site.xml.
+    </description>
+  </property>
+
+  <property>
+    <name>templeton.mr.am.memory.mb</name>
+    <value></value>
+    <description>Templeton controller job's Application Master's memory
+    limit in MB. When submitting controller job, Templeton will
+    overwrite yarn.app.mapreduce.am.resource.mb with this value. If
+    empty, Templeton will not set yarn.app.mapreduce.am.resource.mb
+    when submitting the controller job, therefore the configuration
+    in mapred-site.xml will be used.
+    </description>
+  </property>
+  -->
+
   <property>
     <name>templeton.exec.envs</name>
     <value>HADOOP_PREFIX,HADOOP_HOME,JAVA_HOME,HIVE_HOME</value>

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java?rev=1670160r1=1670159r2=1670160view=diff
==
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/AppConfig.java
 Mon Mar 30 18:00:26 2015
@@ -35,7 +35,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
@@ -104,6 +103,8 @@ public class AppConfig extends Configura
   public static final String HIVE_ARCHIVE_NAME   = "templeton.hive.archive";
   public static final String HIVE_PATH_NAME      = "templeton.hive.path";
   public static final String MAPPER_MEMORY_MB    = "templeton.mapper.memory.mb";
+  public static final String MR_AM_MEMORY_MB     = "templeton.mr.am.memory.mb";
+
   /**
    * see webhcat-default.xml
    */
@@ -130,6 +131,8 @@ public class AppConfig extends Configura
   public static final String OVERRIDE_JARS_ENABLED = "templeton.override.enabled";
   public static final String TEMPLETON_CONTROLLER_MR_CHILD_OPTS
     = "templeton.controller.mr.child.opts";
+  public static final String TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS
+    = "templeton.controller.mr.am.java.opts";
 
   public static final String KERBEROS_SECRET     = "templeton.kerberos.secret";
   public static final String KERBEROS_PRINCIPAL  = "templeton.kerberos.principal";
@@ -148,6 +151,8 @@ public class AppConfig extends Configura
     = "mapred.map.tasks.speculative.execution";
   public static final String HADOOP_CHILD_JAVA_OPTS = "mapred.child.java.opts";
   public static final String HADOOP_MAP_MEMORY_MB = "mapreduce.map.memory.mb";
+  public static final String HADOOP_MR_AM_JAVA_OPTS = "yarn.app.mapreduce.am.command-opts";
+  public static final String HADOOP_MR_AM_MEMORY_MB = "yarn.app.mapreduce.am.resource.mb";
   public static final String UNIT_TEST_MODE = "templeton.unit.test.mode";
 
 
@@ -313,7 +318,13 @@ public class AppConfig extends Configura
   public String controllerMRChildOpts() {
     return get(TEMPLETON_CONTROLLER_MR_CHILD_OPTS);
   }
+  public String controllerAMChildOpts() {
+    return get(TEMPLETON_CONTROLLER_MR_AM_JAVA_OPTS);
+  }
   public String mapperMemoryMb()   { return get(MAPPER_MEMORY_MB
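
For context, the intended consumption of the two new knobs: when Templeton submits the controller job it copies them onto the job's Configuration, and when they are empty it sets nothing, so mapred-site.xml wins. A minimal sketch of that override logic, assuming only hadoop-common on the classpath (AmOverrideSketch and its constants are illustrative, not the committed TempletonControllerJob code):

import org.apache.hadoop.conf.Configuration;

public class AmOverrideSketch {
  static final String MR_AM_MEMORY_MB = "templeton.mr.am.memory.mb";
  static final String CONTROLLER_AM_JAVA_OPTS = "templeton.controller.mr.am.java.opts";
  static final String HADOOP_MR_AM_MEMORY_MB = "yarn.app.mapreduce.am.resource.mb";
  static final String HADOOP_MR_AM_JAVA_OPTS = "yarn.app.mapreduce.am.command-opts";

  /** Copies the Templeton-level settings onto the controller job's conf, if set. */
  static void applyAmOverrides(Configuration appConf, Configuration jobConf) {
    String amMemory = appConf.get(MR_AM_MEMORY_MB);
    if (amMemory != null && !amMemory.trim().isEmpty()) {
      jobConf.set(HADOOP_MR_AM_MEMORY_MB, amMemory);  // otherwise mapred-site.xml applies
    }
    String amOpts = appConf.get(CONTROLLER_AM_JAVA_OPTS);
    if (amOpts != null && !amOpts.trim().isEmpty()) {
      jobConf.set(HADOOP_MR_AM_JAVA_OPTS, amOpts);    // otherwise mapred-site.xml applies
    }
  }

  public static void main(String[] args) {
    Configuration app = new Configuration(false);
    app.set(MR_AM_MEMORY_MB, "2048");
    Configuration job = new Configuration(false);
    applyAmOverrides(app, job);
    System.out.println(job.get(HADOOP_MR_AM_MEMORY_MB)); // prints 2048
  }
}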

svn commit: r1651445 - in /hive/trunk/hcatalog: src/test/e2e/templeton/ src/test/e2e/templeton/deployers/ src/test/e2e/templeton/drivers/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/ap

2015-01-13 Thread ekoifman
Author: ekoifman
Date: Tue Jan 13 19:15:27 2015
New Revision: 1651445

URL: http://svn.apache.org/r1651445
Log:
HIVE-9351 Running Hive Jobs with Tez cause templeton to never report percent 
complete

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/README.txt?rev=1651445r1=1651444r2=1651445view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/README.txt Tue Jan 13 19:15:27 
2015
@@ -223,3 +223,14 @@ enough map slots (10?) (mapred.tasktrack
 Adding Tests
 
 ToDo: add some guidelines
+
+Running on Tez
+1. set up Tez as in http://tez.apache.org/install.html
+2. set hive.execution.engine=tez in hive-site.xml (actually is this needed?)
+3. add hive.execution.engine=tez to templeton.hive.properties in 
webhcat-site.xml
+4. add to mapred-env.sh/yarn-env.sh (as you defined these in step 1)
+export TEZ_VERSION=0.5.3
+export TEZ_JARS=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
+export TEZ_CONF_DIR=${TEZ_JARS}/conf
+export 
HADOOP_CLASSPATH=${TEZ_CONF_DIR}:${TEZ_JARS}/*:${TEZ_JARS}/lib/*:${HADOOP_CLASSPATH}
+(w/o this you'll see something like java.lang.NoClassDefFoundError: 
org/apache/tez/dag/api/SessionNotRunning)
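
A quick way to confirm that step 4 took effect is to probe for the very class named in that error (an illustrative helper, not part of the e2e suite):

public class TezClasspathCheck {
  public static void main(String[] args) throws Exception {
    // Throws ClassNotFoundException if the Tez client jars are not on the classpath
    Class.forName("org.apache.tez.dag.api.SessionNotRunning");
    System.out.println("Tez client jars are on the classpath");
  }
}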

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh?rev=1651445r1=1651444r2=1651445view=diff
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
(original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
Tue Jan 13 19:15:27 2015
@@ -48,5 +48,17 @@ ${HADOOP_HOME}/bin/hadoop fs -put ${PIG_
 
 ${HADOOP_HOME}/bin/hadoop fs -put 
/Users/ekoifman/dev/sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz 
/apps/templeton/sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz
 ${HADOOP_HOME}/bin/hadoop fs -put 
/Users/ekoifman/dev/mysql-connector-java-5.1.30/mysql-connector-java-5.1.30-bin.jar
 /apps/templeton/jdbc/mysql-connector-java.jar
+
+#Tez set up (http://tez.apache.org/install.html)
+#if not using Tez - ignore this
+${HADOOP_HOME}/bin/hdfs dfs -put 
/Users/ekoifman/dev/apache-tez-${TEZ_VERSION}-src/tez-dist/target/tez-${TEZ_VERSION}.tar.gz
 /apps/tez-${TEZ_VERSION}.tar.gz
+${HADOOP_HOME}/bin/hdfs dfs -mkdir /tmp/tezin
+${HADOOP_HOME}/bin/hdfs dfs -mkdir /tmp/tezout
+${HADOOP_HOME}/bin/hdfs dfs -put /Users/ekoifman/dev/hive/build.sh /tmp/tezin
+#Above line is for Sanity Check: this is to run #6 in 
http://tez.apache.org/install.html
+#$HADOOP_HOME/bin/hadoop jar tez-examples-0.5.3.jar orderedwordcount 
/tmp/tezin /tmp/tezout
+
+
+
 #check what got deployed
-${HADOOP_HOME}/bin/hdfs dfs -ls -R /apps/templeton webhcate2e /user/templeton 
/user/hive/warehouse
+${HADOOP_HOME}/bin/hdfs dfs -ls -R /apps webhcate2e /user/templeton 
/user/hive/warehouse

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1651445r1=1651444r2=1651445view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh Tue Jan 13 
19:15:27 2015
@@ -22,14 +22,29 @@
 
 # define necessary env vars here and source it in other files
 
-export HADOOP_VERSION=2.4.1-SNAPSHOT
-#export HIVE_VERSION=0.14.0-SNAPSHOT
-export PIG_VERSION=0.12.2-SNAPSHOT
+echo ${HADOOP_VERSION};
+
+if [ -z ${HADOOP_VERSION} ]; then
+  export HADOOP_VERSION=2.4.1-SNAPSHOT
+fi
+
+if [ -z ${HIVE_VERSION} ]; then
+  export HIVE_VERSION=0.14.0-SNAPSHOT
+fi
+
+if [ -z ${PIG_VERSION} ]; then
+  export PIG_VERSION=0.12.2-SNAPSHOT
+fi
 
 #Root of project source tree
-export PROJ_HOME=/Users/${USER}/dev/hive
+if [ -z ${PROJ_HOME} ]; then
+  export PROJ_HOME=/Users/${USER}/dev/hive
+fi
 export 
HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
-export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
+
+if [ -z ${HADOOP_HOME} ]; then
+  export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION

svn commit: r1671110 - /hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml

2015-04-03 Thread ekoifman
Author: ekoifman
Date: Fri Apr  3 17:46:03 2015
New Revision: 1671110

URL: http://svn.apache.org/r1671110
Log:
HIVE-10208 templeton.hive.extra.files should be commented out in 
webhcat-default.xml

Modified:
hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml?rev=1671110r1=1671109r2=1671110view=diff
==
--- hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml 
(original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml Fri Apr 
 3 17:46:03 2015
@@ -111,11 +111,12 @@
 
   <property>
     <name>templeton.hive.extra.files</name>
-    <value>/tez-client/conf/tez-site.xml,/tez-client/,/tez-client/lib</value>
+    <value></value>
     <description>The resources in this list will be localized to the node running LaunchMapper and added to HADOOP_CLASSPATH
      before launching the 'hive' command.  If the path /foo/bar is a directory, the contents of the entire dir will be localized
      and ./bar/* will be added to HADOOP_CLASSPATH.  Note that since classpath path processing does not recurse into subdirectories,
-     the paths in this property may be overlapping.  In the example above, ./tez-site.xml:./tez-client/*:./lib/* will be added to
+     the paths in this property may be overlapping.  For example, to run Hive on Tez jobs, 3 items need to be localized:
+     /tez-client/conf/tez-site.xml,/tez-client/,/tez-client/lib.  In this example, ./tez-site.xml:./tez-client/*:./lib/* will be added to
      HADOOP_CLASSPATH.
      This can be used to specify config files, Tez artifacts, etc.  This will be sent as the -files option of the hadoop jar command, thus
      each path is interpreted by the Generic Options Parser.  It can be a local or hdfs path.




svn commit: r1673249 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/ErrorMsg.java test/org/apache/hadoop/hive/ql/TestErrorMsg.java test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanti

2015-04-13 Thread ekoifman
Author: ekoifman
Date: Mon Apr 13 17:48:23 2015
New Revision: 1673249

URL: http://svn.apache.org/r1673249
Log:
HIVE-10152 ErrorMsg.formatToErrorMsgMap has bad regex (Eugene Koifman, reviewed 
by Alan Gates)

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1673249r1=1673248r2=1673249view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Mon Apr 13 
17:48:23 2015
@@ -444,6 +444,9 @@ public enum ErrorMsg {
   is controlled by hive.exec.max.dynamic.partitions and hive.exec.max.dynamic.partitions.pernode. "),
   PARTITION_SCAN_LIMIT_EXCEEDED(20005, "Number of partitions scanned (={0}) on table {1} exceeds limit" +
     " (={2}). This is controlled by hive.limit.query.max.table.partition.", true),
+  OP_NOT_ALLOWED_IN_AUTOCOMMIT(20006, "Operation {0} is not allowed when autoCommit=true.", true),//todo: better SQLState?
+  OP_NOT_ALLOWED_IN_TXN(20007, "Operation {0} is not allowed in a transaction.  TransactionID={1}.", true),
+  OP_NOT_ALLOWED_WITHOUT_TXN(2008, "Operation {0} is not allowed since autoCommit=false and there is no active transaction", true),
 
   //== 3 range starts here ==//
   STATSPUBLISHER_NOT_OBTAINED(3, "StatsPublisher cannot be obtained. " +
@@ -509,7 +512,7 @@ public enum ErrorMsg {
   static {
     for (ErrorMsg errorMsg : values()) {
       if (errorMsg.format != null) {
-        String pattern = errorMsg.mesg.replaceAll("\\{.*\\}", ".*");
+        String pattern = errorMsg.mesg.replaceAll("\\{[0-9]+\\}", ".*");
         formatToErrorMsgMap.put(Pattern.compile("^" + pattern + "$"), errorMsg);
       } else {
         mesgToErrorMsgMap.put(errorMsg.getMsg().trim(), errorMsg);
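
The bug is easiest to see with a message that has two placeholders: the greedy .* in the old regex spans from the first '{' to the last '}', so everything between the placeholders collapses into a single wildcard and the derived pattern can match formatted messages that belong to a different ErrorMsg. A standalone demonstration, reusing the message text from the hunk above:

import java.util.regex.Pattern;

public class ErrorMsgRegexDemo {
  public static void main(String[] args) {
    String txnMsg = "Operation {0} is not allowed in a transaction.  TransactionID={1}.";
    // Old: the greedy .* swallows everything between the two placeholders.
    String oldPattern = txnMsg.replaceAll("\\{.*\\}", ".*");
    // New: only numbered placeholders such as {0} and {1} become wildcards.
    String newPattern = txnMsg.replaceAll("\\{[0-9]+\\}", ".*");
    System.out.println(oldPattern); // Operation .*.
    System.out.println(newPattern); // Operation .* is not allowed in a transaction.  TransactionID=.*.

    // A message formatted from a *different* ErrorMsg:
    String other = "Operation COMMIT is not allowed when autoCommit=true.";
    System.out.println(Pattern.matches("^" + oldPattern + "$", other)); // true  - the wrong ErrorMsg can win
    System.out.println(Pattern.matches("^" + newPattern + "$", other)); // false - as intended
  }
}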

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java?rev=1673249r1=1673248r2=1673249view=diff
==
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java 
(original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/TestErrorMsg.java Mon Apr 
13 17:48:23 2015
@@ -23,9 +23,11 @@ import java.util.Set;
 
 import junit.framework.Assert;
 import junit.framework.TestCase;
+import org.junit.Test;
 
-public class TestErrorMsg extends TestCase {
+public class TestErrorMsg {
 
+  @Test
   public void testUniqueErrorCode() {
     Set<Integer> numbers = new HashSet<Integer>();
     for (ErrorMsg err : ErrorMsg.values()) {
@@ -33,4 +35,15 @@ public class TestErrorMsg extends TestCa
       Assert.assertTrue("duplicated error number " + code, numbers.add(code));
     }
   }
+  @Test
+  public void testReverseMatch() {
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_AUTOCOMMIT, "COMMIT");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_IN_TXN, "ALTER TABLE", "1");
+    testReverseMatch(ErrorMsg.OP_NOT_ALLOWED_WITHOUT_TXN, "ROLLBACK");
+  }
+  private void testReverseMatch(ErrorMsg errorMsg, String... args) {
+    String parametrizedMsg = errorMsg.format(args);
+    ErrorMsg canonicalMsg = ErrorMsg.getErrorMsg(parametrizedMsg);
+    Assert.assertEquals("Didn't find expected msg", errorMsg.getErrorCode(), canonicalMsg.getErrorCode());
+  }
 }

Modified: 
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java?rev=1673249r1=1673248r2=1673249view=diff
==
--- 
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
 (original)
+++ 
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/parse/TestUpdateDeleteSemanticAnalyzer.java
 Mon Apr 13 17:48:23 2015
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import static org.junit.Assert.*;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Map;
 
-import junit.framework.Assert;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -44,7 +41,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
-import org.junit.Ignore;
 import

svn commit: r1674988 - in /hive/trunk/hcatalog/src/test/e2e/templeton: ./ deployers/ deployers/config/webhcat/ tests/

2015-04-20 Thread ekoifman
Author: ekoifman
Date: Mon Apr 20 21:24:51 2015
New Revision: 1674988

URL: http://svn.apache.org/r1674988
Log:
HIVE-7948 - Add an E2E test  to verify fix for HIVE-7155 (Aswathy Chellammal 
Sreekumar via Eugene Koifman)

Added:

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.updateConfig.xml

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/modify_webhcat_config.sh

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/restore_webhcat_config.sh
hive/trunk/hcatalog/src/test/e2e/templeton/tests/modifyConfiguration.conf
Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
hive/trunk/hcatalog/src/test/e2e/templeton/build.xml
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/README.txt?rev=1674988r1=1674987r2=1674988view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/README.txt Mon Apr 20 21:24:51 
2015
@@ -205,6 +205,19 @@ Here is the schema of the table writen i
 To prevent primary key violation and sqoop import directory conflict, make 
sure the PERSON table is empty
 and the folder hdfs://hostname:8020/sqoopoutputdir doesn't exist before 
running the test.
 
+Running updateConfig tests
+--------------------------
+ant test-updateConfig -Dinpdir.hdfs=<location of inpdir on hdfs> -Dtest.user.name=<user the tests should run as> \
+ -Dsecure.mode=<yes/no> -Dharness.webhdfs.url=<webhdfs url up to port num> -Dharness.templeton.url=<templeton url up to port num>
+
+This test suite verifies the use of the property templeton.mapper.memory.mb in webhcat-site.xml.
+It attempts to load more than 100MB of data from one hive table into another,
+with templeton.mapper.memory.mb set to a very low value. This is a negative test case that expects the map job to fail
+due to insufficient memory.
+
+To run this suite, the templeton.mapper.memory.mb property should be set to 0.01 in webhcat-site.xml. This can be done by
+running modify_webhcat_config.sh in deployers/. Once the test run finishes, the change can be reverted by running restore_webhcat_config.sh.
+
 Notes
 -
 It's best to set HADOOP_HOME_WARN_SUPPRESS=true everywhere you can.

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/build.xml?rev=1674988r1=1674987r2=1674988view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/build.xml (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/build.xml Mon Apr 20 21:24:51 
2015
@@ -197,6 +197,35 @@
         </exec>
     </target>
 
+    <target name="test-updateConfig" depends="property-check, init-test">
+        <!-- fork (parallelization) factors for e2e tests execution.
+             Defaults are 1, which means *no* parallelization: -->
+        <property name="fork.factor.group" value="1"/>
+        <property name="fork.factor.conf.file" value="1"/>
+        <property name="e2e.debug" value="false"/>
+        <property name="tests.to.run" value=""/>
+        <exec executable="perl" dir="${test.location}" failonerror="true">
+            <env key="HARNESS_ROOT" value="."/>
+            <env key="TH_WORKING_DIR" value="${test.location}"/>
+            <env key="TH_INPDIR_LOCAL" value="${inpdir.local}"/>
+            <env key="TH_INPDIR_HDFS" value="${inpdir.hdfs}"/>
+            <env key="TH_OUT" value="."/>
+            <env key="TH_ROOT" value="."/>
+            <env key="FORK_FACTOR_GROUP" value="${fork.factor.group}"/>
+            <env key="FORK_FACTOR_FILE" value="${fork.factor.conf.file}"/>
+            <env key="E2E_DEBUG" value="${e2e.debug}"/>
+            <env key="WEBHDFS_URL" value="${harness.webhdfs.url}"/>
+            <env key="TEMPLETON_URL" value="${harness.templeton.url}"/>
+            <env key="USER_NAME" value="${test.user.name}"/>
+            <env key="DOAS_USER" value="${doas.user}"/>
+            <env key="HARNESS_CONF" value="${basedir}/conf/default.conf"/>
+            <env key="SECURE_MODE" value="${secure.mode}"/>
+            <arg value="./test_harness.pl"/>
+            <arg line="${tests.to.run}"/>
+            <arg value="${basedir}/tests/modifyConfiguration.conf"/>
+        </exec>
+    </target>
+
     <target name="clean">
         <delete dir="${test.location}"/>
         <delete file="${tar.name}"/>

Added: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.updateConfig.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.updateConfig.xml?rev=1674988view=auto
==
--- 
hive/trunk

hive git commit: HIVE-10483 - insert overwrite partition deadlocks on itself with DbTxnManager (Eugene Koifman, reviewed by Alan Gates)

2015-04-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 e1b03dd46 - 715018ac7


HIVE-10483 - insert overwrite partition deadlocks on itself with DbTxnManager 
(Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/715018ac
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/715018ac
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/715018ac

Branch: refs/heads/branch-1.2
Commit: 715018ac79e89c75e9b2e381175127ae6dbc6343
Parents: e1b03dd
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Apr 28 17:46:24 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Apr 28 17:46:24 2015 -0700

--
 .../apache/hadoop/hive/common/JavaUtils.java|  8 ++
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 22 +++-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |  9 +--
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 27 +++-
 4 files changed, 57 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/715018ac/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index a212fb8..3dd8f75 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -136,6 +136,14 @@ public final class JavaUtils {
     LogFactory.release(loader);
   }
 
+  /**
+   * Utility method for ACID to normalize logging info
+   * @param extLockId LockResponse.lockid
+   */
+  public static String lockIdToString(long extLockId) {
+    return "lockid:" + extLockId;
+  }
+
   private JavaUtils() {
     // prevent instantiation
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/715018ac/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 1e64fc7..704c3ed 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -28,6 +28,7 @@ import org.apache.commons.dbcp.PoolingDataSource;
 
 import org.apache.commons.pool.ObjectPool;
 import org.apache.commons.pool.impl.GenericObjectPool;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidReadTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -1115,6 +1116,8 @@ public class TxnHandler {
   private static class LockInfo {
     private final long extLockId;
     private final long intLockId;
+    //0 means there is no transaction, i.e. it is a select statement which is not part of
+    //an explicit transaction or an IUD statement that is not writing to an ACID table
     private final long txnId;
     private final String db;
     private final String table;
@@ -1144,7 +1147,7 @@ public class TxnHandler {
         default:
           throw new MetaException("Unknown lock type " + rs.getString("hl_lock_type").charAt(0));
       }
-      txnId = rs.getLong("hl_txnid");
+      txnId = rs.getLong("hl_txnid");//returns 0 if value is NULL
     }
     LockInfo(ShowLocksResponseElement e, long intLockId) {
       extLockId = e.getLockid();
@@ -1166,7 +1169,7 @@ public class TxnHandler {
 
     @Override
     public String toString() {
-      return "extLockId:" + Long.toString(extLockId) + " intLockId:" +
+      return JavaUtils.lockIdToString(extLockId) + " intLockId:" +
         intLockId + " txnId:" + Long.toString
         (txnId) + " db:" + db + " table:" + table + " partition:" +
         partition + " state:" + (state == null ? "null" : state.toString())
@@ -1642,10 +1645,17 @@ public class TxnHandler {
    * on a database.
    */
   private boolean ignoreConflict(LockInfo desiredLock, LockInfo existingLock) {
-    return (desiredLock.isDbLock() && desiredLock.type == LockType.SHARED_READ &&
-      existingLock.isTableLock() && existingLock.type == LockType.EXCLUSIVE) ||
-      (existingLock.isDbLock() && existingLock.type == LockType.SHARED_READ &&
-        desiredLock.isTableLock() && desiredLock.type == LockType.EXCLUSIVE);
+    return
+      ((desiredLock.isDbLock() && desiredLock.type == LockType.SHARED_READ &&
+          existingLock.isTableLock() && existingLock.type == LockType.EXCLUSIVE) ||
+        (existingLock.isDbLock() && existingLock.type == LockType.SHARED_READ &&
+          desiredLock.isTableLock() && desiredLock.type == LockType.EXCLUSIVE))
+      ||
+
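
The rest of this hunk is truncated in the archive (the committed change adds one more disjunct beyond what is visible). The visible part of the rule, restated as a self-contained sketch (illustrative, not the committed TxnHandler code): a SHARED_READ at database scope and an EXCLUSIVE at table scope may coexist, in either direction, which is what lets a statement that takes both kinds of lock stop deadlocking on itself.

public class LockConflictSketch {
  enum LockType { SHARED_READ, SHARED_WRITE, EXCLUSIVE }

  static class LockInfo {
    final LockType type;
    final String db;
    final String table;   // null means a database-level lock
    LockInfo(LockType type, String db, String table) {
      this.type = type; this.db = db; this.table = table;
    }
    boolean isDbLock()    { return table == null; }
    boolean isTableLock() { return table != null; }
  }

  /** One direction of the symmetric exemption; ignoreConflict checks both. */
  static boolean dbReadVsTableExclusive(LockInfo a, LockInfo b) {
    return a.isDbLock() && a.type == LockType.SHARED_READ
        && b.isTableLock() && b.type == LockType.EXCLUSIVE;
  }

  static boolean ignoreConflict(LockInfo desired, LockInfo existing) {
    return dbReadVsTableExclusive(desired, existing)
        || dbReadVsTableExclusive(existing, desired);
  }

  public static void main(String[] args) {
    LockInfo dbRead  = new LockInfo(LockType.SHARED_READ, "default", null);
    LockInfo tblExcl = new LockInfo(LockType.EXCLUSIVE,   "default", "t1");
    System.out.println(ignoreConflict(dbRead, tblExcl)); // true: do not block
  }
}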

hive git commit: HIVE-10481 - ACID table update finishes but values not really updated if column names are not all lower case

2015-04-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master cd027b7c5 - 57fcbce52


HIVE-10481 - ACID table update finishes but values not really updated if column 
names are not all lower case


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/57fcbce5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/57fcbce5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/57fcbce5

Branch: refs/heads/master
Commit: 57fcbce5251c04818dd0975f921c2648f289c8f8
Parents: cd027b7
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Apr 28 16:07:44 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Apr 28 16:07:44 2015 -0700

--
 .../hive/ql/parse/UpdateDeleteSemanticAnalyzer.java   | 13 +++--
 .../org/apache/hadoop/hive/ql/TestTxnCommands2.java   | 14 ++
 2 files changed, 25 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/57fcbce5/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 7af68de..4c69534 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -190,7 +190,7 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
 
     addSetRCols((ASTNode) assignment.getChildren().get(1), setRCols);
 
-    String columnName = colName.getText();
+    String columnName = normalizeColName(colName.getText());
 
     // Make sure this isn't one of the partitioning columns, that's not supported.
     if (partCols != null) {
@@ -397,11 +397,20 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
       ASTNode colName = (ASTNode)node.getChildren().get(0);
       assert colName.getToken().getType() == HiveParser.Identifier :
           "Expected column name";
-      setRCols.add(colName.getText());
+      setRCols.add(normalizeColName(colName.getText()));
     } else if (node.getChildren() != null) {
       for (Node n : node.getChildren()) {
         addSetRCols((ASTNode)n, setRCols);
       }
     }
   }
+
+  /**
+   * Column names are stored in metastore in lower case, regardless of the CREATE TABLE statement.
+   * Unfortunately there is no single place that normalizes the input query.
+   * @param colName not null
+   */
+  private static String normalizeColName(String colName) {
+    return colName.toLowerCase();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/57fcbce5/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 06d2ca2..ac5ae2a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -58,6 +58,7 @@ public class TestTxnCommands2 {
 
   @Before
   public void setUp() throws Exception {
+    tearDown();
     hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
@@ -108,6 +109,19 @@ public class TestTxnCommands2 {
     List<String> rs1 = runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
   }
   @Test
+  public void testUpdateMixedCase() throws Exception {
+    int[][] tableData = {{1,2},{3,3},{5,3}};
+    runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(tableData));
+    runStatementOnDriver("update " + Table.ACIDTBL + " set B = 7 where A=1");
+    List<String> rs = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
+    int[][] updatedData = {{1,7},{3,3},{5,3}};
+    Assert.assertEquals("Update failed", stringifyValues(updatedData), rs);
+    runStatementOnDriver("update " + Table.ACIDTBL + " set B = B + 1 where A=1");
+    List<String> rs2 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
+    int[][] updatedData2 = {{1,8},{3,3},{5,3}};
+    Assert.assertEquals("Update failed", stringifyValues(updatedData2), rs2);
+  }
+  @Test
   public void testDeleteIn() throws Exception {
     int[][] tableData = {{1,2},{3,2},{5,2},{1,3},{3,3},{5,3}};
     runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(tableData));
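
The failure mode fixed here is easy to reproduce in isolation: the metastore keys columns by their lower-case names, so a SET clause written as "set B = 7" missed the column map entirely and the update finished without changing anything. A tiny self-contained illustration (not Hive code):

import java.util.HashMap;
import java.util.Map;

public class ColNameDemo {
  private static String normalizeColName(String colName) {
    return colName.toLowerCase();
  }

  public static void main(String[] args) {
    Map<String, String> metastoreCols = new HashMap<String, String>();
    metastoreCols.put("b", "int"); // the metastore always stores "b", never "B"
    String userCol = "B";          // as written in: update acidtbl set B = 7 where a = 1
    System.out.println(metastoreCols.containsKey(userCol));                   // false: silent no-op before the fix
    System.out.println(metastoreCols.containsKey(normalizeColName(userCol))); // true:  the update is applied
  }
}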



hive git commit: HIVE-10481 - ACID table update finishes but values not really updated if column names are not all lower case (Eugene Koifman, reviewed by Alan Gates)

2015-04-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 ae10c832d - e1b03dd46


HIVE-10481 - ACID table update finishes but values not really updated if column 
names are not all lower case (Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e1b03dd4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e1b03dd4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e1b03dd4

Branch: refs/heads/branch-1.2
Commit: e1b03dd4629ae61cfdde4634dff594dab6e46322
Parents: ae10c83
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Apr 28 16:17:23 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Apr 28 16:17:23 2015 -0700

--
 .../hive/ql/parse/UpdateDeleteSemanticAnalyzer.java   | 13 +++--
 .../org/apache/hadoop/hive/ql/TestTxnCommands2.java   | 14 ++
 2 files changed, 25 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e1b03dd4/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
index 7af68de..4c69534 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/UpdateDeleteSemanticAnalyzer.java
@@ -190,7 +190,7 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
 
     addSetRCols((ASTNode) assignment.getChildren().get(1), setRCols);
 
-    String columnName = colName.getText();
+    String columnName = normalizeColName(colName.getText());
 
     // Make sure this isn't one of the partitioning columns, that's not supported.
     if (partCols != null) {
@@ -397,11 +397,20 @@ public class UpdateDeleteSemanticAnalyzer extends SemanticAnalyzer {
       ASTNode colName = (ASTNode)node.getChildren().get(0);
       assert colName.getToken().getType() == HiveParser.Identifier :
           "Expected column name";
-      setRCols.add(colName.getText());
+      setRCols.add(normalizeColName(colName.getText()));
     } else if (node.getChildren() != null) {
       for (Node n : node.getChildren()) {
         addSetRCols((ASTNode)n, setRCols);
       }
     }
   }
+
+  /**
+   * Column names are stored in metastore in lower case, regardless of the CREATE TABLE statement.
+   * Unfortunately there is no single place that normalizes the input query.
+   * @param colName not null
+   */
+  private static String normalizeColName(String colName) {
+    return colName.toLowerCase();
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/e1b03dd4/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 06d2ca2..ac5ae2a 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -58,6 +58,7 @@ public class TestTxnCommands2 {
 
   @Before
   public void setUp() throws Exception {
+    tearDown();
     hiveConf = new HiveConf(this.getClass());
     hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
     hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
@@ -108,6 +109,19 @@ public class TestTxnCommands2 {
     List<String> rs1 = runStatementOnDriver("select a,b from " + Table.NONACIDORCTBL);
   }
   @Test
+  public void testUpdateMixedCase() throws Exception {
+    int[][] tableData = {{1,2},{3,3},{5,3}};
+    runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(tableData));
+    runStatementOnDriver("update " + Table.ACIDTBL + " set B = 7 where A=1");
+    List<String> rs = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
+    int[][] updatedData = {{1,7},{3,3},{5,3}};
+    Assert.assertEquals("Update failed", stringifyValues(updatedData), rs);
+    runStatementOnDriver("update " + Table.ACIDTBL + " set B = B + 1 where A=1");
+    List<String> rs2 = runStatementOnDriver("select a,b from " + Table.ACIDTBL + " order by a,b");
+    int[][] updatedData2 = {{1,8},{3,3},{5,3}};
+    Assert.assertEquals("Update failed", stringifyValues(updatedData2), rs2);
+  }
+  @Test
   public void testDeleteIn() throws Exception {
     int[][] tableData = {{1,2},{3,2},{5,2},{1,3},{3,3},{5,3}};
     runStatementOnDriver("insert into " + Table.ACIDTBL + "(a,b) " + makeValuesClause(tableData));



hive git commit: HIVE-10483 - insert overwrite partition deadlocks on itself with DbTxnManager (Eugene Koifman, reviewed by Alan Gates)

2015-04-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 57fcbce52 - 106e0931f


HIVE-10483 - insert overwrite partition deadlocks on itself with DbTxnManager 
(Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/106e0931
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/106e0931
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/106e0931

Branch: refs/heads/master
Commit: 106e0931f4be765b750adaf2c5cb654a233baef0
Parents: 57fcbce
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Apr 28 20:11:56 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Apr 28 20:11:56 2015 -0700

--
 .../apache/hadoop/hive/common/JavaUtils.java|  8 ++
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 22 +++-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |  9 +--
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 27 +++-
 4 files changed, 57 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/106e0931/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index a212fb8..3dd8f75 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -136,6 +136,14 @@ public final class JavaUtils {
     LogFactory.release(loader);
   }
 
+  /**
+   * Utility method for ACID to normalize logging info
+   * @param extLockId LockResponse.lockid
+   */
+  public static String lockIdToString(long extLockId) {
+    return "lockid:" + extLockId;
+  }
+
   private JavaUtils() {
     // prevent instantiation
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/106e0931/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 1e64fc7..704c3ed 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -28,6 +28,7 @@ import org.apache.commons.dbcp.PoolingDataSource;
 
 import org.apache.commons.pool.ObjectPool;
 import org.apache.commons.pool.impl.GenericObjectPool;
+import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidReadTxnList;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -1115,6 +1116,8 @@ public class TxnHandler {
   private static class LockInfo {
     private final long extLockId;
     private final long intLockId;
+    //0 means there is no transaction, i.e. it is a select statement which is not part of
+    //an explicit transaction or an IUD statement that is not writing to an ACID table
    private final long txnId;
    private final String db;
    private final String table;
@@ -1144,7 +1147,7 @@ public class TxnHandler {
        default:
          throw new MetaException("Unknown lock type " + rs.getString("hl_lock_type").charAt(0));
      }
-      txnId = rs.getLong("hl_txnid");
+      txnId = rs.getLong("hl_txnid");//returns 0 if value is NULL
    }
    LockInfo(ShowLocksResponseElement e, long intLockId) {
      extLockId = e.getLockid();
@@ -1166,7 +1169,7 @@ public class TxnHandler {
 
    @Override
    public String toString() {
-      return "extLockId:" + Long.toString(extLockId) + " intLockId:" +
+      return JavaUtils.lockIdToString(extLockId) + " intLockId:" +
        intLockId + " txnId:" + Long.toString
        (txnId) + " db:" + db + " table:" + table + " partition:" +
        partition + " state:" + (state == null ? "null" : state.toString())
@@ -1642,10 +1645,17 @@ public class TxnHandler {
    * on a database.
    */
   private boolean ignoreConflict(LockInfo desiredLock, LockInfo existingLock) {
-    return (desiredLock.isDbLock() && desiredLock.type == LockType.SHARED_READ &&
-      existingLock.isTableLock() && existingLock.type == LockType.EXCLUSIVE) ||
-      (existingLock.isDbLock() && existingLock.type == LockType.SHARED_READ &&
-        desiredLock.isTableLock() && desiredLock.type == LockType.EXCLUSIVE);
+    return
+      ((desiredLock.isDbLock() && desiredLock.type == LockType.SHARED_READ &&
+          existingLock.isTableLock() && existingLock.type == LockType.EXCLUSIVE) ||
+        (existingLock.isDbLock() && existingLock.type == LockType.SHARED_READ &&
+          desiredLock.isTableLock() && desiredLock.type == LockType.EXCLUSIVE))
+      ||
+

hive git commit: HIVE-10151 - insert into A select from B is broken when both A and B are Acid tables and bucketed the same way (Eugene Koifman, reviewed by Alan Gates)

2015-05-01 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 0273771d6 - 343486b65


HIVE-10151 - insert into A select from B is broken when both A and B are Acid 
tables and bucketed the same way (Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/343486b6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/343486b6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/343486b6

Branch: refs/heads/branch-1.2
Commit: 343486b6542ab75f4b6049b56973d9a5d4c5a495
Parents: 0273771
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 1 09:34:37 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 1 09:34:37 2015 -0700

--
 .../org/apache/hadoop/hive/ql/exec/Operator.java  |  4 
 .../BucketingSortingReduceSinkOptimizer.java  | 12 
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java|  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java   | 18 +-
 4 files changed, 34 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/343486b6/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
index 5856cfd..d7f1b42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -1181,6 +1181,10 @@ public abstract class Operator<T extends OperatorDesc> implements Serializable,C
     return useBucketizedHiveInputFormat;
   }
 
+  /**
+   * Before setting this to {@code true} make sure it's not reading ACID tables
+   * @param useBucketizedHiveInputFormat
+   */
   public void setUseBucketizedHiveInputFormat(boolean useBucketizedHiveInputFormat) {
     this.useBucketizedHiveInputFormat = useBucketizedHiveInputFormat;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/343486b6/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
index 76cc540..7cb0f15 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -215,6 +216,9 @@ public class BucketingSortingReduceSinkOptimizer implements Transform {
     private void storeBucketPathMapping(TableScanOperator tsOp, FileStatus[] srcs) {
       Map<String, Integer> bucketFileNameMapping = new HashMap<String, Integer>();
       for (int pos = 0; pos < srcs.length; pos++) {
+        if(!srcs[pos].isFile()) {
+          throw new RuntimeException("Was expecting '" + srcs[pos].getPath() + "' to be bucket file.");
+        }
         bucketFileNameMapping.put(srcs[pos].getPath().getName(), pos);
       }
       tsOp.getConf().setBucketFileNameMapping(bucketFileNameMapping);
@@ -376,6 +380,14 @@ public class BucketingSortingReduceSinkOptimizer implements Transform {
         return null;
       }
 
+      if(stack.get(0) instanceof TableScanOperator) {
+        TableScanOperator tso = ((TableScanOperator)stack.get(0));
+        if(SemanticAnalyzer.isAcidTable(tso.getConf().getTableMetadata())) {
+          /*ACID tables have complex directory layout and require merging of delta files
+          * on read thus we should not try to read bucket files directly*/
+          return null;
+        }
+      }
       // Support for dynamic partitions can be added later
       if (fsOp.getConf().getDynPartCtx() != null) {
         return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/343486b6/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 8e65b59..1d2c764 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ 
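
The SemanticAnalyzer part of this diff is truncated by the archive. The reason the optimizer must bail out on ACID tables: a plain bucketed table stores each bucket as a flat file, while an ACID table stores directories of base/delta files that are merged on read, so mapping bucket file names to positions is meaningless there. A self-contained illustration (the directory names assume the usual ACID layout, e.g. base_N and delta_N_N; this is not Hive code):

import java.util.Arrays;
import java.util.List;

public class BucketLayoutSketch {
  // Illustrative listings; real code would use FileSystem.listStatus().
  static final List<String> NON_ACID = Arrays.asList("000000_0", "000001_0");
  static final List<String> ACID = Arrays.asList("base_0000005", "delta_0000006_0000006");

  /** Stand-in for the FileStatus.isFile() check added in this commit. */
  static boolean looksLikeFlatBuckets(List<String> names) {
    for (String n : names) {
      if (n.startsWith("base_") || n.startsWith("delta_")) {
        return false; // a directory, not a flat bucket file
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(looksLikeFlatBuckets(NON_ACID)); // true:  buckets can be mapped
    System.out.println(looksLikeFlatBuckets(ACID));     // false: bail out, as the new code does
  }
}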

hive git commit: HIVE-10151 - insert into A select from B is broken when both A and B are Acid tables and bucketed the same way (Eugene Koifman, reviewed by Alan Gates)

2015-05-01 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 283466374 - 6db33a9d1


HIVE-10151 - insert into A select from B is broken when both A and B are Acid 
tables and bucketed the same way (Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6db33a9d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6db33a9d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6db33a9d

Branch: refs/heads/master
Commit: 6db33a9d135baad489ca596c782278a71d63f597
Parents: 28346637
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 1 09:27:21 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 1 09:27:21 2015 -0700

--
 .../org/apache/hadoop/hive/ql/exec/Operator.java  |  4 
 .../BucketingSortingReduceSinkOptimizer.java  | 12 
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java|  2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands2.java   | 18 +-
 4 files changed, 34 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6db33a9d/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
index 5856cfd..d7f1b42 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java
@@ -1181,6 +1181,10 @@ public abstract class Operator<T extends OperatorDesc> implements Serializable,C
     return useBucketizedHiveInputFormat;
   }
 
+  /**
+   * Before setting this to {@code true} make sure it's not reading ACID tables
+   * @param useBucketizedHiveInputFormat
+   */
   public void setUseBucketizedHiveInputFormat(boolean useBucketizedHiveInputFormat) {
     this.useBucketizedHiveInputFormat = useBucketizedHiveInputFormat;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/6db33a9d/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
index 76cc540..7cb0f15 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/BucketingSortingReduceSinkOptimizer.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -215,6 +216,9 @@ public class BucketingSortingReduceSinkOptimizer implements Transform {
     private void storeBucketPathMapping(TableScanOperator tsOp, FileStatus[] srcs) {
       Map<String, Integer> bucketFileNameMapping = new HashMap<String, Integer>();
       for (int pos = 0; pos < srcs.length; pos++) {
+        if(!srcs[pos].isFile()) {
+          throw new RuntimeException("Was expecting '" + srcs[pos].getPath() + "' to be bucket file.");
+        }
         bucketFileNameMapping.put(srcs[pos].getPath().getName(), pos);
       }
       tsOp.getConf().setBucketFileNameMapping(bucketFileNameMapping);
@@ -376,6 +380,14 @@ public class BucketingSortingReduceSinkOptimizer implements Transform {
         return null;
       }
 
+      if(stack.get(0) instanceof TableScanOperator) {
+        TableScanOperator tso = ((TableScanOperator)stack.get(0));
+        if(SemanticAnalyzer.isAcidTable(tso.getConf().getTableMetadata())) {
+          /*ACID tables have complex directory layout and require merging of delta files
+          * on read thus we should not try to read bucket files directly*/
+          return null;
+        }
+      }
       // Support for dynamic partitions can be added later
       if (fsOp.getConf().getDynPartCtx() != null) {
         return null;

http://git-wip-us.apache.org/repos/asf/hive/blob/6db33a9d/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 8e65b59..1d2c764 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ 

hive git commit: HIVE-10605 - Make hive version number update automatically in webhcat-default.xml during hive tar generation (Eugene Koifman, reviewed by Thejas Nair)

2015-05-05 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 02b6cd110 - eefb0718e


HIVE-10605 - Make hive version number update automatically in 
webhcat-default.xml during hive tar generation (Eugene Koifman, reviewed by 
Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/eefb0718
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/eefb0718
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/eefb0718

Branch: refs/heads/master
Commit: eefb0718ea347680850620c5dd9eff5ec202566d
Parents: 02b6cd1
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue May 5 18:00:15 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue May 5 18:00:15 2015 -0700

--
 .../deployers/config/webhcat/webhcat-site.xml  |  9 +
 hcatalog/src/test/e2e/templeton/deployers/env.sh   |  1 +
 hcatalog/webhcat/svr/pom.xml   | 13 +++--
 .../webhcat/svr/src/main/config/webhcat-default.xml| 10 +-
 packaging/src/main/assembly/bin.xml|  1 +
 5 files changed, 15 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/eefb0718/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
--
diff --git 
a/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml 
b/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
index 7a2d450..8bcb1f0 100644
--- a/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
@@ -24,8 +24,7 @@
 <!-- install. -->
 
 <configuration>
-<!--TODO:
-1. make pig/hive versions env variables-->
+<!--TODO:-->
 
   <property>
     <name>templeton.hcat</name>
@@ -34,12 +33,6 @@
   </property>
 
 <property>
-<name>templeton.libjars</name>
-<value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
-<description>Jars to add to the classpath.</description>
-</property>
-
-<property>
 <name>templeton.pig.archive</name>
 <value>hdfs:///apps/templeton/pig-${env.PIG_VERSION}.tar.gz</value>
 <description>The path to the Pig archive.</description>

http://git-wip-us.apache.org/repos/asf/hive/blob/eefb0718/hcatalog/src/test/e2e/templeton/deployers/env.sh
--
diff --git a/hcatalog/src/test/e2e/templeton/deployers/env.sh 
b/hcatalog/src/test/e2e/templeton/deployers/env.sh
index a9cc2d7..8b719f2 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/env.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/env.sh
@@ -22,6 +22,7 @@
 
 # define necessary env vars here and source it in other files
 
+#todo: most of these variables are defined in pom.xml - see if this can be integrated
 echo ${HADOOP_VERSION};
 
 if [ -z ${HADOOP_VERSION} ]; then

http://git-wip-us.apache.org/repos/asf/hive/blob/eefb0718/hcatalog/webhcat/svr/pom.xml
--
diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml
index 54b8c98..5b6696e 100644
--- a/hcatalog/webhcat/svr/pom.xml
+++ b/hcatalog/webhcat/svr/pom.xml
@@ -151,12 +151,13 @@
   </profiles>
 
   <build>
-  <resources>
-  <resource>
-  <targetPath>.</targetPath>
-  <directory>src/main/config</directory>
-  </resource>
-  </resources>
+    <resources>
+      <resource>
+        <targetPath>.</targetPath>
+        <directory>src/main/config</directory>
+        <filtering>true</filtering>
+      </resource>
+    </resources>
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/eefb0718/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
--
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml 
b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
index dc6521a..801f3a5 100644
--- a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
+++ b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
@@ -39,7 +39,7 @@
 
   <property>
     <name>templeton.libjars</name>
-    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
+    <value>${env.TEMPLETON_HOME}/../lib/zookeeper-${zookeeper.version}.jar,${env.TEMPLETON_HOME}/../lib/hive-common-${project.version}.jar</value>
     <description>Jars to add to the classpath.</description>
   </property>
 
@@ -87,7 +87,7 @@
 
   <property>
     <name>templeton.pig.path</name>
-    <value>pig-0.11.1.tar.gz/pig-0.11.1/bin/pig</value>
+

hive git commit: HIVE-10521 - TxnHandler.timeOutTxns only times out some of the expired transactions (Alan Gates via Eugene Koifman)

2015-05-06 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 80fb89131 - 4b444082f


HIVE-10521 - TxnHandler.timeOutTxns only times out some of the expired 
transactions (Alan Gates via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4b444082
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4b444082
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4b444082

Branch: refs/heads/master
Commit: 4b444082fcae9eb8ea60ec160723a0337ead1852
Parents: 80fb891
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed May 6 19:36:48 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed May 6 19:36:48 2015 -0700

--
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 35 --
 .../hive/metastore/txn/TestTxnHandler.java  | 39 +++-
 2 files changed, 53 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/4b444082/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 704c3ed..7c3b55c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -75,6 +75,7 @@ public class TxnHandler {
   static final protected char LOCK_SEMI_SHARED = 'w';
 
   static final private int ALLOWED_REPEATED_DEADLOCKS = 10;
+  static final private int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 100;
   static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
 
   static private DataSource connPool;
@@ -130,7 +131,8 @@ public class TxnHandler {
     timeout = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, TimeUnit.MILLISECONDS);
     deadlockCnt = 0;
     buildJumpTable();
-    retryInterval = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HMSHANDLERINTERVAL, TimeUnit.MILLISECONDS);
+    retryInterval = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HMSHANDLERINTERVAL,
+        TimeUnit.MILLISECONDS);
     retryLimit = HiveConf.getIntVar(conf, HiveConf.ConfVars.HMSHANDLERATTEMPTS);
     deadlockRetryInterval = retryInterval / 10;
 
@@ -334,9 +336,7 @@ public class TxnHandler {
       Connection dbConn = null;
       try {
         dbConn = getDbConn(Connection.TRANSACTION_SERIALIZABLE);
-        List<Long> txnids = new ArrayList<Long>(1);
-        txnids.add(txnid);
-        if (abortTxns(dbConn, txnids) != 1) {
+        if (abortTxns(dbConn, Collections.singletonList(txnid)) != 1) {
           LOG.debug("Going to rollback");
           dbConn.rollback();
           throw new NoSuchTxnException("No such transaction: " + txnid);
@@ -1321,8 +1321,6 @@ public class TxnHandler {
       LOG.debug("Going to execute update <" + buf.toString() + ">");
       updateCnt = stmt.executeUpdate(buf.toString());
 
-      LOG.debug("Going to commit");
-      dbConn.commit();
     } finally {
       closeStmt(stmt);
     }
@@ -1818,10 +1816,10 @@ public class TxnHandler {
     }
   }
 
-  // Abort timed out transactions.  This calls abortTxn(), which does a commit,
+  // Abort timed out transactions.  This does a commit,
   // and thus should be done before any calls to heartbeat that will leave
   // open transactions on the underlying database.
-  private void timeOutTxns(Connection dbConn) throws SQLException, MetaException {
+  private void timeOutTxns(Connection dbConn) throws SQLException, MetaException, RetryException {
     long now = getDbTime(dbConn);
     Statement stmt = null;
     try {
@@ -1834,10 +1832,23 @@ public class TxnHandler {
       List<Long> deadTxns = new ArrayList<Long>();
       // Limit the number of timed out transactions we do in one pass to keep from generating a
       // huge delete statement
-      for (int i = 0; i < 20 && rs.next(); i++) deadTxns.add(rs.getLong(1));
-      // We don't care whether all of the transactions get deleted or not,
-      // if some didn't it most likely means someone else deleted them in the interum
-      if (deadTxns.size() > 0) abortTxns(dbConn, deadTxns);
+      do {
+        deadTxns.clear();
+        for (int i = 0; i < TIMED_OUT_TXN_ABORT_BATCH_SIZE && rs.next(); i++) {
+          deadTxns.add(rs.getLong(1));
+        }
+        // We don't care whether all of the transactions get deleted or not,
+        // if some didn't it most likely means someone else deleted them in the interum
+        if (deadTxns.size() > 0) abortTxns(dbConn, deadTxns);
+      } while (deadTxns.size() > 0);
+      LOG.debug("Going to commit");
+      dbConn.commit();
+    } catch (SQLException e) {
+      LOG.debug("Going to rollback");
+
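
The point of the new do/while: the old code aborted at most 20 timed-out transactions per pass and then returned, so a large backlog was never fully drained, while the new code keeps taking fixed-size batches until none remain, each batch bounded so the generated DELETE statement stays small. The pattern in isolation (illustrative, not TxnHandler code):

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

public class BatchAbortSketch {
  static final int BATCH_SIZE = 100; // mirrors TIMED_OUT_TXN_ABORT_BATCH_SIZE

  static void abortAll(Iterator<Long> expiredTxns) {
    List<Long> batch = new ArrayList<Long>(BATCH_SIZE);
    do {
      batch.clear();
      for (int i = 0; i < BATCH_SIZE && expiredTxns.hasNext(); i++) {
        batch.add(expiredTxns.next());
      }
      if (batch.size() > 0) {
        abortTxns(batch); // one bounded DELETE per batch, as in TxnHandler
      }
    } while (batch.size() > 0);
  }

  static void abortTxns(List<Long> txnIds) {
    System.out.println("aborting " + txnIds.size() + " txns");
  }

  public static void main(String[] args) {
    List<Long> ids = new ArrayList<Long>();
    for (long i = 1; i <= 250; i++) ids.add(i);
    abortAll(ids.iterator()); // prints batches of 100, 100, 50
  }
}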

hive git commit: HIVE-10521 - TxnHandler.timeOutTxns only times out some of the expired transactions (Alan Gates via Eugene Koifman)

2015-05-06 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 7a8eb62db - 0e380c71c


HIVE-10521 - TxnHandler.timeOutTxns only times out some of the expired 
transactions (Alan Gates via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0e380c71
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0e380c71
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0e380c71

Branch: refs/heads/branch-1.2
Commit: 0e380c71c42b4ea9d26c5898caffdf151c5c422f
Parents: 7a8eb62
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed May 6 19:30:47 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed May 6 19:30:47 2015 -0700

--
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 35 --
 .../hive/metastore/txn/TestTxnHandler.java  | 39 +++-
 2 files changed, 53 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0e380c71/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index 704c3ed..7c3b55c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -75,6 +75,7 @@ public class TxnHandler {
   static final protected char LOCK_SEMI_SHARED = 'w';
 
   static final private int ALLOWED_REPEATED_DEADLOCKS = 10;
+  static final private int TIMED_OUT_TXN_ABORT_BATCH_SIZE = 100;
   static final private Log LOG = LogFactory.getLog(TxnHandler.class.getName());
 
   static private DataSource connPool;
@@ -130,7 +131,8 @@ public class TxnHandler {
 timeout = HiveConf.getTimeVar(conf, HiveConf.ConfVars.HIVE_TXN_TIMEOUT, 
TimeUnit.MILLISECONDS);
 deadlockCnt = 0;
 buildJumpTable();
-retryInterval = HiveConf.getTimeVar(conf, 
HiveConf.ConfVars.HMSHANDLERINTERVAL, TimeUnit.MILLISECONDS);
+retryInterval = HiveConf.getTimeVar(conf, 
HiveConf.ConfVars.HMSHANDLERINTERVAL,
+TimeUnit.MILLISECONDS);
 retryLimit = HiveConf.getIntVar(conf, 
HiveConf.ConfVars.HMSHANDLERATTEMPTS);
 deadlockRetryInterval = retryInterval / 10;
 
@@ -334,9 +336,7 @@ public class TxnHandler {
   Connection dbConn = null;
   try {
 dbConn = getDbConn(Connection.TRANSACTION_SERIALIZABLE);
-List<Long> txnids = new ArrayList<Long>(1);
-txnids.add(txnid);
-if (abortTxns(dbConn, txnids) != 1) {
+if (abortTxns(dbConn, Collections.singletonList(txnid)) != 1) {
   LOG.debug("Going to rollback");
   dbConn.rollback();
   throw new NoSuchTxnException("No such transaction: " + txnid);
@@ -1321,8 +1321,6 @@
   LOG.debug("Going to execute update <" + buf.toString() + ">");
   updateCnt = stmt.executeUpdate(buf.toString());

-  LOG.debug("Going to commit");
-  dbConn.commit();
 } finally {
   closeStmt(stmt);
 }
@@ -1818,10 +1816,10 @@ public class TxnHandler {
 }
   }
 
-  // Abort timed out transactions.  This calls abortTxn(), which does a commit,
+  // Abort timed out transactions.  This does a commit,
   // and thus should be done before any calls to heartbeat that will leave
   // open transactions on the underlying database.
-  private void timeOutTxns(Connection dbConn) throws SQLException, 
MetaException {
+  private void timeOutTxns(Connection dbConn) throws SQLException, 
MetaException, RetryException {
 long now = getDbTime(dbConn);
 Statement stmt = null;
 try {
@@ -1834,10 +1832,23 @@ public class TxnHandler {
   List<Long> deadTxns = new ArrayList<Long>();
   // Limit the number of timed out transactions we do in one pass to keep from generating a
   // huge delete statement
-  for (int i = 0; i < 20 && rs.next(); i++) deadTxns.add(rs.getLong(1));
-  // We don't care whether all of the transactions get deleted or not,
-  // if some didn't it most likely means someone else deleted them in the interum
-  if (deadTxns.size() > 0) abortTxns(dbConn, deadTxns);
+  do {
+    deadTxns.clear();
+    for (int i = 0; i < TIMED_OUT_TXN_ABORT_BATCH_SIZE && rs.next(); i++) {
+      deadTxns.add(rs.getLong(1));
+    }
+    // We don't care whether all of the transactions get deleted or not,
+    // if some didn't it most likely means someone else deleted them in the interum
+    if (deadTxns.size() > 0) abortTxns(dbConn, deadTxns);
+  } while (deadTxns.size() > 0);
+  LOG.debug("Going to commit");
+  dbConn.commit();
+} catch (SQLException e) {
+  LOG.debug("Going to rollback");
+  
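
The fix above is easy to miss in diff form: the old code drained at most 20 ids from the ResultSet and aborted only those, so each timeout pass left the remaining expired transactions open. The new do/while keeps pulling fixed-size batches until the ResultSet is empty, bounding every DELETE without dropping work. A self-contained sketch of the same pattern, with a hypothetical TXNS table and helper names rather than the real TxnHandler API:

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    import java.util.ArrayList;
    import java.util.List;

    public class BatchedAbortSketch {
      private static final int BATCH_SIZE = 100;  // mirrors TIMED_OUT_TXN_ABORT_BATCH_SIZE

      // Drain rs completely, but never put more than BATCH_SIZE ids in one DELETE.
      static void abortInBatches(Connection conn, ResultSet rs) throws SQLException {
        List<Long> batch = new ArrayList<Long>(BATCH_SIZE);
        do {
          batch.clear();
          for (int i = 0; i < BATCH_SIZE && rs.next(); i++) {
            batch.add(rs.getLong(1));
          }
          if (batch.size() > 0) {
            deleteTxns(conn, batch);        // analogous to abortTxns(dbConn, deadTxns)
          }
        } while (batch.size() > 0);         // an empty batch means rs is drained
        conn.commit();
      }

      // Hypothetical stand-in for abortTxns(): one bounded DELETE per batch.
      private static void deleteTxns(Connection conn, List<Long> ids) throws SQLException {
        StringBuilder sql = new StringBuilder("delete from TXNS where txn_id in (");
        for (int i = 0; i < ids.size(); i++) {
          if (i > 0) sql.append(',');
          sql.append(ids.get(i));
        }
        sql.append(')');
        Statement stmt = conn.createStatement();
        try {
          stmt.executeUpdate(sql.toString());
        } finally {
          stmt.close();
        }
      }
    }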

hive git commit: HIVE-10423 - HIVE-7948 breaks deploy_e2e_artifacts.sh (Aswathy Chellammal Sreekumar via Eugene Koifman)

2015-05-04 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master c42666ed5 -> ce736af2a


HIVE-10423 - HIVE-7948 breaks deploy_e2e_artifacts.sh (Aswathy Chellammal 
Sreekumar via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ce736af2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ce736af2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ce736af2

Branch: refs/heads/master
Commit: ce736af2a5025a4bb07b39362b064bd64aecdeef
Parents: c42666e
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Mon May 4 18:58:41 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Mon May 4 18:58:41 2015 -0700

--
 hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ce736af2/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
--
diff --git a/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
b/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
index f446424..b834075 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
@@ -25,6 +25,9 @@ source ./env.sh
 
 echo Deploying artifacts to HDFS...
 
+rm -rf movielens-data
+mkdir -p movielens-data
+cd movielens-data
 curl -O http://files.grouplens.org/datasets/movielens/ml-1m.zip
 unzip ml-1m.zip
 mv ml-1m/ratings.dat 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/inpdir/ratings.txt



hive git commit: HIVE-10630:Renaming tables across encryption zones renames table even though the operation throws error (Eugene Koifman, reviewed by Sergio Pena and Ashutosh Chauhan)

2015-05-14 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 05a3d2ab7 -> bc0138c43


HIVE-10630:Renaming tables across encryption zones renames table even though 
the operation throws error (Eugene Koifman, reviewed by Sergio Pena and 
Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bc0138c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bc0138c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bc0138c4

Branch: refs/heads/master
Commit: bc0138c436add2335d2045b6c7bf86bc6a15cc27
Parents: 05a3d2a
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 14 10:29:00 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 14 10:29:00 2015 -0700

--
 .../test/resources/testconfiguration.properties |  3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 10 -
 .../hadoop/hive/metastore/HiveAlterHandler.java | 28 ++---
 .../hadoop/hive/metastore/ObjectStore.java  |  2 +-
 .../clientpositive/encryption_move_tbl.q| 18 
 .../encrypted/encryption_move_tbl.q.out | 43 
 6 files changed, 96 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bc0138c4/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index eeb46cc..f9c9351 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -337,7 +337,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_select_read_only_encrypted_tbl.q,\
   encryption_select_read_only_unencrypted_tbl.q,\
   encryption_load_data_to_encrypted_tables.q, \
-  encryption_unencrypted_nonhdfs_external_tables.q
+  encryption_unencrypted_nonhdfs_external_tables.q \
+  encryption_move_tbl.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/bc0138c4/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 3e29d3c..f357c14 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1029,7 +1029,7 @@ public class QTestUtil {
 rc = cliDriver.processLine(command);
   }
 
-  if (rc != 0) {
+  if (rc != 0 && !ignoreErrors()) {
     break;
   }
   command = "";
@@ -1040,6 +1040,14 @@ public class QTestUtil {
 return rc;
   }
 
+  /**
+   * This allows a .q file to continue executing after a statement runs into 
an error which is convenient
+   * if you want to use another hive cmd after the failure to sanity check the 
state of the system.
+   */
+  private boolean ignoreErrors() {
+return conf.getBoolVar(HiveConf.ConfVars.CLIIGNOREERRORS);
+  }
+
   private boolean isHiveCommand(String command) {
 String[] cmd = command.trim().split("\\s+");
 if (HiveCommand.find(cmd) != null) {
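
The ignoreErrors() hook above changes the .q driver loop from fail-fast to continue-on-error when CLIIGNOREERRORS is set. A self-contained sketch of that control flow (hypothetical runner, not QTestUtil itself):

    import java.util.Arrays;
    import java.util.List;

    public class ContinueOnErrorSketch {
      private final boolean ignoreErrors;  // would come from HiveConf.ConfVars.CLIIGNOREERRORS

      ContinueOnErrorSketch(boolean ignoreErrors) { this.ignoreErrors = ignoreErrors; }

      // Run each command; stop at the first failure unless ignoreErrors is set.
      int run(List<String> commands) {
        int rc = 0;
        for (String command : commands) {
          rc = execute(command);
          if (rc != 0 && !ignoreErrors) {
            break;  // fail fast, preserving the failing return code
          }
        }
        return rc;
      }

      // Hypothetical executor: commands containing "fail" return a non-zero code.
      private int execute(String command) {
        return command.contains("fail") ? 1 : 0;
      }

      public static void main(String[] args) {
        List<String> cmds = Arrays.asList("create table t", "fail here", "describe t");
        System.out.println(new ContinueOnErrorSketch(true).run(cmds));   // 0: all three ran
        System.out.println(new ContinueOnErrorSketch(false).run(cmds));  // 1: stopped early
      }
    }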

http://git-wip-us.apache.org/repos/asf/hive/blob/bc0138c4/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 50ec1e8..a3f2359 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -45,6 +45,7 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hive.common.util.HiveStringUtils;
 
 import com.google.common.collect.Lists;
@@ -260,17 +261,18 @@ public class HiveAlterHandler implements AlterHandler {
 // rename the src to destination
 try {
   if (srcFs.exists(srcPath) && !srcFs.rename(srcPath, destPath)) {
-    throw new IOException("Renaming " + srcPath + " to " + destPath + " is failed");
+    throw new IOException("Renaming " + srcPath + " to " + destPath + " failed");
   }
 } catch (IOException e) {
+  LOG.error("Alter Table operation for " + dbname + "." + name + " failed.", e);
 
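The HiveAlterHandler hunk above matters because FileSystem.rename(), like java.io.File.renameTo(), reports failure with a boolean rather than an exception, which is how a failed cross-encryption-zone rename could previously go half-noticed. A minimal sketch of the guard, using java.io.File as a stand-in so it runs without Hadoop on the classpath:

    import java.io.File;
    import java.io.IOException;

    public class CheckedRenameSketch {
      // Turn the boolean failure signal into an exception so callers cannot ignore it.
      static void renameOrThrow(File src, File dest) throws IOException {
        if (src.exists() && !src.renameTo(dest)) {
          throw new IOException("Renaming " + src + " to " + dest + " failed");
        }
      }

      public static void main(String[] args) throws IOException {
        File src = File.createTempFile("rename-sketch", ".tmp");
        try {
          // Renaming into a directory that does not exist fails on most platforms.
          renameOrThrow(src, new File("/no/such/dir/dest.tmp"));
        } catch (IOException e) {
          // analogous to the LOG.error(...) added in HiveAlterHandler
          System.err.println("alter table handling: " + e.getMessage());
        } finally {
          src.delete();
        }
      }
    }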

svn commit: r1674120 - in /hive/trunk: hcatalog/src/test/e2e/templeton/deployers/config/hive/ ql/src/test/org/apache/hadoop/hive/ql/lockmgr/

2015-04-16 Thread ekoifman
Author: ekoifman
Date: Thu Apr 16 17:59:33 2015
New Revision: 1674120

URL: http://svn.apache.org/r1674120
Log:
HIVE-10242 ACID: insert overwrite prevents create table command (Eugene 
Koifman, reviewed by Alan Gates)

Added:

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties

hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager2.java

Added: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties?rev=1674120view=auto
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
 (added)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
 Thu Apr 16 17:59:33 2015
@@ -0,0 +1,88 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Define some default values that can be overridden by system properties
+hive.log.threshold=ALL
+hive.root.logger=DEBUG,DRFA
+hive.log.dir=/tmp/ekoifman
+hive.log.file=hive.log
+
+# Define the root logger to the system property hadoop.root.logger.
+log4j.rootLogger=${hive.root.logger}, EventCounter
+
+# Logging Threshold
+log4j.threshold=${hive.log.threshold}
+
+#
+# Daily Rolling File Appender
+#
+# Use the PidDailyerRollingFileAppend class instead if you want to use 
separate log files
+# for different CLI session.
+#
+# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender
+
+log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender
+
+log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file}
+
+# Rollver at midnight
+log4j.appender.DRFA.DatePattern=.yyyy-MM-dd
+
+# 30-day backup
+#log4j.appender.DRFA.MaxBackupIndex=30
+log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout
+
+# Pattern format: Date LogLevel LoggerName LogMessage
+#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+# Debugging Pattern format
+log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} 
(%F:%M(%L)) - %m%n
+
+
+#
+# console
+# Add console to rootlogger above if you want to use this
+#
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p 
%c{2}: %m%n
+log4j.appender.console.encoding=UTF-8
+
+#custom logging levels
+#log4j.logger.xxx=DEBUG
+
+#
+# Event Counter Appender
+# Sends counts of logging messages at different severity levels to Hadoop 
Metrics.
+#
+log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter
+
+
+log4j.category.DataNucleus=ERROR,DRFA
+log4j.category.Datastore=ERROR,DRFA
+log4j.category.Datastore.Schema=ERROR,DRFA
+log4j.category.JPOX.Datastore=ERROR,DRFA
+log4j.category.JPOX.Plugin=ERROR,DRFA
+log4j.category.JPOX.MetaData=ERROR,DRFA
+log4j.category.JPOX.Query=ERROR,DRFA
+log4j.category.JPOX.General=ERROR,DRFA
+log4j.category.JPOX.Enhancer=ERROR,DRFA
+
+
+# Silence useless ZK logs
+log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA
+log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA

Added: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml?rev=1674120view=auto
==
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
 (added)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
 Thu Apr 16 17:59:33 2015
@@ -0,0 +1,77 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding

svn commit: r1674119 - in /hive/trunk: hcatalog/src/test/e2e/templeton/deployers/ metastore/src/java/org/apache/hadoop/hive/metastore/txn/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache

2015-04-16 Thread ekoifman
Author: ekoifman
Date: Thu Apr 16 17:58:38 2015
New Revision: 1674119

URL: http://svn.apache.org/r1674119
Log:
HIVE-10242 ACID: insert overwrite prevents create table command (Eugene 
Koifman, reviewed by Alan Gates)

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbLockManager.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/DbTxnManager.java

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1674119r1=1674118r2=1674119view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh Thu Apr 16 
17:58:38 2015
@@ -50,6 +50,11 @@ if [ -z ${HADOOP_HOME} ]; then
   export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
 fi
 
+if [ -z ${MYSQL_CLIENT_JAR} ]; then
+  #if using MySQL backed metastore
+  export 
MYSQL_CLIENT_JAR=/Users/${USER}/dev/mysql-connector-java-5.1.30/mysql-connector-java-5.1.30-bin.jar
+fi
+
 export TEZ_CLIENT_HOME=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
 #Make sure Pig is built for the Hadoop version you are running
 export PIG_TAR_PATH=/Users/${USER}/dev/pig-${PIG_VERSION}-src/build

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh?rev=1674119r1=1674118r2=1674119view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh 
Thu Apr 16 17:58:38 2015
@@ -25,10 +25,17 @@
 source ./env.sh
 
 #decide which DB to run against
+#Derby
 cp 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
 ${HIVE_HOME}/conf/hive-site.xml
+#cp 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
 ${HIVE_HOME}/conf/hive-site.xml
 #cp 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml
 ${HIVE_HOME}/conf/hive-site.xml
 
 cp 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
 ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml
+cp 
${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties
 ${HIVE_HOME}/conf/hive-log4j.properties
+
+if [ -f ${MYSQL_CLIENT_JAR} ]; then
+  cp ${MYSQL_CLIENT_JAR} ${HIVE_HOME}/lib
+fi
 
 if [ -d ${WEBHCAT_LOG_DIR} ]; then
   rm -Rf ${WEBHCAT_LOG_DIR};

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1674119r1=1674118r2=1674119view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
 Thu Apr 16 17:58:38 2015
@@ -533,18 +533,29 @@ public class TxnHandler {
 }
   }
 
+  /**
+   * used to sort entries in {@link 
org.apache.hadoop.hive.metastore.api.ShowLocksResponse}
+   */
+  private static class LockInfoExt extends LockInfo {
+private final ShowLocksResponseElement e;
+LockInfoExt(ShowLocksResponseElement e, long intLockId) {
+  super(e, intLockId);
+  this.e = e;
+}
+  }
   public ShowLocksResponse showLocks(ShowLocksRequest rqst) throws 
MetaException {
 try {
   Connection dbConn = null;
   ShowLocksResponse rsp = new ShowLocksResponse();
   List<ShowLocksResponseElement> elems = new ArrayList<ShowLocksResponseElement>();
+  List<LockInfoExt> sortedList = new ArrayList<LockInfoExt>();
   Statement stmt = null;
   try {
 dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
 stmt = dbConn.createStatement();
 
 String s = "select hl_lock_ext_id, hl_txnid, hl_db, hl_table, hl_partition, hl_lock_state, " +
-  "hl_lock_type, hl_last_heartbeat, hl_acquired_at, hl_user, hl_host from HIVE_LOCKS";
+  "hl_lock_type, hl_last_heartbeat, hl_acquired_at, hl_user, hl_host, hl_lock_int_id from HIVE_LOCKS";
 LOG.debug("Doing to execute query <" + s + ">");
 ResultSet rs

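The LockInfoExt wrapper above is a small but reusable trick: when ShowLocksResponse entries need to come out ordered by lock id, subclass the comparable key type (LockInfo) and let each key carry its response element, so sorting the keys sorts the payload. A standalone sketch with stand-in types (the real LockInfo and ShowLocksResponseElement live in the metastore API):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    public class SortedLocksSketch {
      // Stand-in for LockInfo: comparable by external lock id.
      static class LockInfo implements Comparable<LockInfo> {
        final long extLockId;
        LockInfo(long extLockId) { this.extLockId = extLockId; }
        public int compareTo(LockInfo o) { return Long.compare(extLockId, o.extLockId); }
      }

      // Stand-in for ShowLocksResponseElement.
      static class Element {
        final long extLockId;
        final String table;
        Element(long extLockId, String table) { this.extLockId = extLockId; this.table = table; }
      }

      // The LockInfoExt pattern: the sort key carries its payload.
      static class LockInfoExt extends LockInfo {
        final Element e;
        LockInfoExt(Element e) { super(e.extLockId); this.e = e; }
      }

      public static void main(String[] args) {
        List<LockInfoExt> sortedList = new ArrayList<LockInfoExt>();
        sortedList.add(new LockInfoExt(new Element(7, "t2")));
        sortedList.add(new LockInfoExt(new Element(3, "t1")));
        Collections.sort(sortedList);  // orders by LockInfo.compareTo
        for (LockInfoExt l : sortedList) {
          System.out.println(l.e.extLockId + " " + l.e.table);  // 3 t1, then 7 t2
        }
      }
    }
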
hive git commit: HIVE-10629 Dropping table in an encrypted zone does not drop warehouse directory(Eugene Koifman, reviewed by Sergio Pena)

2015-05-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master fc05556ce -> 8d3108c41


HIVE-10629 Dropping table in an encrypted zone does not drop warehouse 
directory(Eugene Koifman, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8d3108c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8d3108c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8d3108c4

Branch: refs/heads/master
Commit: 8d3108c41802d7d1766c0d85bc81e895ad348dfb
Parents: fc05556
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed May 20 10:05:31 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed May 20 10:05:31 2015 -0700

--
 data/scripts/q_test_cleanup_for_encryption.sql  |  2 +-
 data/scripts/q_test_init_for_encryption.sql |  2 +-
 .../test/resources/testconfiguration.properties |  3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java|  3 +-
 .../hadoop/hive/metastore/HiveMetaStore.java| 36 +
 .../clientpositive/encryption_drop_table.q  | 18 +++
 .../encryption_insert_partition_dynamic.q   |  8 +--
 .../encryption_insert_partition_static.q|  8 +--
 .../encryption_join_unencrypted_tbl.q   |  4 +-
 ...yption_join_with_different_encryption_keys.q |  8 +--
 .../encryption_load_data_to_encrypted_tables.q  |  4 +-
 .../clientpositive/encryption_move_tbl.q|  4 +-
 .../encryption_select_read_only_encrypted_tbl.q |  4 +-
 .../encrypted/encryption_drop_table.q.out   | 56 
 .../encryption_insert_partition_dynamic.q.out   | 16 +++---
 .../encryption_insert_partition_static.q.out| 30 +--
 .../encryption_join_unencrypted_tbl.q.out   |  8 +--
 ...on_join_with_different_encryption_keys.q.out | 16 +++---
 ...cryption_load_data_to_encrypted_tables.q.out |  8 +--
 .../encrypted/encryption_move_tbl.q.out | 12 -
 ...ryption_select_read_only_encrypted_tbl.q.out |  8 +--
 .../apache/hadoop/hive/shims/Hadoop23Shims.java |  3 ++
 22 files changed, 184 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8d3108c4/data/scripts/q_test_cleanup_for_encryption.sql
--
diff --git a/data/scripts/q_test_cleanup_for_encryption.sql 
b/data/scripts/q_test_cleanup_for_encryption.sql
index 08264ee..070cdbe 100644
--- a/data/scripts/q_test_cleanup_for_encryption.sql
+++ b/data/scripts/q_test_cleanup_for_encryption.sql
@@ -1 +1 @@
-DROP TABLE IF EXISTS src;
+DROP TABLE IF EXISTS src PURGE;

http://git-wip-us.apache.org/repos/asf/hive/blob/8d3108c4/data/scripts/q_test_init_for_encryption.sql
--
diff --git a/data/scripts/q_test_init_for_encryption.sql 
b/data/scripts/q_test_init_for_encryption.sql
index 1822ebb..56b44e0 100644
--- a/data/scripts/q_test_init_for_encryption.sql
+++ b/data/scripts/q_test_init_for_encryption.sql
@@ -1,4 +1,4 @@
-DROP TABLE IF EXISTS src;
+DROP TABLE IF EXISTS src PURGE;
 
 CREATE TABLE src(key STRING COMMENT 'default', value STRING COMMENT 'default') 
STORED AS TEXTFILE;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8d3108c4/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index a485408..b9d85f6 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -349,7 +349,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_select_read_only_unencrypted_tbl.q,\
   encryption_load_data_to_encrypted_tables.q, \
   encryption_unencrypted_nonhdfs_external_tables.q \
-  encryption_move_tbl.q
+  encryption_move_tbl.q \
+  encryption_drop_table.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/8d3108c4/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 2b4bd85..39d5d9e 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -372,6 +372,7 @@ public class QTestUtil {
 // Set the security key provider so that the MiniDFS cluster is 
initialized
 // with encryption
 conf.set(SECURITY_KEY_PROVIDER_URI_NAME, getKeyProviderURI());
+conf.setInt("fs.trash.interval", 50);
 
 dfs = 

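The one-line QTestUtil change above (conf.setInt("fs.trash.interval", 50)) is what makes the new drop-table tests meaningful: fs.trash.interval is expressed in minutes, and any value greater than zero enables the HDFS trash that a DROP TABLE without PURGE relies on. A tiny snippet showing just that switch, assuming hadoop-common on the classpath:

    import org.apache.hadoop.conf.Configuration;

    public class TrashConfSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Minutes between trash checkpoints; > 0 turns the trash feature on,
        // so deleted warehouse files land in .Trash instead of vanishing.
        conf.setInt("fs.trash.interval", 50);
        System.out.println("trash enabled: " + (conf.getInt("fs.trash.interval", 0) > 0));
      }
    }
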
hive git commit: HIVE-11066 Ensure tests don't share directories on FS

2015-06-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 ad308db50 -> 85676d5dd


HIVE-11066 Ensure tests don't share directories on FS


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/85676d5d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/85676d5d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/85676d5d

Branch: refs/heads/branch-1.2
Commit: 85676d5dd05d42fdadf36bcc47f8e27fc79d3aec
Parents: ad308db
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Jun 20 17:49:24 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Jun 20 17:49:24 2015 -0700

--
 .../hcatalog/pig/TestHCatLoaderEncryption.java| 18 ++
 .../hive/ql/txn/compactor/TestCompactor.java  | 11 +++
 2 files changed, 21 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/85676d5d/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
--
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index da8deb5..3b8076b 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -28,7 +28,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.io.FileUtils;
 
@@ -84,12 +86,13 @@ import static org.junit.Assume.assumeTrue;
 
 @RunWith(Parameterized.class)
 public class TestHCatLoaderEncryption {
+  private static final AtomicInteger salt = new AtomicInteger(new 
Random().nextInt());
   private static final Logger LOG = 
LoggerFactory.getLogger(TestHCatLoader.class);
-  private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty
+  private final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty
     ("java.io.tmpdir") + File.separator + TestHCatLoader.class.getCanonicalName() + "-" +
-  System.currentTimeMillis());
-  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
-  private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
+  System.currentTimeMillis() + "_" + salt.getAndIncrement());
+  private final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+  private final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String BASIC_TABLE = "junit_unparted_basic";
   private static final String ENCRYPTED_TABLE = "encrypted_table";
   private static final String SECURITY_KEY_PROVIDER_URI_NAME = "dfs.encryption.key.provider.uri";
@@ -180,6 +183,13 @@ public class TestHCatLoaderEncryption {
 hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);

+String s = hiveConf.get("hdfs.minidfs.basedir");
+if(s == null || s.length() <= 0) {
+  //return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
+  hiveConf.set("hdfs.minidfs.basedir",
+    System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() +
+      "_" + salt.getAndIncrement() + "/dfs/");
+}
 if (Shell.WINDOWS) {
   WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/85676d5d/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index c682c3e..abca1ce 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -54,18 +54,21 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  */
 public class TestCompactor {
+  private static final AtomicInteger salt = new AtomicInteger(new 
Random().nextInt());
   private 

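Both hunks in this commit apply the same recipe for collision-free test directories: a static AtomicInteger seeded from Random separates instances within one JVM, the random seed itself separates concurrently launched JVMs, and System.currentTimeMillis() keeps reruns apart. A condensed sketch of the naming scheme:

    import java.io.File;
    import java.util.Random;
    import java.util.concurrent.atomic.AtomicInteger;

    public class UniqueTestDirSketch {
      // Random seed: distinct across JVMs. getAndIncrement(): distinct within a JVM.
      private static final AtomicInteger salt = new AtomicInteger(new Random().nextInt());

      final String testDataDir = System.getProperty("java.io.tmpdir") + File.separator
          + UniqueTestDirSketch.class.getCanonicalName() + "-"
          + System.currentTimeMillis() + "_" + salt.getAndIncrement();

      public static void main(String[] args) {
        // Two instances created in the same millisecond still differ in the salt suffix.
        System.out.println(new UniqueTestDirSketch().testDataDir);
        System.out.println(new UniqueTestDirSketch().testDataDir);
      }
    }
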
hive git commit: HIVE-11066 Ensure tests don't share directories on FS

2015-06-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master e09e14a48 -> 84d38ef0a


HIVE-11066 Ensure tests don't share directories on FS


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84d38ef0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84d38ef0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84d38ef0

Branch: refs/heads/master
Commit: 84d38ef0a5aac97d44c99dd252dfc8c0c58bfd12
Parents: e09e14a
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Jun 20 17:10:03 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Jun 20 17:11:21 2015 -0700

--
 .../hcatalog/pig/TestHCatLoaderEncryption.java| 18 ++
 .../hive/ql/txn/compactor/TestCompactor.java  | 11 +++
 2 files changed, 21 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/84d38ef0/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
--
diff --git 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index da8deb5..3b8076b 100644
--- 
a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ 
b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -28,7 +28,9 @@ import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 import java.util.Set;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.io.FileUtils;
 
@@ -84,12 +86,13 @@ import static org.junit.Assume.assumeTrue;
 
 @RunWith(Parameterized.class)
 public class TestHCatLoaderEncryption {
+  private static final AtomicInteger salt = new AtomicInteger(new 
Random().nextInt());
   private static final Logger LOG = 
LoggerFactory.getLogger(TestHCatLoader.class);
-  private static final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty
+  private final String TEST_DATA_DIR = HCatUtil.makePathASafeFileName(System.getProperty
     ("java.io.tmpdir") + File.separator + TestHCatLoader.class.getCanonicalName() + "-" +
-  System.currentTimeMillis());
-  private static final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
-  private static final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
+  System.currentTimeMillis() + "_" + salt.getAndIncrement());
+  private final String TEST_WAREHOUSE_DIR = TEST_DATA_DIR + "/warehouse";
+  private final String BASIC_FILE_NAME = TEST_DATA_DIR + "/basic.input.data";
   private static final String BASIC_TABLE = "junit_unparted_basic";
   private static final String ENCRYPTED_TABLE = "encrypted_table";
   private static final String SECURITY_KEY_PROVIDER_URI_NAME = "dfs.encryption.key.provider.uri";
@@ -180,6 +183,13 @@ public class TestHCatLoaderEncryption {
 hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, TEST_WAREHOUSE_DIR);

+String s = hiveConf.get("hdfs.minidfs.basedir");
+if(s == null || s.length() <= 0) {
+  //return System.getProperty("test.build.data", "build/test/data") + "/dfs/";
+  hiveConf.set("hdfs.minidfs.basedir",
+    System.getProperty("test.build.data", "build/test/data") + "_" + System.currentTimeMillis() +
+      "_" + salt.getAndIncrement() + "/dfs/");
+}
 if (Shell.WINDOWS) {
   WindowsPathUtil.convertPathsFromWindowsToHdfs(hiveConf);
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/84d38ef0/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
index c682c3e..abca1ce 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/txn/compactor/TestCompactor.java
@@ -54,18 +54,21 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Random;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
 
 /**
  */
 public class TestCompactor {
+  private static final AtomicInteger salt = new AtomicInteger(new 
Random().nextInt());
   private static 

hive git commit: HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan Gates)

2015-06-17 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master a3792b7b7 -> 37e82baaf


HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/37e82baa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/37e82baa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/37e82baa

Branch: refs/heads/master
Commit: 37e82baafbdf171757cd07939ee9745e4c3b5adb
Parents: a3792b7
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed Jun 17 13:05:10 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed Jun 17 13:05:10 2015 -0700

--
 .../hive/metastore/txn/CompactionInfo.java  |  9 +
 .../metastore/txn/CompactionTxnHandler.java | 19 +--
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 36 +++-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |  4 +--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  2 +-
 5 files changed, 41 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/37e82baa/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
index 939df3f..1dae7b9 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
@@ -73,4 +73,13 @@ public class CompactionInfo implements 
ComparableCompactionInfo {
   public int compareTo(CompactionInfo o) {
 return getFullPartitionName().compareTo(o.getFullPartitionName());
   }
+  public String toString() {
+    return "id:" + id + "," +
+      "dbname:" + dbname + "," +
+      "tableName:" + tableName + "," +
+      "partName:" + partName + "," +
+      "type:" + type + "," +
+      "runAs:" + runAs + "," +
+      "tooManyAborts:" + tooManyAborts;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/37e82baa/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index 26e72be..328a65c 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -95,7 +95,7 @@ public class CompactionTxnHandler extends TxnHandler {
 dbConn.rollback();
   } catch (SQLException e) {
 LOG.error("Unable to connect to transaction database " + e.getMessage());
-checkRetryable(dbConn, e, "findPotentialCompactions");
+checkRetryable(dbConn, e, "findPotentialCompactions(maxAborted:" + maxAborted + ")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -133,7 +133,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "setRunAs");
+checkRetryable(dbConn, e, "setRunAs(cq_id:" + cq_id + ",user:" + user +")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -194,7 +194,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to select next element for compaction, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "findNextToCompact");
+checkRetryable(dbConn, e, "findNextToCompact(workerId:" + workerId + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -232,7 +232,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCompacted");
+checkRetryable(dbConn, e, "markCompacted(" + info + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -374,7 +374,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to delete from compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCleaned");
+

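The whole diff is one idea applied repeatedly: the string handed to checkRetryable() now embeds the call's arguments, so a retry or failure log line identifies the exact request instead of just the method name. A sketch of the idea with a hypothetical checkRetryable stand-in:

    public class ContextualLoggingSketch {
      // Hypothetical stand-in for the logging side of TxnHandler.checkRetryable().
      static void checkRetryable(Exception e, String callDescription) {
        System.err.println("retryable error in " + callDescription + ": " + e.getMessage());
      }

      static void findNextToCompact(String workerId) {
        try {
          throw new RuntimeException("lock wait timeout");  // simulate a transient DB error
        } catch (RuntimeException e) {
          // before: checkRetryable(e, "findNextToCompact");
          // after:  the argument rides along, so the log pinpoints the request
          checkRetryable(e, "findNextToCompact(workerId:" + workerId + ")");
        }
      }

      public static void main(String[] args) {
        findNextToCompact("worker-42");
      }
    }
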
hive git commit: HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan Gates)

2015-06-17 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 6675a7332 -> cae46469b


HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cae46469
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cae46469
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cae46469

Branch: refs/heads/branch-1
Commit: cae46469bec75389849f44835136cd22d527f269
Parents: 6675a73
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed Jun 17 13:19:20 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed Jun 17 13:19:20 2015 -0700

--
 .../hive/metastore/txn/CompactionInfo.java  |  9 +
 .../metastore/txn/CompactionTxnHandler.java | 19 +--
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 36 +++-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |  4 +--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  2 +-
 5 files changed, 41 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cae46469/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
index 939df3f..1dae7b9 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
@@ -73,4 +73,13 @@ public class CompactionInfo implements 
ComparableCompactionInfo {
   public int compareTo(CompactionInfo o) {
 return getFullPartitionName().compareTo(o.getFullPartitionName());
   }
+  public String toString() {
+    return "id:" + id + "," +
+      "dbname:" + dbname + "," +
+      "tableName:" + tableName + "," +
+      "partName:" + partName + "," +
+      "type:" + type + "," +
+      "runAs:" + runAs + "," +
+      "tooManyAborts:" + tooManyAborts;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/cae46469/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index 26e72be..328a65c 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -95,7 +95,7 @@ public class CompactionTxnHandler extends TxnHandler {
 dbConn.rollback();
   } catch (SQLException e) {
 LOG.error("Unable to connect to transaction database " + e.getMessage());
-checkRetryable(dbConn, e, "findPotentialCompactions");
+checkRetryable(dbConn, e, "findPotentialCompactions(maxAborted:" + maxAborted + ")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -133,7 +133,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "setRunAs");
+checkRetryable(dbConn, e, "setRunAs(cq_id:" + cq_id + ",user:" + user +")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -194,7 +194,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to select next element for compaction, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "findNextToCompact");
+checkRetryable(dbConn, e, "findNextToCompact(workerId:" + workerId + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -232,7 +232,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCompacted");
+checkRetryable(dbConn, e, "markCompacted(" + info + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -374,7 +374,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to delete from compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCleaned");
+

hive git commit: HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan Gates)

2015-06-17 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 70108e115 -> caf4ecc7b


HIVE-11006 improve logging wrt ACID module (Eugene Koifman, reviewed by Alan 
Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/caf4ecc7
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/caf4ecc7
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/caf4ecc7

Branch: refs/heads/branch-1.2
Commit: caf4ecc7b00bd053ea5e5af9d1bb5c6d6898032d
Parents: 70108e1
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed Jun 17 13:11:33 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed Jun 17 13:11:33 2015 -0700

--
 .../hive/metastore/txn/CompactionInfo.java  |  9 +
 .../metastore/txn/CompactionTxnHandler.java | 19 +--
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 36 +++-
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   |  4 +--
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  2 +-
 5 files changed, 41 insertions(+), 29 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/caf4ecc7/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
index 939df3f..1dae7b9 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionInfo.java
@@ -73,4 +73,13 @@ public class CompactionInfo implements 
ComparableCompactionInfo {
   public int compareTo(CompactionInfo o) {
 return getFullPartitionName().compareTo(o.getFullPartitionName());
   }
+  public String toString() {
+    return "id:" + id + "," +
+      "dbname:" + dbname + "," +
+      "tableName:" + tableName + "," +
+      "partName:" + partName + "," +
+      "type:" + type + "," +
+      "runAs:" + runAs + "," +
+      "tooManyAborts:" + tooManyAborts;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/caf4ecc7/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
index 52147bc..141cf3d 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/CompactionTxnHandler.java
@@ -95,7 +95,7 @@ public class CompactionTxnHandler extends TxnHandler {
 dbConn.rollback();
   } catch (SQLException e) {
 LOG.error("Unable to connect to transaction database " + e.getMessage());
-checkRetryable(dbConn, e, "findPotentialCompactions");
+checkRetryable(dbConn, e, "findPotentialCompactions(maxAborted:" + maxAborted + ")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -133,7 +133,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "setRunAs");
+checkRetryable(dbConn, e, "setRunAs(cq_id:" + cq_id + ",user:" + user +")");
   } finally {
 closeDbConn(dbConn);
 closeStmt(stmt);
@@ -194,7 +194,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to select next element for compaction, " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "findNextToCompact");
+checkRetryable(dbConn, e, "findNextToCompact(workerId:" + workerId + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -232,7 +232,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to update compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCompacted");
+checkRetryable(dbConn, e, "markCompacted(" + info + ")");
 throw new MetaException("Unable to connect to transaction database " +
   StringUtils.stringifyException(e));
   } finally {
@@ -374,7 +374,7 @@ public class CompactionTxnHandler {
 LOG.error("Unable to delete from compaction queue " + e.getMessage());
 LOG.debug("Going to rollback");
 rollbackDBConn(dbConn);
-checkRetryable(dbConn, e, "markCleaned");
+

hive git commit: HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not enabled

2015-06-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 494ab8db5 -> cb705ef40


HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not 
enabled


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cb705ef4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cb705ef4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cb705ef4

Branch: refs/heads/branch-1
Commit: cb705ef40475043f16891b8355b07038fd5d8d07
Parents: 494ab8d
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Jun 13 10:35:57 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Jun 13 10:35:57 2015 -0700

--
 .../hive/hcatalog/templeton/tool/TempletonControllerJob.java  | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/cb705ef4/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index 5c7de80..f47feeb 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -133,9 +133,10 @@ public class TempletonControllerJob extends Configured 
implements Tool, JobSubmi
 
 JobClient jc = new JobClient(new JobConf(job.getConfiguration()));
 
-    Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
-    job.getCredentials().addToken(new Text("mr token"), mrdt);
-
+    if(UserGroupInformation.isSecurityEnabled()) {
+      Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
+      job.getCredentials().addToken(new Text("mr token"), mrdt);
+    }
 String metastoreTokenStrForm = addHMSToken(job, user);
 
 job.submit();


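The guard is the standard Hadoop idiom: UserGroupInformation.isSecurityEnabled() is true only under Kerberos, and without Kerberos there is no authority to issue a delegation token, so requesting one fails, which is the bug this commit removes. A sketch of the conditional token step (the TokenSink interface and fetchMrToken() are hypothetical stand-ins for the Job credentials plumbing):

    import org.apache.hadoop.security.UserGroupInformation;

    public class ConditionalTokenSketch {
      interface TokenSink { void add(String alias, Object token); }

      static void maybeAddDelegationToken(TokenSink sink) {
        // Only ask for a delegation token when Kerberos is actually on.
        if (UserGroupInformation.isSecurityEnabled()) {
          sink.add("mr token", fetchMrToken());  // stands in for jc.getDelegationToken(...)
        }
      }

      private static Object fetchMrToken() {
        return new Object();  // placeholder for Token<DelegationTokenIdentifier>
      }

      public static void main(String[] args) {
        System.out.println("security enabled: " + UserGroupInformation.isSecurityEnabled());
        maybeAddDelegationToken((alias, token) -> System.out.println("added " + alias));
      }
    }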

hive git commit: HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not enabled

2015-06-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 5fd11679a -> 051bf0b17


HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not 
enabled


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/051bf0b1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/051bf0b1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/051bf0b1

Branch: refs/heads/master
Commit: 051bf0b1785262e29a269b76c8aa798e8e85117c
Parents: 5fd1167
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Jun 13 10:21:02 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Jun 13 10:21:02 2015 -0700

--
 .../hive/hcatalog/templeton/tool/TempletonControllerJob.java  | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/051bf0b1/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index 5c7de80..f47feeb 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -133,9 +133,10 @@ public class TempletonControllerJob extends Configured 
implements Tool, JobSubmi
 
 JobClient jc = new JobClient(new JobConf(job.getConfiguration()));
 
-    Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
-    job.getCredentials().addToken(new Text("mr token"), mrdt);
-
+    if(UserGroupInformation.isSecurityEnabled()) {
+      Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
+      job.getCredentials().addToken(new Text("mr token"), mrdt);
+    }
 String metastoreTokenStrForm = addHMSToken(job, user);
 
 job.submit();



hive git commit: HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not enabled

2015-06-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 0a90c2bef -> 2b340b0f9


HIVE-10992:WebHCat should not create delegation tokens when Kerberos is not 
enabled


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2b340b0f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2b340b0f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2b340b0f

Branch: refs/heads/branch-1.2
Commit: 2b340b0f97d62529ebe4b345551014bb061856d9
Parents: 0a90c2b
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Jun 13 10:29:03 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Jun 13 10:29:03 2015 -0700

--
 .../hive/hcatalog/templeton/tool/TempletonControllerJob.java  | 7 ---
 1 file changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2b340b0f/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index 5c7de80..f47feeb 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -133,9 +133,10 @@ public class TempletonControllerJob extends Configured 
implements Tool, JobSubmi
 
 JobClient jc = new JobClient(new JobConf(job.getConfiguration()));
 
-    Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
-    job.getCredentials().addToken(new Text("mr token"), mrdt);
-
+    if(UserGroupInformation.isSecurityEnabled()) {
+      Token<DelegationTokenIdentifier> mrdt = jc.getDelegationToken(new Text("mr token"));
+      job.getCredentials().addToken(new Text("mr token"), mrdt);
+    }
 String metastoreTokenStrForm = addHMSToken(job, user);
 
 job.submit();



hive git commit: HIVE-10828 - Insert with schema and dynamic partitions NullPointerException (Eugene Koifman, reviewed by Ashutosh Chauhan)

2015-05-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 1293f3d38 -> 49cc02873


HIVE-10828 - Insert with schema and dynamic partitions NullPointerException 
(Eugene Koifman, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/49cc0287
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/49cc0287
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/49cc0287

Branch: refs/heads/master
Commit: 49cc02873fb7b1d43d5e1dbb3ef3435c877f61b2
Parents: 1293f3d
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 28 10:50:45 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 28 10:50:45 2015 -0700

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 14 
 .../clientpositive/insert_into_with_schema2.q   | 11 ++
 .../insert_into_with_schema2.q.out  | 37 
 3 files changed, 55 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/49cc0287/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 7f355e5..351c267 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3864,7 +3864,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 }
 selectStar = selectStar  exprList.getChildCount() == posn + 1;
 
-handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, qb, 
selExprList);
+out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, 
qb, selExprList);
 
 ArrayList<String> columnNames = new ArrayList<String>();
 Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
@@ -3909,14 +3909,14 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
* @see #handleInsertStatementSpecPhase1(ASTNode, QBParseInfo, 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.Phase1Ctx)
* @throws SemanticException
*/
-  private void handleInsertStatementSpec(List<ExprNodeDesc> col_list, String dest,
+  private RowResolver handleInsertStatementSpec(List<ExprNodeDesc> col_list, String dest,
  RowResolver outputRR, RowResolver 
inputRR, QB qb,
  ASTNode selExprList) throws 
SemanticException {
 //(z,x)
 List<String> targetTableSchema = qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
 if(targetTableSchema == null) {
   //no insert schema was specified
-  return;
+  return outputRR;
 }
 if(targetTableSchema.size() != col_list.size()) {
   Table target = qb.getMetaData().getDestTableForAlias(dest);
@@ -3959,6 +3959,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 }
   }
 }
+RowResolver newOutputRR = new RowResolver();
 //now make the select produce <regular columns>,<dynamic partition columns> with
 //where missing columns are NULL-filled
 for(String f : targetTableColNames) {
@@ -3967,7 +3968,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 new_col_list.add(targetCol2Projection.get(f));
 ColumnInfo ci = targetCol2ColumnInfo.get(f);//todo: is this OK?
 ci.setInternalName(getColumnInternalName(colListPos));
-newSchema.add(ci);
+newOutputRR.put(ci.getTabAlias(), ci.getInternalName(), ci);
   }
   else {
 //add new 'synthetic' columns for projections not provided by Select
@@ -3979,14 +3980,13 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 final String tableAlias = null;//this column doesn't come from any 
table
 ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos),
   exp.getWritableObjectInspector(), tableAlias, false);
-newSchema.add(colInfo);
-outputRR.addMappingOnly(colInfo.getTabAlias(), 
colInfo.getInternalName(), colInfo);
+newOutputRR.put(colInfo.getTabAlias(), colInfo.getInternalName(), 
colInfo);
   }
   colListPos++;
 }
 col_list.clear();
 col_list.addAll(new_col_list);
-outputRR.setRowSchema(new RowSchema(newSchema));
+return newOutputRR;
   }
   String recommendName(ExprNodeDesc exp, String colAlias) {
 if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/49cc0287/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
--
diff --git 

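Behind the RowResolver plumbing, the core of handleInsertStatementSpec is a reordering pass: walk the target table's columns, reuse the expression the INSERT's column list supplied where there is one, and synthesize a NULL for every column the list omitted. A self-contained sketch of that mapping (string expressions stand in for ExprNodeDesc):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class InsertSchemaSketch {
      // Supplied expressions in table column order, NULL literals for the rest.
      static List<String> projectWithNullFill(List<String> targetCols,
                                              Map<String, String> providedExprs) {
        List<String> projection = new ArrayList<String>();
        for (String col : targetCols) {
          if (providedExprs.containsKey(col)) {
            projection.add(providedExprs.get(col));
          } else {
            projection.add("NULL");  // the diff's "synthetic" column
          }
        }
        return projection;
      }

      public static void main(String[] args) {
        // INSERT INTO t (z, x) SELECT a, b FROM src   -- where t has columns x, y, z
        Map<String, String> provided = new LinkedHashMap<String, String>();
        provided.put("z", "a");
        provided.put("x", "b");
        System.out.println(projectWithNullFill(Arrays.asList("x", "y", "z"), provided));
        // prints [b, NULL, a]
      }
    }
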
hive git commit: HIVE-10828 - Insert with schema and dynamic partitions NullPointerException (Eugene Koifman, reviewed by Ashutosh Chauhan)

2015-05-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 f0c790df2 - 2e135e269


HIVE-10828 - Insert with schema and dynamic partitions NullPointerException 
(Eugene Koifman, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/2e135e26
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/2e135e26
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/2e135e26

Branch: refs/heads/branch-1.2
Commit: 2e135e269909a8d69a9fa81ab7e73385409a3b11
Parents: f0c790d
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 28 10:57:26 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 28 10:57:26 2015 -0700

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 14 
 .../clientpositive/insert_into_with_schema2.q   | 11 ++
 .../insert_into_with_schema2.q.out  | 37 
 3 files changed, 55 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/2e135e26/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 50c57fc..04fd6cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3861,7 +3861,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 }
 selectStar = selectStar && exprList.getChildCount() == posn + 1;
 
-handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, qb, 
selExprList);
+out_rwsch = handleInsertStatementSpec(col_list, dest, out_rwsch, inputRR, 
qb, selExprList);
 
 ArrayList<String> columnNames = new ArrayList<String>();
 Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>();
@@ -3906,14 +3906,14 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
* @see #handleInsertStatementSpecPhase1(ASTNode, QBParseInfo, 
org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.Phase1Ctx)
* @throws SemanticException
*/
-  private void handleInsertStatementSpec(List<ExprNodeDesc> col_list, String 
dest,
+  private RowResolver handleInsertStatementSpec(List<ExprNodeDesc> col_list, 
String dest,
  RowResolver outputRR, RowResolver 
inputRR, QB qb,
  ASTNode selExprList) throws 
SemanticException {
 //(z,x)
 List<String> targetTableSchema = 
qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
 if(targetTableSchema == null) {
   //no insert schema was specified
-  return;
+  return outputRR;
 }
 if(targetTableSchema.size() != col_list.size()) {
   Table target = qb.getMetaData().getDestTableForAlias(dest);
@@ -3956,6 +3956,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 }
   }
 }
+RowResolver newOutputRR = new RowResolver();
 //now make the select produce <regular columns>,<dynamic partition 
columns> with
 //where missing columns are NULL-filled
 for(String f : targetTableColNames) {
@@ -3964,7 +3965,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 new_col_list.add(targetCol2Projection.get(f));
 ColumnInfo ci = targetCol2ColumnInfo.get(f);//todo: is this OK?
 ci.setInternalName(getColumnInternalName(colListPos));
-newSchema.add(ci);
+newOutputRR.put(ci.getTabAlias(), ci.getInternalName(), ci);
   }
   else {
 //add new 'synthetic' columns for projections not provided by Select
@@ -3976,14 +3977,13 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 final String tableAlias = null;//this column doesn't come from any 
table
 ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos),
   exp.getWritableObjectInspector(), tableAlias, false);
-newSchema.add(colInfo);
-outputRR.addMappingOnly(colInfo.getTabAlias(), 
colInfo.getInternalName(), colInfo);
+newOutputRR.put(colInfo.getTabAlias(), colInfo.getInternalName(), 
colInfo);
   }
   colListPos++;
 }
 col_list.clear();
 col_list.addAll(new_col_list);
-outputRR.setRowSchema(new RowSchema(newSchema));
+return newOutputRR;
   }
   String recommendName(ExprNodeDesc exp, String colAlias) {
 if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/2e135e26/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
--
diff --git 

hive git commit: HIVE-10858 WebHCat specific resources should be added to HADOOP_CLASSPATH first (Eugene Koifman, Reviewed by Thejas Nair)

2015-05-29 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 7ad9b1489 - fa2dd8391


HIVE-10858 WebHCat specific resources should be added to HADOOP_CLASSPATH 
first (Eugene Koifman, Reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fa2dd839
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fa2dd839
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fa2dd839

Branch: refs/heads/master
Commit: fa2dd83913b2464d81504a97a9244187889a3c9f
Parents: 7ad9b14
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 29 15:28:21 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 29 15:28:21 2015 -0700

--
 .../templeton/tool/JobSubmissionConstants.java  |  1 +
 .../hcatalog/templeton/tool/LaunchMapper.java   | 27 +---
 2 files changed, 19 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fa2dd839/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
index 1d560b6..d3dc3f7 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
@@ -55,6 +55,7 @@ public interface JobSubmissionConstants {
   public static final String MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER =
 "__MR_JOB_TAGS_OPTION=MR_JOB_TAGS_JOBID__";
 
+  public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
   /**
* constants needed for Pig job submission
* The string values here are what Pig expects to see in its environment

http://git-wip-us.apache.org/repos/asf/hive/blob/fa2dd839/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index 3edd449..91fe247 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -107,12 +107,7 @@ public class LaunchMapper extends Mapper<NullWritable, 
NullWritable, Text, Text>
   }
   jdbcJars.setLength(jdbcJars.length() - 1);
   //this makes the jars available to Sqoop client
-  if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
-env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + 
File.pathSeparator + jdbcJars.toString());
-  }
-  else {
-env.put("HADOOP_CLASSPATH", jdbcJars.toString());
-  }
+  prependPathToVariable(HADOOP_CLASSPATH, env, jdbcJars.toString());
 }
   }
   private static void handleHadoopClasspathExtras(Configuration conf, 
Map<String, String> env)
@@ -134,11 +129,25 @@ public class LaunchMapper extends Mapper<NullWritable, 
NullWritable, Text, Text>
   paths.append(File.pathSeparator);
 }
 paths.setLength(paths.length() - 1);
-if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
-  env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + 
File.pathSeparator + paths);
+prependPathToVariable(HADOOP_CLASSPATH, env, paths.toString());
+  }
+  /**
+   * Ensures that {@code paths} are prepended to {@code pathVarName} and made 
available to forked child
+   * process.
+   * @param paths properly separated list of paths
+   */
+  private static void prependPathToVariable(String pathVarName, Map<String, 
String> env, String paths) {
+if(!TempletonUtils.isset(pathVarName) || !TempletonUtils.isset(paths) || 
env == null) {
+  return;
+}
+if(TempletonUtils.isset(env.get(pathVarName))) {
+  env.put(pathVarName, paths + File.pathSeparator + env.get(pathVarName));
+}
+else if(TempletonUtils.isset(System.getenv(pathVarName))) {
+  env.put(pathVarName, paths + File.pathSeparator + 
System.getenv(pathVarName));
 }
 else {
-  env.put("HADOOP_CLASSPATH", paths.toString());
+  env.put(pathVarName, paths);
 }
   }
   protected Process startJob(Context context, String user, String 
overrideClasspath)
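
[Editor's note] The precedence encoded in prependPathToVariable() above is: a
value already staged in the env map wins over the inherited process environment,
and the WebHCat-supplied paths always go first so they shadow older copies on the
cluster. A standalone sketch of the same logic (isset() here is a null/empty
check standing in for TempletonUtils.isset(); class and method names are
illustrative):

import java.io.File;
import java.util.HashMap;
import java.util.Map;

public class PrependPathDemo {
  static boolean isset(String s) { return s != null && !s.isEmpty(); }

  // Prepend 'paths' to the named path-style variable for a forked child,
  // preferring a value already staged in 'env' over System.getenv().
  static void prependPathToVariable(String var, Map<String, String> env, String paths) {
    if (!isset(var) || !isset(paths) || env == null) {
      return;
    }
    if (isset(env.get(var))) {
      env.put(var, paths + File.pathSeparator + env.get(var));
    } else if (isset(System.getenv(var))) {
      env.put(var, paths + File.pathSeparator + System.getenv(var));
    } else {
      env.put(var, paths);
    }
  }

  public static void main(String[] args) {
    Map<String, String> env = new HashMap<>();
    prependPathToVariable("HADOOP_CLASSPATH", env, "/opt/webhcat/lib/demo.jar");
    System.out.println(env.get("HADOOP_CLASSPATH"));  // the demo jar comes first
  }
}
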



hive git commit: HIVE-10858 WebHCat specific resources should be added to HADOOP_CLASSPATH first (Eugene Koifman, Reviewed by Thejas Nair)

2015-05-29 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 369aba5bd - 03be35e02


HIVE-10858 WebHCat specific resources should be added to HADOOP_CLASSPATH 
first (Eugene Koifman, Reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/03be35e0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/03be35e0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/03be35e0

Branch: refs/heads/branch-1.2
Commit: 03be35e02579bbb93367b4e2be5899e4232dda42
Parents: 369aba5
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 29 15:22:21 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 29 15:22:21 2015 -0700

--
 .../templeton/tool/JobSubmissionConstants.java  |  1 +
 .../hcatalog/templeton/tool/LaunchMapper.java   | 27 +---
 2 files changed, 19 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/03be35e0/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
index 1d560b6..d3dc3f7 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
@@ -55,6 +55,7 @@ public interface JobSubmissionConstants {
   public static final String MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER =
 "__MR_JOB_TAGS_OPTION=MR_JOB_TAGS_JOBID__";
 
+  public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
   /**
* constants needed for Pig job submission
* The string values here are what Pig expects to see in its environment

http://git-wip-us.apache.org/repos/asf/hive/blob/03be35e0/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index 3edd449..91fe247 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -107,12 +107,7 @@ public class LaunchMapper extends Mapper<NullWritable, 
NullWritable, Text, Text>
   }
   jdbcJars.setLength(jdbcJars.length() - 1);
   //this makes the jars available to Sqoop client
-  if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
-env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + 
File.pathSeparator + jdbcJars.toString());
-  }
-  else {
-env.put("HADOOP_CLASSPATH", jdbcJars.toString());
-  }
+  prependPathToVariable(HADOOP_CLASSPATH, env, jdbcJars.toString());
 }
   }
   private static void handleHadoopClasspathExtras(Configuration conf, 
Map<String, String> env)
@@ -134,11 +129,25 @@ public class LaunchMapper extends Mapper<NullWritable, 
NullWritable, Text, Text>
   paths.append(File.pathSeparator);
 }
 paths.setLength(paths.length() - 1);
-if(TempletonUtils.isset(System.getenv("HADOOP_CLASSPATH"))) {
-  env.put("HADOOP_CLASSPATH", System.getenv("HADOOP_CLASSPATH") + 
File.pathSeparator + paths);
+prependPathToVariable(HADOOP_CLASSPATH, env, paths.toString());
+  }
+  /**
+   * Ensures that {@code paths} are prepended to {@code pathVarName} and made 
available to forked child
+   * process.
+   * @param paths properly separated list of paths
+   */
+  private static void prependPathToVariable(String pathVarName, Map<String, 
String> env, String paths) {
+if(!TempletonUtils.isset(pathVarName) || !TempletonUtils.isset(paths) || 
env == null) {
+  return;
+}
+if(TempletonUtils.isset(env.get(pathVarName))) {
+  env.put(pathVarName, paths + File.pathSeparator + env.get(pathVarName));
+}
+else if(TempletonUtils.isset(System.getenv(pathVarName))) {
+  env.put(pathVarName, paths + File.pathSeparator + 
System.getenv(pathVarName));
 }
 else {
-  env.put("HADOOP_CLASSPATH", paths.toString());
+  env.put(pathVarName, paths);
 }
   }
   protected Process startJob(Context context, String user, String 
overrideClasspath)



hive git commit: HIVE-10605: Make hive version number update automatically in webhcat-default.xml during hive tar generation

2015-05-29 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 98a20e4ac - 369aba5bd


HIVE-10605: Make hive version number update automatically in webhcat-default.xml 
during hive tar generation


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/369aba5b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/369aba5b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/369aba5b

Branch: refs/heads/branch-1.2
Commit: 369aba5bdbf43d6adc9f58c3e1e723337ded0c22
Parents: 98a20e4
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 29 14:02:56 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 29 14:02:56 2015 -0700

--
 .../deployers/config/webhcat/webhcat-site.xml  |  9 +
 hcatalog/src/test/e2e/templeton/deployers/env.sh   |  1 +
 hcatalog/webhcat/svr/pom.xml   | 13 +++--
 .../webhcat/svr/src/main/config/webhcat-default.xml| 10 +-
 packaging/src/main/assembly/bin.xml|  1 +
 5 files changed, 15 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/369aba5b/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
--
diff --git 
a/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml 
b/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
index 7a2d450..8bcb1f0 100644
--- a/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml
@@ -24,8 +24,7 @@
 <!-- install. -->
 
 <configuration>
-<!--TODO:
-1. make pig/hive versions env variables-->
+<!--TODO:-->
 
   <property>
 <name>templeton.hcat</name>
@@ -34,12 +33,6 @@
   </property>
 
 <property>
-<name>templeton.libjars</name>
-
<value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0-SNAPSHOT.jar</value>
-<description>Jars to add to the classpath.</description>
-</property>
-
-<property>
 <name>templeton.pig.archive</name>
 <value>hdfs:///apps/templeton/pig-${env.PIG_VERSION}.tar.gz</value>
 <description>The path to the Pig archive.</description>

http://git-wip-us.apache.org/repos/asf/hive/blob/369aba5b/hcatalog/src/test/e2e/templeton/deployers/env.sh
--
diff --git a/hcatalog/src/test/e2e/templeton/deployers/env.sh 
b/hcatalog/src/test/e2e/templeton/deployers/env.sh
index a9cc2d7..8b719f2 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/env.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/env.sh
@@ -22,6 +22,7 @@
 
 # define necessary env vars here and source it in other files
 
+#todo: most of these variables are defined in pom.xml - see this can be 
integrated
 echo ${HADOOP_VERSION};
 
 if [ -z ${HADOOP_VERSION} ]; then

http://git-wip-us.apache.org/repos/asf/hive/blob/369aba5b/hcatalog/webhcat/svr/pom.xml
--
diff --git a/hcatalog/webhcat/svr/pom.xml b/hcatalog/webhcat/svr/pom.xml
index 6f3f9e5..1a6dc92 100644
--- a/hcatalog/webhcat/svr/pom.xml
+++ b/hcatalog/webhcat/svr/pom.xml
@@ -151,12 +151,13 @@
  </profiles>
 
  <build>
-  <resources>
-  <resource>
-  <targetPath>.</targetPath>
-  <directory>src/main/config</directory>
-  </resource>
-  </resources>
+<resources>
+  <resource>
+<targetPath>.</targetPath>
+<directory>src/main/config</directory>
+<filtering>true</filtering>
+  </resource>
+</resources>
 <plugins>
   <plugin>
 <groupId>org.apache.maven.plugins</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/369aba5b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
--
diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml 
b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
index a46eab7..ce7441e 100644
--- a/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
+++ b/hcatalog/webhcat/svr/src/main/config/webhcat-default.xml
@@ -39,7 +39,7 @@
 
  <property>
 <name>templeton.libjars</name>
-
<value>${env.TEMPLETON_HOME}/../lib/zookeeper-3.4.6.jar,${env.TEMPLETON_HOME}/../lib/hive-common-1.2.0.jar</value>
+
<value>${env.TEMPLETON_HOME}/../lib/zookeeper-${zookeeper.version}.jar,${env.TEMPLETON_HOME}/../lib/hive-common-${project.version}.jar</value>
 <description>Jars to add to the classpath.</description>
  </property>
 
@@ -87,7 +87,7 @@
 
  <property>
 <name>templeton.pig.path</name>
-<value>pig-0.11.1.tar.gz/pig-0.11.1/bin/pig</value>
+<value>pig-${pig.version}.tar.gz/pig-${pig.version}/bin/pig</value>
 

hive git commit: HIVE-10776 - Schema on insert for bucketed tables throwing NullPointerException(Eugene Koifman, reviewed by Alan Gates)

2015-05-22 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 95929308b - 0d93438a3


HIVE-10776 - Schema on insert for bucketed tables throwing 
NullPointerException(Eugene Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0d93438a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0d93438a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0d93438a

Branch: refs/heads/master
Commit: 0d93438a3543cb64cbe2ebcdc21e5b40c1dd86e6
Parents: 9592930
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 22 13:50:40 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 22 13:50:40 2015 -0700

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  9 +-
 .../clientpositive/insert_into_with_schema2.q   | 23 +
 .../insert_into_with_schema2.q.out  | 98 
 3 files changed, 126 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0d93438a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 4346f60..7f355e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3910,7 +3910,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
* @throws SemanticException
*/
   private void handleInsertStatementSpec(List<ExprNodeDesc> col_list, String 
dest,
- RowResolver out_rwsch, RowResolver 
inputRR, QB qb,
+ RowResolver outputRR, RowResolver 
inputRR, QB qb,
  ASTNode selExprList) throws 
SemanticException {
 //(z,x)
 List<String> targetTableSchema = 
qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
@@ -3932,7 +3932,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 Map<String, ColumnInfo> targetCol2ColumnInfo = new HashMap<String, 
ColumnInfo>();
 int colListPos = 0;
 for(String targetCol : targetTableSchema) {
-  targetCol2ColumnInfo.put(targetCol, 
out_rwsch.getColumnInfos().get(colListPos));
+  targetCol2ColumnInfo.put(targetCol, 
outputRR.getColumnInfos().get(colListPos));
   targetCol2Projection.put(targetCol, col_list.get(colListPos++));
 }
 Table target = qb.getMetaData().getDestTableForAlias(dest);
@@ -3976,16 +3976,17 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 t.setText("TOK_NULL");
 ExprNodeDesc exp = genExprNodeDesc(new ASTNode(t), inputRR, tcCtx);
 new_col_list.add(exp);
-final String tableAlias = "";//is this OK? this column doesn't come 
from any table
+final String tableAlias = null;//this column doesn't come from any 
table
 ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos),
   exp.getWritableObjectInspector(), tableAlias, false);
 newSchema.add(colInfo);
+outputRR.addMappingOnly(colInfo.getTabAlias(), 
colInfo.getInternalName(), colInfo);
   }
   colListPos++;
 }
 col_list.clear();
 col_list.addAll(new_col_list);
-out_rwsch.setRowSchema(new RowSchema(newSchema));
+outputRR.setRowSchema(new RowSchema(newSchema));
   }
   String recommendName(ExprNodeDesc exp, String colAlias) {
 if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/0d93438a/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
--
diff --git a/ql/src/test/queries/clientpositive/insert_into_with_schema2.q 
b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
new file mode 100644
index 0000000..b7c6b58
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
@@ -0,0 +1,23 @@
+-- SORT_QUERY_RESULTS;
+
+set hive.enforce.bucketing=true;
+
+create table studenttab10k (age2 int);
+insert into studenttab10k values(1);
+
+create table student_acid (age int, grade int)
+ clustered by (age) into 1 buckets;
+
+insert into student_acid(age) select * from studenttab10k;
+
+select * from student_acid;
+
+insert into student_acid(grade, age) select 3 g, * from studenttab10k;
+
+select * from student_acid;
+
+insert into student_acid(grade, age) values(20, 2);
+
+insert into student_acid(age) values(22);
+
+select * from student_acid;

http://git-wip-us.apache.org/repos/asf/hive/blob/0d93438a/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out

hive git commit: HIVE-10776 - Schema on insert for bucketed tables throwing NullPointerException

2015-05-22 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 7b89fad81 - 613b559f2


HIVE-10776 - Schema on insert for bucketed tables throwing NullPointerException


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/613b559f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/613b559f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/613b559f

Branch: refs/heads/branch-1.2
Commit: 613b559f2e1b027c64f265120c202f6af4138930
Parents: 7b89fad
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 22 15:14:54 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 22 15:14:54 2015 -0700

--
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  9 +-
 .../clientpositive/insert_into_with_schema2.q   | 23 +
 .../insert_into_with_schema2.q.out  | 98 
 3 files changed, 126 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/613b559f/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index bf889fc..50c57fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -3907,7 +3907,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
* @throws SemanticException
*/
   private void handleInsertStatementSpec(List<ExprNodeDesc> col_list, String 
dest,
- RowResolver out_rwsch, RowResolver 
inputRR, QB qb,
+ RowResolver outputRR, RowResolver 
inputRR, QB qb,
  ASTNode selExprList) throws 
SemanticException {
 //(z,x)
 List<String> targetTableSchema = 
qb.getParseInfo().getDestSchemaForClause(dest);//specified in the query
@@ -3929,7 +3929,7 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 Map<String, ColumnInfo> targetCol2ColumnInfo = new HashMap<String, 
ColumnInfo>();
 int colListPos = 0;
 for(String targetCol : targetTableSchema) {
-  targetCol2ColumnInfo.put(targetCol, 
out_rwsch.getColumnInfos().get(colListPos));
+  targetCol2ColumnInfo.put(targetCol, 
outputRR.getColumnInfos().get(colListPos));
   targetCol2Projection.put(targetCol, col_list.get(colListPos++));
 }
 Table target = qb.getMetaData().getDestTableForAlias(dest);
@@ -3973,16 +3973,17 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
 t.setText("TOK_NULL");
 ExprNodeDesc exp = genExprNodeDesc(new ASTNode(t), inputRR, tcCtx);
 new_col_list.add(exp);
-final String tableAlias = "";//is this OK? this column doesn't come 
from any table
+final String tableAlias = null;//this column doesn't come from any 
table
 ColumnInfo colInfo = new ColumnInfo(getColumnInternalName(colListPos),
   exp.getWritableObjectInspector(), tableAlias, false);
 newSchema.add(colInfo);
+outputRR.addMappingOnly(colInfo.getTabAlias(), 
colInfo.getInternalName(), colInfo);
   }
   colListPos++;
 }
 col_list.clear();
 col_list.addAll(new_col_list);
-out_rwsch.setRowSchema(new RowSchema(newSchema));
+outputRR.setRowSchema(new RowSchema(newSchema));
   }
   String recommendName(ExprNodeDesc exp, String colAlias) {
 if (!colAlias.startsWith(autogenColAliasPrfxLbl)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/613b559f/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
--
diff --git a/ql/src/test/queries/clientpositive/insert_into_with_schema2.q 
b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
new file mode 100644
index 0000000..b7c6b58
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/insert_into_with_schema2.q
@@ -0,0 +1,23 @@
+-- SORT_QUERY_RESULTS;
+
+set hive.enforce.bucketing=true;
+
+create table studenttab10k (age2 int);
+insert into studenttab10k values(1);
+
+create table student_acid (age int, grade int)
+ clustered by (age) into 1 buckets;
+
+insert into student_acid(age) select * from studenttab10k;
+
+select * from student_acid;
+
+insert into student_acid(grade, age) select 3 g, * from studenttab10k;
+
+select * from student_acid;
+
+insert into student_acid(grade, age) values(20, 2);
+
+insert into student_acid(age) values(22);
+
+select * from student_acid;

http://git-wip-us.apache.org/repos/asf/hive/blob/613b559f/ql/src/test/results/clientpositive/insert_into_with_schema2.q.out
--

hive git commit: HIVE-10658 - Insert with values clause may expose data that should be encrypted (Eugene Koifman, reviewed by Sergio Pena)

2015-05-22 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master b4d8c018b - 95929308b


HIVE-10658 - Insert with values clause may expose data that should be 
encrypted (Eugene Koifman, reviewed by Sergio Pena)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/95929308
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/95929308
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/95929308

Branch: refs/heads/master
Commit: 95929308b43bd741220eeca60896eadb92496510
Parents: b4d8c01
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 22 13:28:13 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 22 13:28:13 2015 -0700

--
 .../test/resources/testconfiguration.properties |  3 +-
 .../org/apache/hadoop/hive/ql/parse/QB.java | 19 ++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 64 --
 .../apache/hadoop/hive/ql/parse/TestIUD.java|  7 ++
 .../clientpositive/encryption_insert_values.q   | 15 +
 .../encryption_insert_partition_dynamic.q.out   |  6 +-
 .../encryption_insert_partition_static.q.out| 30 +
 .../encrypted/encryption_insert_values.q.out| 71 
 8 files changed, 194 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/95929308/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index b9d85f6..9e95d1b 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -350,7 +350,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_load_data_to_encrypted_tables.q, \
   encryption_unencrypted_nonhdfs_external_tables.q \
   encryption_move_tbl.q \
-  encryption_drop_table.q
+  encryption_drop_table.q \
+  encryption_insert_values.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/95929308/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
index 7f4d0ff..0ddc221 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -27,6 +28,7 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
@@ -55,6 +57,7 @@ public class QB {
   private boolean isAnalyzeRewrite;
   private CreateTableDesc tblDesc = null; // table descriptor of the final
   private CreateTableDesc directoryDesc = null ;
+  private List<Path> encryptedTargetTablePaths;
 
   // used by PTFs
   /*
@@ -387,4 +390,20 @@ public class QB {
 return havingClauseSubQueryPredicate;
   }
 
+  void addEncryptedTargetTablePath(Path p) {
+if(encryptedTargetTablePaths == null) {
+  encryptedTargetTablePaths = new ArrayList<>();
+}
+encryptedTargetTablePaths.add(p);
+  }
+  /**
+   * List of dbName.tblName of encrypted target tables of insert statement
+   * Used to support Insert ... values(...)
+   */
+  List<Path> getEncryptedTargetTablePaths() {
+if(encryptedTargetTablePaths == null) {
+  return Collections.emptyList();
+}
+return encryptedTargetTablePaths;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/95929308/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 086d9a2..4346f60 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -206,6 +206,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.InputFormat;

hive git commit: HIVE-10658 - Insert with values clause may expose data that should be encrypted

2015-05-22 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 9253f5a0d - 7b89fad81


HIVE-10658 - Insert with values clause may expose data that should be encrypted


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7b89fad8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7b89fad8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7b89fad8

Branch: refs/heads/branch-1.2
Commit: 7b89fad8107b678a27d26931d5d93d91e9544a5a
Parents: 9253f5a
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri May 22 15:05:06 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri May 22 15:05:06 2015 -0700

--
 .../test/resources/testconfiguration.properties |  3 +-
 .../org/apache/hadoop/hive/ql/parse/QB.java | 19 ++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 64 --
 .../apache/hadoop/hive/ql/parse/TestIUD.java|  7 ++
 .../clientpositive/encryption_insert_values.q   | 15 +
 .../encryption_insert_partition_dynamic.q.out   |  6 +-
 .../encryption_insert_partition_static.q.out|  6 +-
 .../encrypted/encryption_insert_values.q.out| 71 
 8 files changed, 182 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/7b89fad8/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index b9d85f6..9e95d1b 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -350,7 +350,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_load_data_to_encrypted_tables.q, \
   encryption_unencrypted_nonhdfs_external_tables.q \
   encryption_move_tbl.q \
-  encryption_drop_table.q
+  encryption_drop_table.q \
+  encryption_insert_values.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/7b89fad8/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
index 7f4d0ff..0ddc221 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.parse;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
@@ -27,6 +28,7 @@ import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
 
@@ -55,6 +57,7 @@ public class QB {
   private boolean isAnalyzeRewrite;
   private CreateTableDesc tblDesc = null; // table descriptor of the final
   private CreateTableDesc directoryDesc = null ;
+  private List<Path> encryptedTargetTablePaths;
 
   // used by PTFs
   /*
@@ -387,4 +390,20 @@ public class QB {
 return havingClauseSubQueryPredicate;
   }
 
+  void addEncryptedTargetTablePath(Path p) {
+if(encryptedTargetTablePaths == null) {
+  encryptedTargetTablePaths = new ArrayList<>();
+}
+encryptedTargetTablePaths.add(p);
+  }
+  /**
+   * List of dbName.tblName of encrypted target tables of insert statement
+   * Used to support Insert ... values(...)
+   */
+  List<Path> getEncryptedTargetTablePaths() {
+if(encryptedTargetTablePaths == null) {
+  return Collections.emptyList();
+}
+return encryptedTargetTablePaths;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/7b89fad8/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 675ad7a..bf889fc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -206,6 +206,7 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.shims.HadoopShims;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.InputFormat;
@@ -718,8 +719,19 @@ public class 

hive git commit: HIVE-10724 - WebHCat e2e test TestStreaming_5 fails on Windows (Deepesh Khandelwal via Eugene Koifman)

2015-05-21 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master b0e95070b - b4d8c018b


HIVE-10724 - WebHCat e2e test TestStreaming_5 fails on Windows (Deepesh 
Khandelwal via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b4d8c018
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b4d8c018
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b4d8c018

Branch: refs/heads/master
Commit: b4d8c018b64f43525f2f1dc84d6cabb4ca6dff24
Parents: b0e9507
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 21 11:37:01 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 21 11:38:04 2015 -0700

--
 hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b4d8c018/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
--
diff --git a/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf 
b/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
index 9b85796..eb37dd7 100644
--- a/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
+++ b/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
@@ -116,7 +116,7 @@ $cfg =
 'method' => 'POST',
 'url' => ':TEMPLETON_URL:/templeton/v1/mapreduce/streaming',
 'post_options' => 
['user.name=:UNAME:','input=:INPDIR_HDFS:/xml/file1.xml','input=:INPDIR_HDFS:/xml/file2.xml','output=:OUTDIR:/xmlout',
-'mapper=python 
:INPDIR_HDFS:/xmlmapper.py','reducer=python :INPDIR_HDFS:/xmlreducer.py', 
'inputreader=StreamXmlRecordReader,begin=xml,end=/xml'],
+'mapper=python 
:INPDIR_HDFS:/xmlmapper.py','reducer=python :INPDIR_HDFS:/xmlreducer.py', 
'inputreader=StreamXmlRecordReader,begin=xml,end=/xml'],
 'json_field_substr_match' => { 'id' => '\d+'},
 #results
 'status_code' => 200,



hive git commit: HIVE-10747 - Enable the cleanup of side effect for the Encryption related qfile test

2015-05-21 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 3a70b0a1e - 9253f5a0d


HIVE-10747 - Enable the cleanup of side effect for the Encryption related qfile 
test


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/9253f5a0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/9253f5a0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/9253f5a0

Branch: refs/heads/branch-1.2
Commit: 9253f5a0d9415144c1efd3edc487162438af1c57
Parents: 3a70b0a
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 21 18:04:58 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 21 18:04:58 2015 -0700

--
 .../java/org/apache/hadoop/hive/ql/QTestUtil.java | 14 ++
 1 file changed, 6 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/9253f5a0/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 925f1ee..3c4b8de 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -731,14 +731,12 @@ public class QTestUtil {
 clearTablesCreatedDuringTests();
 clearKeysCreatedInTests();
 
-if (clusterType != MiniClusterType.encrypted) {
-  // allocate and initialize a new conf since a test can
-  // modify conf by using 'set' commands
-  conf = new HiveConf (Driver.class);
-  initConf();
-  // renew the metastore since the cluster type is unencrypted
-  db = Hive.get(conf);  // propagate new conf to meta store
-}
+// allocate and initialize a new conf since a test can
+// modify conf by using 'set' commands
+conf = new HiveConf(Driver.class);
+initConf();
+// renew the metastore since the cluster type is unencrypted
+db = Hive.get(conf);  // propagate new conf to meta store
 
 setup.preTest(conf);
   }
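
[Editor's note] The point of the QTestUtil change above is that a .q test can
mutate the shared HiveConf through 'set' commands, so the conf (and the
metastore handle derived from it) must be rebuilt between tests for every
cluster type, not just the unencrypted ones. The leakage it prevents, reduced
to a toy example (all names illustrative):

import java.util.HashMap;
import java.util.Map;

public class FreshConfDemo {
  static Map<String, String> conf;

  static void initConf() {
    conf = new HashMap<>();
    conf.put("hive.enforce.bucketing", "false");  // stand-in default
  }

  public static void main(String[] args) {
    initConf();
    conf.put("hive.enforce.bucketing", "true");   // a test's 'set' command
    initConf();                                   // per-test cleanup, as in the patch
    System.out.println(conf.get("hive.enforce.bucketing"));  // back to false
  }
}
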



hive git commit: HIVE-10630 - Renaming tables across encryption zones renames table even though the operation throws error

2015-05-21 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 8d2dfe19f - 45490eab5


HIVE-10630 - Renaming tables across encryption zones renames table even though 
the operation throws error


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45490eab
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45490eab
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45490eab

Branch: refs/heads/branch-1.2
Commit: 45490eab58f924cdb58b4495b577e5b2a4e1c288
Parents: 8d2dfe1
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu May 21 17:40:12 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu May 21 17:40:12 2015 -0700

--
 .../test/resources/testconfiguration.properties |  3 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java| 10 -
 .../hadoop/hive/metastore/HiveAlterHandler.java | 28 ++---
 .../hadoop/hive/metastore/ObjectStore.java  |  2 +-
 .../clientpositive/encryption_move_tbl.q| 18 
 .../encrypted/encryption_move_tbl.q.out | 43 
 6 files changed, 96 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/45490eab/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index 252490b..a485408 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -348,7 +348,8 @@ encrypted.query.files=encryption_join_unencrypted_tbl.q,\
   encryption_select_read_only_encrypted_tbl.q,\
   encryption_select_read_only_unencrypted_tbl.q,\
   encryption_load_data_to_encrypted_tables.q, \
-  encryption_unencrypted_nonhdfs_external_tables.q
+  encryption_unencrypted_nonhdfs_external_tables.q \
+  encryption_move_tbl.q
 
 beeline.positive.exclude=add_part_exist.q,\
   alter1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/45490eab/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index d1104b3..9ce4e12 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1026,7 +1026,7 @@ public class QTestUtil {
 rc = cliDriver.processLine(command);
   }
 
-  if (rc != 0) {
+  if (rc != 0 && !ignoreErrors()) {
 break;
   }
   command = "";
@@ -1037,6 +1037,14 @@ public class QTestUtil {
 return rc;
   }
 
+  /**
+   * This allows a .q file to continue executing after a statement runs into 
an error which is convenient
+   * if you want to use another hive cmd after the failure to sanity check the 
state of the system.
+   */
+  private boolean ignoreErrors() {
+return conf.getBoolVar(HiveConf.ConfVars.CLIIGNOREERRORS);
+  }
+
   private boolean isHiveCommand(String command) {
 String[] cmd = command.trim().split("\\s+");
 if (HiveCommand.find(cmd) != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/45490eab/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index d0351da..5391171 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -45,6 +45,7 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hive.common.util.HiveStringUtils;
 
 import com.google.common.collect.Lists;
@@ -260,17 +261,18 @@ public class HiveAlterHandler implements AlterHandler {
 // rename the src to destination
 try {
   if (srcFs.exists(srcPath) && !srcFs.rename(srcPath, destPath)) {
-throw new IOException("Renaming " + srcPath + " to " + destPath + 
" is failed");
+throw new IOException("Renaming " + srcPath + " to " + destPath + 
" failed");
   }
 } catch (IOException e) {
+  LOG.error("Alter Table operation for " + dbname + "." + name + " 
failed.", e);
   boolean revertMetaDataTransaction = false;
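
[Editor's note] The ignoreErrors() hook added to QTestUtil above lets a .q
script keep executing after a failing statement, so a later statement can
sanity-check what state the failure left behind (as encryption_move_tbl.q does).
A sketch of the resulting control flow (simplified; the executor and flag below
are illustrative):

import java.util.Arrays;
import java.util.List;

public class IgnoreErrorsDemo {
  static boolean ignoreErrors = true;  // maps to the CLIIGNOREERRORS conf var

  static int processLine(String cmd) {
    return cmd.startsWith("fail") ? 1 : 0;  // fake statement executor
  }

  public static void main(String[] args) {
    List<String> script =
        Arrays.asList("alter table t rename to u", "fail: cross-zone rename", "show tables");
    int rc = 0;
    for (String command : script) {
      rc = processLine(command);
      if (rc != 0 && !ignoreErrors) {
        break;  // old behavior: stop at the first error
      }
    }
    // With ignoreErrors set, "show tables" still ran after the failure.
    System.out.println("last rc=" + rc);
  }
}
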
 

hive git commit: HIVE-11140 auto compute PROJ_HOME in hcatalog/src/test/e2e/templeton/deployers/env.sh (Eugene Koifman, reviewed by Thejas Nair)

2015-06-30 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 994d98c09 - ffce22589


HIVE-11140 auto compute PROJ_HOME in 
hcatalog/src/test/e2e/templeton/deployers/env.sh (Eugene Koifman, reviewed by 
Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ffce2258
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ffce2258
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ffce2258

Branch: refs/heads/master
Commit: ffce225896d73ac0af1af4afb9d5bef53699ab37
Parents: 994d98c
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Jun 30 15:11:43 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Jun 30 15:11:43 2015 -0700

--
 hcatalog/src/test/e2e/templeton/deployers/env.sh | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ffce2258/hcatalog/src/test/e2e/templeton/deployers/env.sh
--
diff --git a/hcatalog/src/test/e2e/templeton/deployers/env.sh 
b/hcatalog/src/test/e2e/templeton/deployers/env.sh
index 8b719f2..804bdd4 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/env.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/env.sh
@@ -42,9 +42,8 @@ if [ -z ${TEZ_VERSION} ]; then
 fi
 
 #Root of project source tree
-if [ -z ${PROJ_HOME} ]; then
-  export PROJ_HOME=/Users/${USER}/dev/hive
-fi
+current_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../../../../.. && pwd )
+export PROJ_HOME=`dirname $current_dir`
 export 
HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
 
 if [ -z ${HADOOP_HOME} ]; then
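
[Editor's note] The env.sh change above derives PROJ_HOME from the script's own
location (five directory levels up from deployers/, then one more dirname)
instead of a hard-coded /Users/${USER}/dev/hive. The same normalization,
expressed in Java for consistency with the other sketches in these notes (the
path literal is only an example):

import java.nio.file.Path;
import java.nio.file.Paths;

public class ProjHomeDemo {
  public static void main(String[] args) {
    // stand-in for ${BASH_SOURCE[0]}: wherever env.sh actually lives
    Path script = Paths.get("/home/user/hive/hcatalog/src/test/e2e/templeton/deployers/env.sh");
    // dirname + /../../../../.. + normalize, like the cd/pwd trick in env.sh
    Path currentDir = script.getParent().resolve("../../../../..").normalize();
    Path projHome = currentDir.getParent();  // the final `dirname $current_dir`
    System.out.println(projHome);  // /home/user/hive
  }
}
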



hive git commit: HIVE-11140 auto compute PROJ_HOME in hcatalog/src/test/e2e/templeton/deployers/env.sh (Eugene Koifman, reviewed by Thejas Nair)

2015-06-30 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 ad54cce7d - ed36b0230


HIVE-11140 auto compute PROJ_HOME in 
hcatalog/src/test/e2e/templeton/deployers/env.sh (Eugene Koifman, reviewed by 
Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ed36b023
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ed36b023
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ed36b023

Branch: refs/heads/branch-1
Commit: ed36b02302384ef8eb6bca25e81d0b54f5042a49
Parents: ad54cce
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Jun 30 15:35:53 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Jun 30 15:35:53 2015 -0700

--
 hcatalog/src/test/e2e/templeton/deployers/env.sh | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/ed36b023/hcatalog/src/test/e2e/templeton/deployers/env.sh
--
diff --git a/hcatalog/src/test/e2e/templeton/deployers/env.sh 
b/hcatalog/src/test/e2e/templeton/deployers/env.sh
index 8b719f2..804bdd4 100755
--- a/hcatalog/src/test/e2e/templeton/deployers/env.sh
+++ b/hcatalog/src/test/e2e/templeton/deployers/env.sh
@@ -42,9 +42,8 @@ if [ -z ${TEZ_VERSION} ]; then
 fi
 
 #Root of project source tree
-if [ -z ${PROJ_HOME} ]; then
-  export PROJ_HOME=/Users/${USER}/dev/hive
-fi
+current_dir=$( cd "$( dirname "${BASH_SOURCE[0]}" )"/../../../../.. && pwd )
+export PROJ_HOME=`dirname $current_dir`
 export 
HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
 
 if [ -z ${HADOOP_HOME} ]; then



hive git commit: HIVE-11425 - submitting a query via CLI against... (Eugene Koifman, reviewed by Prasanth Jayachandran)

2015-07-31 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 8c0016aa6 - bc528ba35


HIVE-11425 - submitting a query via CLI against... (Eugene Koifman, reviewed by 
Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bc528ba3
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bc528ba3
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bc528ba3

Branch: refs/heads/master
Commit: bc528ba35d58af61f4d854003d99af50818f909a
Parents: 8c0016a
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Fri Jul 31 13:20:23 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Fri Jul 31 13:22:14 2015 -0700

--
 ql/pom.xml | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bc528ba3/ql/pom.xml
--
diff --git a/ql/pom.xml b/ql/pom.xml
index 6026c49..fc66591 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -707,6 +707,7 @@
   <include>org.apache.hive.shims:hive-shims-0.23</include>
   <include>org.apache.hive.shims:hive-shims-0.23</include>
   <include>org.apache.hive.shims:hive-shims-common</include>
+  <include>org.apache.hive:hive-storage-api</include>
   <include>com.googlecode.javaewah:JavaEWAH</include>
   <include>javolution:javolution</include>
   <include>com.google.protobuf:protobuf-java</include>



hive git commit: HIVE-11087 - DbTxnManager exceptions should include txnid (Eugene Koifman, reviewed by Alan Gates)

2015-08-06 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 8398fbf3d - d9663b586


HIVE-11087 - DbTxnManager exceptions should include txnid (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d9663b58
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d9663b58
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d9663b58

Branch: refs/heads/master
Commit: d9663b5866d5da66ba7bd8ee83d18fa711d67a6c
Parents: 8398fbf
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu Aug 6 11:13:55 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu Aug 6 11:13:55 2015 -0700

--
 .../apache/hadoop/hive/common/JavaUtils.java| 11 +++-
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 25 -
 .../org/apache/hadoop/hive/ql/ErrorMsg.java | 10 ++--
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   | 12 ++---
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java| 37 ++---
 .../hive/ql/lockmgr/TestDbTxnManager.java   | 55 ++--
 .../hive/ql/lockmgr/TestDbTxnManager2.java  |  2 +-
 7 files changed, 105 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d9663b58/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 3dd8f75..dc3a4ae 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -137,12 +137,19 @@ public final class JavaUtils {
   }
 
   /**
-   * Utility method for ACID to normalize logging info
-   * @param extLockId LockResponse.lockid
+   * Utility method for ACID to normalize logging info.  Matches
+   * {@link org.apache.hadoop.hive.metastore.api.LockRequest#toString()}
*/
   public static String lockIdToString(long extLockId) {
 return "lockid:" + extLockId;
   }
+  /**
+   * Utility method for ACID to normalize logging info.  Matches
+   * {@link org.apache.hadoop.hive.metastore.api.LockResponse#toString()}
+   */
+  public static String txnIdToString(long txnId) {
+return "txnid:" + txnId;
+  }
 
   private JavaUtils() {
 // prevent instantiation

http://git-wip-us.apache.org/repos/asf/hive/blob/d9663b58/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index c0e83c6..88e007c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -339,7 +339,7 @@ public class TxnHandler {
 if (abortTxns(dbConn, Collections.singletonList(txnid)) != 1) {
   LOG.debug("Going to rollback");
   dbConn.rollback();
-  throw new NoSuchTxnException("No such transaction: " + txnid);
+  throw new NoSuchTxnException("No such transaction " + 
JavaUtils.txnIdToString(txnid));
 }
 
 LOG.debug("Going to commit");
@@ -382,7 +382,7 @@ public class TxnHandler {
 if (stmt.executeUpdate(s) < 1) {
   //this can be reasonable for an empty txn START/COMMIT
   LOG.info("Expected to move at least one record from txn_components 
to " +
-"completed_txn_components when committing txn! txnid:" + txnid);
+"completed_txn_components when committing txn! " + 
JavaUtils.txnIdToString(txnid));
 }
 
 // Always access TXN_COMPONENTS before HIVE_LOCKS;
@@ -508,8 +508,8 @@ public class TxnHandler {
   LOG.debug("Going to rollback");
   dbConn.rollback();
   String msg = "Unlocking locks associated with transaction" +
-" not permitted.  Lockid " + extLockId + " is associated with " +
-"transaction " + txnid;
+" not permitted.  Lockid " + JavaUtils.lockIdToString(extLockId) + 
" is associated with " +
+"transaction " + JavaUtils.txnIdToString(txnid);
   LOG.error(msg);
   throw new TxnOpenException(msg);
 }
@@ -520,7 +520,7 @@ public class TxnHandler {
 if (rc < 1) {
   LOG.debug("Going to rollback");
   dbConn.rollback();
-  throw new NoSuchLockException("No such lock: " + extLockId);
+  throw new NoSuchLockException("No such lock " + 
JavaUtils.lockIdToString(extLockId));
 }
 LOG.debug("Going to commit");
 dbConn.commit();
@@ -1175,8 +1175,8 @@ public class TxnHandler {
 @Override
 public String toString() 
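
[Editor's note] The purpose of lockIdToString()/txnIdToString() above is that
every log line and exception message renders ids in exactly the form the thrift
objects' toString() uses, so grepping the logs for, say, "txnid:42" finds every
mention of that transaction. A tiny sketch of the normalized formatting:

public class IdLogDemo {
  static String lockIdToString(long extLockId) { return "lockid:" + extLockId; }
  static String txnIdToString(long txnId)      { return "txnid:" + txnId; }

  public static void main(String[] args) {
    long txnid = 42, lockid = 7;
    // every message builds on the same normalized tokens:
    System.out.println("No such transaction " + txnIdToString(txnid));
    System.out.println("Unlocking locks associated with transaction"
        + " not permitted.  Lockid " + lockIdToString(lockid)
        + " is associated with transaction " + txnIdToString(txnid));
  }
}
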

hive git commit: HIVE-11087 - DbTxnManager exceptions should include txnid (Eugene Koifman, reviewed by Alan Gates)

2015-08-06 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 a59e81c31 - b8418b861


HIVE-11087 - DbTxnManager exceptions should include txnid (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b8418b86
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b8418b86
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b8418b86

Branch: refs/heads/branch-1
Commit: b8418b861250972c1de09236f64f1fef1628edc0
Parents: a59e81c
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Thu Aug 6 11:22:15 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Thu Aug 6 11:22:15 2015 -0700

--
 .../apache/hadoop/hive/common/JavaUtils.java| 11 +++-
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 25 -
 .../org/apache/hadoop/hive/ql/ErrorMsg.java | 10 ++--
 .../hadoop/hive/ql/lockmgr/DbLockManager.java   | 12 ++---
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java| 37 ++---
 .../hive/ql/lockmgr/TestDbTxnManager.java   | 55 ++--
 .../hive/ql/lockmgr/TestDbTxnManager2.java  |  2 +-
 7 files changed, 105 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b8418b86/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java 
b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
index 3dd8f75..dc3a4ae 100644
--- a/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
@@ -137,12 +137,19 @@ public final class JavaUtils {
   }
 
   /**
-   * Utility method for ACID to normalize logging info
-   * @param extLockId LockResponse.lockid
+   * Utility method for ACID to normalize logging info.  Matches
+   * {@link org.apache.hadoop.hive.metastore.api.LockRequest#toString()}
*/
   public static String lockIdToString(long extLockId) {
 return "lockid:" + extLockId;
   }
+  /**
+   * Utility method for ACID to normalize logging info.  Matches
+   * {@link org.apache.hadoop.hive.metastore.api.LockResponse#toString()}
+   */
+  public static String txnIdToString(long txnId) {
+return "txnid:" + txnId;
+  }
 
   private JavaUtils() {
 // prevent instantiation

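A quick illustration, not part of the patch, of why the helpers matter: every ACID error and log line now spells ids the same way, so one grep form finds them all. Message shapes, with the id values assumed:

// illustrative only: message shapes produced via the new helpers
long txnid = 42, extLockId = 7;
String txnMsg  = "No such transaction " + JavaUtils.txnIdToString(txnid);
String lockMsg = "No such lock " + JavaUtils.lockIdToString(extLockId);
// txnMsg  -> "No such transaction txnid:42"
// lockMsg -> "No such lock lockid:7"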
http://git-wip-us.apache.org/repos/asf/hive/blob/b8418b86/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index c0e83c6..88e007c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -339,7 +339,7 @@ public class TxnHandler {
 if (abortTxns(dbConn, Collections.singletonList(txnid)) != 1) {
   LOG.debug("Going to rollback");
   dbConn.rollback();
-  throw new NoSuchTxnException("No such transaction: " + txnid);
+  throw new NoSuchTxnException("No such transaction " + 
JavaUtils.txnIdToString(txnid));
 }
 
 LOG.debug("Going to commit");
@@ -382,7 +382,7 @@ public class TxnHandler {
 if (stmt.executeUpdate(s) < 1) {
   //this can be reasonable for an empty txn START/COMMIT
   LOG.info("Expected to move at least one record from txn_components 
to " +
-"completed_txn_components when committing txn! txnid:" + txnid);
+"completed_txn_components when committing txn! " + 
JavaUtils.txnIdToString(txnid));
 }
 
 // Always access TXN_COMPONENTS before HIVE_LOCKS;
@@ -508,8 +508,8 @@ public class TxnHandler {
   LOG.debug("Going to rollback");
   dbConn.rollback();
   String msg = "Unlocking locks associated with transaction" +
-" not permitted.  Lockid " + extLockId + " is associated with " +
-"transaction " + txnid;
+" not permitted.  Lockid " + JavaUtils.lockIdToString(extLockId) + 
" is associated with " +
+"transaction " + JavaUtils.txnIdToString(txnid);
   LOG.error(msg);
   throw new TxnOpenException(msg);
 }
@@ -520,7 +520,7 @@ public class TxnHandler {
 if (rc < 1) {
   LOG.debug("Going to rollback");
   dbConn.rollback();
-  throw new NoSuchLockException("No such lock: " + extLockId);
+  throw new NoSuchLockException("No such lock " + 
JavaUtils.lockIdToString(extLockId));
 }
 LOG.debug("Going to commit");
 dbConn.commit();
@@ -1175,8 +1175,8 @@ public class TxnHandler {
 @Override
 public String 

hive git commit: HIVE-11317 - ACID: Improve transaction Abort logic due to timeout (Eugene Koifman, reviewed by Alan Gates)

2015-08-15 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master e8329ee00 -> 97a6cd35a


HIVE-11317 - ACID: Improve transaction Abort logic due to timeout (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/97a6cd35
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/97a6cd35
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/97a6cd35

Branch: refs/heads/master
Commit: 97a6cd35a444315885008f11c20c7c28249bd42c
Parents: e8329ee
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Aug 15 10:22:55 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Aug 15 10:22:55 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   4 +
 .../deployers/config/hive/hive-site.mysql.xml   |  22 +++
 .../hive/hcatalog/streaming/TestStreaming.java  |  54 ++-
 .../hadoop/hive/metastore/HiveMetaStore.java|  18 +++
 .../hive/metastore/HouseKeeperService.java  |  39 +
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 153 +--
 .../hive/metastore/txn/TestTxnHandler.java  |   7 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  15 +-
 .../hive/ql/txn/AcidHouseKeeperService.java | 104 +
 .../hive/ql/txn/compactor/CompactorMR.java  |   6 +-
 .../hadoop/hive/ql/txn/compactor/Initiator.java |   1 +
 .../hadoop/hive/ql/txn/compactor/Worker.java|   2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  |  21 +++
 .../apache/hadoop/hive/ql/TestTxnCommands2.java |   1 +
 .../hive/ql/lockmgr/TestDbTxnManager.java   |  35 +++--
 15 files changed, 421 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/97a6cd35/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 730f5be..9a6781b 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1507,6 +1507,10 @@ public class HiveConf extends Configuration {
 
 HIVE_COMPACTOR_CLEANER_RUN_INTERVAL("hive.compactor.cleaner.run.interval", 
"5000ms",
 new TimeValidator(TimeUnit.MILLISECONDS), "Time between runs of the 
cleaner thread"),
+HIVE_TIMEDOUT_TXN_REAPER_START("hive.timedout.txn.reaper.start", "100s",
+  new TimeValidator(TimeUnit.MILLISECONDS), "Time delay of 1st reaper run 
after metastore start"),
+HIVE_TIMEDOUT_TXN_REAPER_INTERVAL("hive.timedout.txn.reaper.interval", 
"180s",
+  new TimeValidator(TimeUnit.MILLISECONDS), "Time interval describing how 
often the reaper runs"),
 
 // For HBase storage handler
 HIVE_HBASE_WAL_ENABLED("hive.hbase.wal.enabled", true,

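As an aside, both new settings are TimeValidator-backed, so values carry a unit suffix and the caller picks the unit on read. A minimal sketch of reading them back, assuming a HiveConf built from a site file like the one patched below:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hive.conf.HiveConf;

public class ReaperConfPeek {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // "100s"/"180s" style values are converted to the requested unit
    long startMs = conf.getTimeVar(
        HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_START, TimeUnit.MILLISECONDS);
    long intervalMs = conf.getTimeVar(
        HiveConf.ConfVars.HIVE_TIMEDOUT_TXN_REAPER_INTERVAL, TimeUnit.MILLISECONDS);
    System.out.println("reaper start=" + startMs + "ms, interval=" + intervalMs + "ms");
  }
}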
http://git-wip-us.apache.org/repos/asf/hive/blob/97a6cd35/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
--
diff --git 
a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml 
b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
index 70ccc31..b6f1ab7 100644
--- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
@@ -62,6 +62,28 @@
 <name>hive.exec.dynamic.partition.mode</name>
 <value>nonstrict</value>
 </property>
+<property>
+<name>hive.compactor.initiator.on</name>
+<value>false</value>
+</property>
+<property>
+<name>hive.compactor.worker.threads</name>
+<value>2</value>
+</property>
+<property>
+<name>hive.timedout.txn.reaper.start</name>
+<value>2s</value>
+</property>
+<!--property
+<name>hive.txn.timeout</name>
+<value>60s</value>
+</property>
+-->
+<property>
+<name>hive.timedout.txn.reaper.interval</name>
+<value>30s</value>
+</property>
+
 <!--end ACID related properties-->
 <!--
 <property>

http://git-wip-us.apache.org/repos/asf/hive/blob/97a6cd35/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
--
diff --git 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index c0af533..c28d4aa 100644
--- 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -18,7 +18,6 @@
 
 package org.apache.hive.hcatalog.streaming;
 
-import junit.framework.Assert;
 import org.apache.hadoop.fs.FileStatus;
 import 

hive git commit: HIVE-11317 - ACID: Improve transaction Abort logic due to timeout (Eugene Koifman, reviewed by Alan Gates)

2015-08-15 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 a6e7cfa9e -> 738709117


HIVE-11317 - ACID: Improve transaction Abort logic due to timeout (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/73870911
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/73870911
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/73870911

Branch: refs/heads/branch-1
Commit: 738709117f046744730648c4c6df6c2af0465969
Parents: a6e7cfa
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Sat Aug 15 10:36:47 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Sat Aug 15 10:36:47 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   4 +
 .../deployers/config/hive/hive-site.mysql.xml   |  22 +++
 .../hive/hcatalog/streaming/TestStreaming.java  |  54 ++-
 .../hadoop/hive/metastore/HiveMetaStore.java|  18 +++
 .../hive/metastore/HouseKeeperService.java  |  39 +
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 153 +--
 .../hive/metastore/txn/TestTxnHandler.java  |   7 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  15 +-
 .../hive/ql/txn/AcidHouseKeeperService.java | 104 +
 .../hive/ql/txn/compactor/CompactorMR.java  |   6 +-
 .../hadoop/hive/ql/txn/compactor/Initiator.java |   1 +
 .../hadoop/hive/ql/txn/compactor/Worker.java|   2 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  |  21 +++
 .../apache/hadoop/hive/ql/TestTxnCommands2.java |   1 +
 .../hive/ql/lockmgr/TestDbTxnManager.java   |  35 +++--
 15 files changed, 421 insertions(+), 61 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/73870911/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 95aaf55..acc72c8 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1507,6 +1507,10 @@ public class HiveConf extends Configuration {
 
 HIVE_COMPACTOR_CLEANER_RUN_INTERVAL("hive.compactor.cleaner.run.interval", 
"5000ms",
 new TimeValidator(TimeUnit.MILLISECONDS), "Time between runs of the 
cleaner thread"),
+HIVE_TIMEDOUT_TXN_REAPER_START("hive.timedout.txn.reaper.start", "100s",
+  new TimeValidator(TimeUnit.MILLISECONDS), "Time delay of 1st reaper run 
after metastore start"),
+HIVE_TIMEDOUT_TXN_REAPER_INTERVAL("hive.timedout.txn.reaper.interval", 
"180s",
+  new TimeValidator(TimeUnit.MILLISECONDS), "Time interval describing how 
often the reaper runs"),
 
 // For HBase storage handler
 HIVE_HBASE_WAL_ENABLED("hive.hbase.wal.enabled", true,

http://git-wip-us.apache.org/repos/asf/hive/blob/73870911/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
--
diff --git 
a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml 
b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
index 70ccc31..b6f1ab7 100644
--- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mysql.xml
@@ -62,6 +62,28 @@
 <name>hive.exec.dynamic.partition.mode</name>
 <value>nonstrict</value>
 </property>
+<property>
+<name>hive.compactor.initiator.on</name>
+<value>false</value>
+</property>
+<property>
+<name>hive.compactor.worker.threads</name>
+<value>2</value>
+</property>
+<property>
+<name>hive.timedout.txn.reaper.start</name>
+<value>2s</value>
+</property>
+<!--property
+<name>hive.txn.timeout</name>
+<value>60s</value>
+</property>
+-->
+<property>
+<name>hive.timedout.txn.reaper.interval</name>
+<value>30s</value>
+</property>
+
 <!--end ACID related properties-->
 <!--
 <property>

http://git-wip-us.apache.org/repos/asf/hive/blob/73870911/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
--
diff --git 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
index c0af533..c28d4aa 100644
--- 
a/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
+++ 
b/hcatalog/streaming/src/test/org/apache/hive/hcatalog/streaming/TestStreaming.java
@@ -18,7 +18,6 @@
 
 package org.apache.hive.hcatalog.streaming;
 
-import junit.framework.Assert;
 import org.apache.hadoop.fs.FileStatus;
 

[2/2] hive git commit: HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, reviewed by Alan Gates)

2015-07-22 Thread ekoifman
HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e57c3602
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e57c3602
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e57c3602

Branch: refs/heads/master
Commit: e57c3602b831340519d5d004cf4119da2f3e7ef8
Parents: 2240dbd
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed Jul 22 12:44:40 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed Jul 22 12:44:40 2015 -0700

--
 .../hadoop/hive/cli/TestOptionsProcessor.java   |   1 -
 .../hadoop/hive/common/ValidReadTxnList.java|   2 +-
 .../hadoop/hive/metastore/txn/TxnHandler.java   |   9 +-
 .../metastore/txn/ValidCompactorTxnList.java|   2 +-
 .../java/org/apache/hadoop/hive/ql/Context.java |   1 -
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 196 +---
 .../org/apache/hadoop/hive/ql/ErrorMsg.java |   2 +-
 .../org/apache/hadoop/hive/ql/QueryPlan.java|  18 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|   2 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  36 +-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |   8 +
 .../hadoop/hive/ql/lockmgr/HiveTxnManager.java  |  21 +
 .../hive/ql/lockmgr/HiveTxnManagerImpl.java |  10 +
 .../hadoop/hive/ql/lockmgr/LockException.java   |   8 +-
 .../hadoop/hive/ql/metadata/HiveException.java  |   3 +
 .../hive/ql/parse/BaseSemanticAnalyzer.java |  13 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |  11 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|  70 +++
 .../hadoop/hive/ql/parse/IdentifiersParser.g|  19 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  25 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |  12 +
 .../hadoop/hive/ql/plan/HiveOperation.java  |  32 +-
 .../ql/processors/CommandProcessorResponse.java |  21 +-
 .../hadoop/hive/ql/processors/HiveCommand.java  |   3 +
 .../authorization/plugin/HiveOperationType.java |   5 +
 .../plugin/sqlstd/Operation2Privilege.java  |  11 +
 .../hadoop/hive/ql/session/SessionState.java|  34 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  | 473 +++
 .../positive/TestTransactionStatement.java  | 102 
 .../hive/ql/session/TestSessionState.java   |   2 +-
 .../clientnegative/exchange_partition.q.out |   2 +-
 .../clientpositive/exchange_partition.q.out |   4 +-
 .../clientpositive/exchange_partition2.q.out|   4 +-
 .../clientpositive/exchange_partition3.q.out|   4 +-
 34 files changed, 1020 insertions(+), 146 deletions(-)
--


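For orientation, not part of the commit mail itself: the new statements can be parsed standalone through ParseDriver, exactly as the TestTransactionStatement file later in this message does. A minimal sketch, assuming an active SessionState as in the test setup:

ParseDriver pd = new ParseDriver();
// the parser adds a wrapper root node, hence getChild(0)
ASTNode ast = (ASTNode) pd.parse("START TRANSACTION READ ONLY").getChild(0);
// ast.toStringTree() ->
//   "(TOK_START_TRANSACTION (TOK_TXN_ACCESS_MODE TOK_TXN_READ_ONLY))"
ast = (ASTNode) pd.parse("COMMIT WORK").getChild(0);  // -> "TOK_COMMIT"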
http://git-wip-us.apache.org/repos/asf/hive/blob/e57c3602/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
--
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java 
b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
index 9d0399a..ac22ab1 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
@@ -56,7 +56,6 @@ public class TestOptionsProcessor {
 assertEquals(execString, sessionState.execString);
 assertEquals(0, sessionState.initFiles.size());
 assertTrue(sessionState.getIsVerbose());
-sessionState.setConf(null);
 assertTrue(sessionState.getIsSilent());
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/e57c3602/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java 
b/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
index 479e0df..fda242d 100644
--- a/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
+++ b/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
@@ -105,7 +105,7 @@ public class ValidReadTxnList implements ValidTxnList {
 
   @Override
   public void readFromString(String src) {
-if (src == null) {
+if (src == null || src.length() == 0) {
   highWatermark = Long.MAX_VALUE;
   exceptions = new long[0];
 } else {

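The effect of that one-line change, sketched outside the diff (the no-arg constructor is assumed; previously an empty string fell into the else branch and failed to parse):

ValidReadTxnList txns = new ValidReadTxnList();
txns.readFromString("");  // now treated the same as null input
// highWatermark == Long.MAX_VALUE and no exceptions: every txn is considered valid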
http://git-wip-us.apache.org/repos/asf/hive/blob/e57c3602/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index fd9c275..c0e83c6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java

[1/2] hive git commit: HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, reviewed by Alan Gates)

2015-07-22 Thread ekoifman
Repository: hive
Updated Branches:
Updated Branches:
  refs/heads/master 2240dbd6d -> e57c3602b


http://git-wip-us.apache.org/repos/asf/hive/blob/e57c3602/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
new file mode 100644
index 000..b7f8263
--- /dev/null
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.positive;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Basic parser tests for multi-statement transactions
+ */
+public class TestTransactionStatement {
+  private static SessionState sessionState;
+  private ParseDriver pd;
+
+  @BeforeClass
+  public static void initialize() {
+HiveConf conf = new HiveConf(SemanticAnalyzer.class);
+sessionState = SessionState.start(conf);
+  }
+  @AfterClass
+  public static void cleanUp() throws IOException {
+if(sessionState != null) {
+  sessionState.close();
+}
+  }
+
+  @Before
+  public void setup() throws SemanticException {
+pd = new ParseDriver();
+  }
+
+  ASTNode parse(String query) throws ParseException {
+ASTNode nd = pd.parse(query);
+return (ASTNode) nd.getChild(0);
+  }
+  @Test
+  public void testTxnStart() throws ParseException {
+ASTNode ast = parse("START TRANSACTION");
+Assert.assertEquals("AST doesn't match",
+  "TOK_START_TRANSACTION", ast.toStringTree());
+
+ast = parse("START TRANSACTION ISOLATION LEVEL SNAPSHOT");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_ISOLATION_LEVEL TOK_ISOLATION_SNAPSHOT))", 
ast.toStringTree());
+
+ast = parse("START TRANSACTION READ ONLY");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_TXN_ACCESS_MODE TOK_TXN_READ_ONLY))", 
ast.toStringTree());
+
+ast = parse("START TRANSACTION READ WRITE, ISOLATION LEVEL SNAPSHOT");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_TXN_ACCESS_MODE TOK_TXN_READ_WRITE) 
(TOK_ISOLATION_LEVEL TOK_ISOLATION_SNAPSHOT))", ast.toStringTree());
+
+  }
+  @Test
+  public void testTxnCommitRollback() throws ParseException {
+ASTNode ast = parse("COMMIT");
+Assert.assertEquals("AST doesn't match", "TOK_COMMIT", ast.toStringTree());
+ast = parse("COMMIT WORK");
+Assert.assertEquals("AST doesn't match", "TOK_COMMIT", ast.toStringTree());
+ast = parse("ROLLBACK");
+Assert.assertEquals("AST doesn't match", "TOK_ROLLBACK", 
ast.toStringTree());
+ast = parse("ROLLBACK WORK");
+Assert.assertEquals("AST doesn't match", "TOK_ROLLBACK", 
ast.toStringTree());
+  }
+  
+  @Test
+  public void testAutoCommit() throws ParseException {
+ASTNode ast = parse("SET AUTOCOMMIT TRUE");
+Assert.assertEquals("AST doesn't match", "(TOK_SET_AUTOCOMMIT TOK_TRUE)", 
ast.toStringTree());
+ast = parse("SET AUTOCOMMIT FALSE");
+Assert.assertEquals("AST doesn't match", "(TOK_SET_AUTOCOMMIT TOK_FALSE)", 
ast.toStringTree());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/e57c3602/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java 
b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
index 9e16c0c..70985b3 100644
--- 

[2/2] hive git commit: HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, reviewed by Alan Gates)

2015-07-22 Thread ekoifman
HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/012c99ff
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/012c99ff
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/012c99ff

Branch: refs/heads/branch-1
Commit: 012c99ff22f3f6978bd4f520716cb6d26ab1138a
Parents: 8e8e391
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Wed Jul 22 12:55:09 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Wed Jul 22 12:55:09 2015 -0700

--
 .../hadoop/hive/cli/TestOptionsProcessor.java   |   1 -
 .../hadoop/hive/common/ValidReadTxnList.java|   2 +-
 .../hadoop/hive/metastore/txn/TxnHandler.java   |   9 +-
 .../metastore/txn/ValidCompactorTxnList.java|   2 +-
 .../java/org/apache/hadoop/hive/ql/Context.java |   1 -
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 196 +---
 .../org/apache/hadoop/hive/ql/ErrorMsg.java |   2 +-
 .../org/apache/hadoop/hive/ql/QueryPlan.java|  18 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|   2 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  36 +-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |   8 +
 .../hadoop/hive/ql/lockmgr/HiveTxnManager.java  |  21 +
 .../hive/ql/lockmgr/HiveTxnManagerImpl.java |  10 +
 .../hadoop/hive/ql/lockmgr/LockException.java   |   8 +-
 .../hadoop/hive/ql/metadata/HiveException.java  |   3 +
 .../hive/ql/parse/BaseSemanticAnalyzer.java |  13 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |  11 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|  70 +++
 .../hadoop/hive/ql/parse/IdentifiersParser.g|  19 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  25 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java  |  12 +
 .../hadoop/hive/ql/plan/HiveOperation.java  |  32 +-
 .../ql/processors/CommandProcessorResponse.java |  21 +-
 .../hadoop/hive/ql/processors/HiveCommand.java  |   3 +
 .../authorization/plugin/HiveOperationType.java |   5 +
 .../plugin/sqlstd/Operation2Privilege.java  |  11 +
 .../hadoop/hive/ql/session/SessionState.java|  34 +-
 .../apache/hadoop/hive/ql/TestTxnCommands.java  | 473 +++
 .../positive/TestTransactionStatement.java  | 102 
 .../hive/ql/session/TestSessionState.java   |   2 +-
 .../clientnegative/exchange_partition.q.out |   2 +-
 .../clientpositive/exchange_partition.q.out |   4 +-
 .../clientpositive/exchange_partition2.q.out|   4 +-
 .../clientpositive/exchange_partition3.q.out|   4 +-
 34 files changed, 1020 insertions(+), 146 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/012c99ff/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
--
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java 
b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
index 9d0399a..ac22ab1 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestOptionsProcessor.java
@@ -56,7 +56,6 @@ public class TestOptionsProcessor {
 assertEquals(execString, sessionState.execString);
 assertEquals(0, sessionState.initFiles.size());
 assertTrue(sessionState.getIsVerbose());
-sessionState.setConf(null);
 assertTrue(sessionState.getIsSilent());
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/012c99ff/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java 
b/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
index 479e0df..fda242d 100644
--- a/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
+++ b/common/src/java/org/apache/hadoop/hive/common/ValidReadTxnList.java
@@ -105,7 +105,7 @@ public class ValidReadTxnList implements ValidTxnList {
 
   @Override
   public void readFromString(String src) {
-if (src == null) {
+if (src == null || src.length() == 0) {
   highWatermark = Long.MAX_VALUE;
   exceptions = new long[0];
 } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/012c99ff/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index fd9c275..c0e83c6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ 

[1/2] hive git commit: HIVE-11077 Add support in parser and wire up to txn manager (Eugene Koifman, reviewed by Alan Gates)

2015-07-22 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 8e8e39152 -> 012c99ff2


http://git-wip-us.apache.org/repos/asf/hive/blob/012c99ff/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
new file mode 100644
index 000..b7f8263
--- /dev/null
+++ 
b/ql/src/test/org/apache/hadoop/hive/ql/parse/positive/TestTransactionStatement.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.parse.positive;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.parse.SemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.io.File;
+import java.io.IOException;
+
+/**
+ * Basic parser tests for multi-statement transactions
+ */
+public class TestTransactionStatement {
+  private static SessionState sessionState;
+  private ParseDriver pd;
+
+  @BeforeClass
+  public static void initialize() {
+HiveConf conf = new HiveConf(SemanticAnalyzer.class);
+sessionState = SessionState.start(conf);
+  }
+  @AfterClass
+  public static void cleanUp() throws IOException {
+if(sessionState != null) {
+  sessionState.close();
+}
+  }
+
+  @Before
+  public void setup() throws SemanticException {
+pd = new ParseDriver();
+  }
+
+  ASTNode parse(String query) throws ParseException {
+ASTNode nd = pd.parse(query);
+return (ASTNode) nd.getChild(0);
+  }
+  @Test
+  public void testTxnStart() throws ParseException {
+ASTNode ast = parse("START TRANSACTION");
+Assert.assertEquals("AST doesn't match",
+  "TOK_START_TRANSACTION", ast.toStringTree());
+
+ast = parse("START TRANSACTION ISOLATION LEVEL SNAPSHOT");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_ISOLATION_LEVEL TOK_ISOLATION_SNAPSHOT))", 
ast.toStringTree());
+
+ast = parse("START TRANSACTION READ ONLY");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_TXN_ACCESS_MODE TOK_TXN_READ_ONLY))", 
ast.toStringTree());
+
+ast = parse("START TRANSACTION READ WRITE, ISOLATION LEVEL SNAPSHOT");
+Assert.assertEquals("AST doesn't match",
+  "(TOK_START_TRANSACTION (TOK_TXN_ACCESS_MODE TOK_TXN_READ_WRITE) 
(TOK_ISOLATION_LEVEL TOK_ISOLATION_SNAPSHOT))", ast.toStringTree());
+
+  }
+  @Test
+  public void testTxnCommitRollback() throws ParseException {
+ASTNode ast = parse("COMMIT");
+Assert.assertEquals("AST doesn't match", "TOK_COMMIT", ast.toStringTree());
+ast = parse("COMMIT WORK");
+Assert.assertEquals("AST doesn't match", "TOK_COMMIT", ast.toStringTree());
+ast = parse("ROLLBACK");
+Assert.assertEquals("AST doesn't match", "TOK_ROLLBACK", 
ast.toStringTree());
+ast = parse("ROLLBACK WORK");
+Assert.assertEquals("AST doesn't match", "TOK_ROLLBACK", 
ast.toStringTree());
+  }
+  
+  @Test
+  public void testAutoCommit() throws ParseException {
+ASTNode ast = parse("SET AUTOCOMMIT TRUE");
+Assert.assertEquals("AST doesn't match", "(TOK_SET_AUTOCOMMIT TOK_TRUE)", 
ast.toStringTree());
+ast = parse("SET AUTOCOMMIT FALSE");
+Assert.assertEquals("AST doesn't match", "(TOK_SET_AUTOCOMMIT TOK_FALSE)", 
ast.toStringTree());
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/012c99ff/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java 
b/ql/src/test/org/apache/hadoop/hive/ql/session/TestSessionState.java
index 45ad22a..0bb8ebd 100644
--- 

hive git commit: HIVE-11320 ACID enable predicate pushdown for insert-only delta file (Eugene Koifman, reviewed by Alan Gates)

2015-07-21 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 684d0e5e1 -> 77aefd6c8


HIVE-11320 ACID enable predicate pushdown for insert-only delta file (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/77aefd6c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/77aefd6c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/77aefd6c

Branch: refs/heads/branch-1
Commit: 77aefd6c8d0a59bdc20a3ba74ccec1e955888fcb
Parents: 684d0e5
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Jul 21 11:57:03 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Jul 21 11:57:03 2015 -0700

--
 .../hive/ql/io/orc/OrcRawRecordMerger.java  | 20 --
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 68 ++--
 2 files changed, 75 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/77aefd6c/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index 2f11611..58b85ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -478,10 +478,6 @@ public class OrcRawRecordMerger implements 
AcidInputFormat.RawReader<OrcStruct>{
 
 // we always want to read all of the deltas
 eventOptions.range(0, Long.MAX_VALUE);
-// Turn off the sarg before pushing it to delta.  We never want to push a 
sarg to a delta as
-// it can produce wrong results (if the latest valid version of the record 
is filtered out by
-// the sarg) or ArrayOutOfBounds errors (when the sarg is applied to a 
delete record)
-eventOptions.searchArgument(null, null);
 if (deltaDirectory != null) {
   for(Path delta: deltaDirectory) {
 ReaderKey key = new ReaderKey();
@@ -492,8 +488,20 @@ public class OrcRawRecordMerger implements 
AcidInputFormat.RawReader<OrcStruct>{
 if (length != -1 && fs.exists(deltaFile)) {
   Reader deltaReader = OrcFile.createReader(deltaFile,
   OrcFile.readerOptions(conf).maxLength(length));
-  ReaderPair deltaPair = new ReaderPair(key, deltaReader, bucket, 
minKey,
-maxKey, eventOptions, deltaDir.getStatementId());
+  Reader.Options deltaEventOptions = null;
+  if(eventOptions.getSearchArgument() != null) {
+// Turn off the sarg before pushing it to delta.  We never want to 
push a sarg to a delta as
+// it can produce wrong results (if the latest valid version of 
the record is filtered out by
+// the sarg) or ArrayOutOfBounds errors (when the sarg is applied 
to a delete record)
+// unless the delta only has insert events
+OrcRecordUpdater.AcidStats acidStats = 
OrcRecordUpdater.parseAcidStats(deltaReader);
+if(acidStats.deletes > 0 || acidStats.updates > 0) {
+  deltaEventOptions = eventOptions.clone().searchArgument(null, 
null);
+}
+  }
+  ReaderPair deltaPair;
+  deltaPair = new ReaderPair(key, deltaReader, bucket, minKey,
+maxKey, deltaEventOptions != null ? deltaEventOptions : 
eventOptions, deltaDir.getStatementId());
   if (deltaPair.nextRecord != null) {
 readers.put(key, deltaPair);
   }

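Condensing the rule this hunk implements (all names appear in the patch above): a SearchArgument may be pushed into a delta only when that delta's AcidStats show it is insert-only.

// illustrative condensation of the decision above
OrcRecordUpdater.AcidStats acidStats = OrcRecordUpdater.parseAcidStats(deltaReader);
boolean insertOnly = acidStats.deletes == 0 && acidStats.updates == 0;
Reader.Options optionsForDelta = insertOnly
    ? eventOptions                                      // safe: no event can mask a row
    : eventOptions.clone().searchArgument(null, null);  // strip the sarg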
http://git-wip-us.apache.org/repos/asf/hive/blob/77aefd6c/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 33ca998..57e4fb9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hive.ql;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.io.AcidUtils;
-import org.apache.hadoop.hive.ql.io.orc.FileDump;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.txn.compactor.Worker;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -36,13 +34,11 @@ import org.junit.Test;
 import org.junit.rules.TestName;
 
 import java.io.File;
-import 

hive git commit: HIVE-11320 ACID enable predicate pushdown for insert-only delta file (Eugene Koifman, reviewed by Alan Gates)

2015-07-21 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 6ec72de79 -> 990416249


HIVE-11320 ACID enable predicate pushdown for insert-only delta file (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/99041624
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/99041624
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/99041624

Branch: refs/heads/master
Commit: 990416249833e722ca8a32dd9dd425883da0caaf
Parents: 6ec72de
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Tue Jul 21 11:42:14 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Tue Jul 21 11:42:14 2015 -0700

--
 .../hive/ql/io/orc/OrcRawRecordMerger.java  | 20 --
 .../apache/hadoop/hive/ql/TestTxnCommands2.java | 68 ++--
 2 files changed, 75 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/99041624/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
index 2f11611..58b85ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRawRecordMerger.java
@@ -478,10 +478,6 @@ public class OrcRawRecordMerger implements 
AcidInputFormat.RawReader<OrcStruct>{
 
 // we always want to read all of the deltas
 eventOptions.range(0, Long.MAX_VALUE);
-// Turn off the sarg before pushing it to delta.  We never want to push a 
sarg to a delta as
-// it can produce wrong results (if the latest valid version of the record 
is filtered out by
-// the sarg) or ArrayOutOfBounds errors (when the sarg is applied to a 
delete record)
-eventOptions.searchArgument(null, null);
 if (deltaDirectory != null) {
   for(Path delta: deltaDirectory) {
 ReaderKey key = new ReaderKey();
@@ -492,8 +488,20 @@ public class OrcRawRecordMerger implements 
AcidInputFormat.RawReader<OrcStruct>{
 if (length != -1 && fs.exists(deltaFile)) {
   Reader deltaReader = OrcFile.createReader(deltaFile,
   OrcFile.readerOptions(conf).maxLength(length));
-  ReaderPair deltaPair = new ReaderPair(key, deltaReader, bucket, 
minKey,
-maxKey, eventOptions, deltaDir.getStatementId());
+  Reader.Options deltaEventOptions = null;
+  if(eventOptions.getSearchArgument() != null) {
+// Turn off the sarg before pushing it to delta.  We never want to 
push a sarg to a delta as
+// it can produce wrong results (if the latest valid version of 
the record is filtered out by
+// the sarg) or ArrayOutOfBounds errors (when the sarg is applied 
to a delete record)
+// unless the delta only has insert events
+OrcRecordUpdater.AcidStats acidStats = 
OrcRecordUpdater.parseAcidStats(deltaReader);
+if(acidStats.deletes > 0 || acidStats.updates > 0) {
+  deltaEventOptions = eventOptions.clone().searchArgument(null, 
null);
+}
+  }
+  ReaderPair deltaPair;
+  deltaPair = new ReaderPair(key, deltaReader, bucket, minKey,
+maxKey, deltaEventOptions != null ? deltaEventOptions : 
eventOptions, deltaDir.getStatementId());
   if (deltaPair.nextRecord != null) {
 readers.put(key, deltaPair);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/99041624/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java 
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
index 33ca998..57e4fb9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands2.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.hive.ql;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.apache.hadoop.hive.ql.io.AcidUtils;
-import org.apache.hadoop.hive.ql.io.orc.FileDump;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.txn.compactor.Worker;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -36,13 +34,11 @@ import org.junit.Test;
 import org.junit.rules.TestName;
 
 import java.io.File;
-import 

[2/2] hive git commit: HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene Koifman, reviewed by Alan Gates)

2015-07-13 Thread ekoifman
HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/66feedc5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/66feedc5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/66feedc5

Branch: refs/heads/master
Commit: 66feedc5569de959a383e0a58d9e8768bbad0e2c
Parents: 5c94bda
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Mon Jul 13 09:11:28 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Mon Jul 13 09:11:28 2015 -0700

--
 .../streaming/AbstractRecordWriter.java |   4 +-
 .../streaming/mutate/worker/MutatorImpl.java|   4 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |   1 +
 .../hadoop/hive/ql/io/AcidInputFormat.java  |  60 +++-
 .../hadoop/hive/ql/io/AcidOutputFormat.java |  49 +-
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java | 152 +++
 .../hadoop/hive/ql/io/HiveFileFormatUtils.java  |  19 +--
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |  20 +--
 .../hadoop/hive/ql/io/orc/OrcNewSplit.java  |  13 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java  |  66 ++--
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |  58 +++
 .../apache/hadoop/hive/ql/io/orc/OrcSplit.java  |  16 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  20 ++-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |   4 +
 .../hadoop/hive/ql/lockmgr/HiveTxnManager.java  |   3 +
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   3 +-
 .../hadoop/hive/ql/plan/FileSinkDesc.java   |  27 +++-
 .../hive/ql/txn/compactor/CompactorMR.java  |   4 +-
 .../hive/ql/exec/TestFileSinkOperator.java  |   3 +-
 .../apache/hadoop/hive/ql/io/TestAcidUtils.java |  73 -
 .../hive/ql/io/orc/TestInputOutputFormat.java   |  13 +-
 .../hive/ql/io/orc/TestOrcRawRecordMerger.java  |  57 ---
 .../hive/ql/io/orc/TestOrcRecordUpdater.java|   6 +-
 .../hive/ql/txn/compactor/CompactorTest.java|  20 ++-
 .../hive/ql/txn/compactor/TestCleaner.java  |   8 +-
 .../hive/ql/txn/compactor/TestCleaner2.java |  14 ++
 .../hive/ql/txn/compactor/TestInitiator.java|   4 +
 .../hive/ql/txn/compactor/TestWorker.java   |  49 +++---
 .../hive/ql/txn/compactor/TestWorker2.java  |  16 ++
 29 files changed, 645 insertions(+), 141 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/66feedc5/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index ed46bca..c959222 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -143,7 +143,9 @@ abstract class AbstractRecordWriter implements RecordWriter 
{
   .inspector(getSerde().getObjectInspector())
   .bucket(bucketId)
   .minimumTransactionId(minTxnId)
-  .maximumTransactionId(maxTxnID));
+  .maximumTransactionId(maxTxnID)
+  .statementId(-1)
+  .finalDestination(partitionPath));
 } catch (SerDeException e) {
   throw new SerializationError("Failed to get object inspector from Serde "
   + getSerde().getClass().getName(), e);

http://git-wip-us.apache.org/repos/asf/hive/blob/66feedc5/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
index 0fe41d5..52062f8 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/worker/MutatorImpl.java
@@ -78,7 +78,9 @@ public class MutatorImpl implements Mutator {
 .bucket(bucketId)
 .minimumTransactionId(transactionId)
 .maximumTransactionId(transactionId)
-.recordIdColumn(recordIdColumn));
+.recordIdColumn(recordIdColumn)
+.finalDestination(partitionPath)
+.statementId(-1));
   }
 
 }

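For context, the writer changes above feed one builder. A sketch of the extended options, with bucket and txn values assumed; per the TestWorker expectations in the companion [1/2] message, statementId(-1) keeps the legacy delta_x_y directory name, while real statement ids add a suffix:

AcidOutputFormat.Options options = new AcidOutputFormat.Options(conf)
    .inspector(inspector)
    .bucket(0)
    .minimumTransactionId(txnId)
    .maximumTransactionId(txnId)
    .statementId(-1)                   // -1: no per-statement suffix
    .finalDestination(partitionPath);  // directory the data ultimately belongs to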
http://git-wip-us.apache.org/repos/asf/hive/blob/66feedc5/ql/src/java/org/apache/hadoop/hive/ql/Driver.java

[1/2] hive git commit: HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene Koifman, reviewed by Alan Gates)

2015-07-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 5c94bda99 -> 66feedc55


http://git-wip-us.apache.org/repos/asf/hive/blob/66feedc5/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
index bebac54..11e5333 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
@@ -281,7 +281,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_021_024")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 
24))) {
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -296,6 +296,10 @@ public class TestWorker extends CompactorTest {
 Assert.assertTrue(sawNewDelta);
   }
 
+  /**
+   * todo: fix https://issues.apache.org/jira/browse/HIVE-9995
+   * @throws Exception
+   */
   @Test
   public void minorWithOpenInMiddle() throws Exception {
 LOG.debug(Starting minorWithOpenInMiddle);
@@ -321,15 +325,18 @@ public class TestWorker extends CompactorTest {
 // There should still now be 5 directories in the location
 FileSystem fs = FileSystem.get(conf);
 FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
-Assert.assertEquals(5, stat.length);
+boolean is130 = this instanceof TestWorker2;
+Assert.assertEquals(is130 ? 5 : 4, stat.length);
 
 // Find the new delta file and make sure it has the right contents
 Arrays.sort(stat);
 Assert.assertEquals("base_20", stat[0].getPath().getName());
-Assert.assertEquals("delta_021_022", stat[1].getPath().getName());
-Assert.assertEquals("delta_21_22", stat[2].getPath().getName());
-Assert.assertEquals("delta_23_25", stat[3].getPath().getName());
-Assert.assertEquals("delta_26_27", stat[4].getPath().getName());
+if(is130) {//in1.3.0 orig delta is delta_00021_00022_ and compacted 
one is delta_00021_00022...
+  Assert.assertEquals(makeDeltaDirNameCompacted(21, 22), 
stat[1].getPath().getName());
+}
+Assert.assertEquals(makeDeltaDirName(21, 22), stat[1 + (is130 ? 1 : 
0)].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(23, 25), stat[2 + (is130 ? 1 : 
0)].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(26, 27), stat[3 + (is130 ? 1 : 
0)].getPath().getName());
   }
 
   @Test
@@ -362,10 +369,10 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 Arrays.sort(stat);
 Assert.assertEquals("base_20", stat[0].getPath().getName());
-Assert.assertEquals("delta_021_027", stat[1].getPath().getName());
-Assert.assertEquals("delta_21_22", stat[2].getPath().getName());
-Assert.assertEquals("delta_23_25", stat[3].getPath().getName());
-Assert.assertEquals("delta_26_27", stat[4].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(21, 22), stat[1].getPath().getName());
+Assert.assertEquals(makeDeltaDirNameCompacted(21, 27), 
stat[2].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
   }
 
   @Test
@@ -398,7 +405,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_021_024")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 
24))) {
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -441,7 +448,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_001_004")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(1, 4))) 
{
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -661,7 +668,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if 

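A note on the directory names these assertions juggle: makeDeltaDirName builds the raw write directory and makeDeltaDirNameCompacted the compactor's output. The zero padding below is an assumption (AcidUtils pads to seven digits; the archive has collapsed some of them):

// illustrative shapes only
// pre-1.3.0 write and compaction output:  delta_0000021_0000022
// 1.3.0 write (statement id suffix):      delta_0000021_0000022_0000
// 1.3.0 compaction output:                delta_0000021_0000022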
[2/2] hive git commit: HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene Koifman, reviewed by Alan Gates)

2015-07-13 Thread ekoifman
HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene 
Koifman, reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c30ab468
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c30ab468
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c30ab468

Branch: refs/heads/branch-1
Commit: c30ab4686cbfe73c3cf4552fa7e07c8ded3b4b17
Parents: 16d1b74
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Mon Jul 13 09:31:17 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Mon Jul 13 09:31:17 2015 -0700

--
 .../streaming/AbstractRecordWriter.java |   4 +-
 .../java/org/apache/hadoop/hive/ql/Driver.java  |   1 +
 .../hadoop/hive/ql/io/AcidInputFormat.java  |  60 +++-
 .../hadoop/hive/ql/io/AcidOutputFormat.java |  49 +-
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java | 152 +++
 .../hadoop/hive/ql/io/HiveFileFormatUtils.java  |  19 +--
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |  20 +--
 .../hadoop/hive/ql/io/orc/OrcNewSplit.java  |  13 +-
 .../hive/ql/io/orc/OrcRawRecordMerger.java  |  66 ++--
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |  58 +++
 .../apache/hadoop/hive/ql/io/orc/OrcSplit.java  |  16 +-
 .../hadoop/hive/ql/lockmgr/DbTxnManager.java|  20 ++-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |   4 +
 .../hadoop/hive/ql/lockmgr/HiveTxnManager.java  |   3 +
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   3 +-
 .../hadoop/hive/ql/plan/FileSinkDesc.java   |  27 +++-
 .../hive/ql/txn/compactor/CompactorMR.java  |   4 +-
 .../hive/ql/exec/TestFileSinkOperator.java  |   3 +-
 .../apache/hadoop/hive/ql/io/TestAcidUtils.java |  73 -
 .../hive/ql/io/orc/TestInputOutputFormat.java   |  13 +-
 .../hive/ql/io/orc/TestOrcRawRecordMerger.java  |  57 ---
 .../hive/ql/io/orc/TestOrcRecordUpdater.java|   6 +-
 .../hive/ql/txn/compactor/CompactorTest.java|  20 ++-
 .../hive/ql/txn/compactor/TestCleaner.java  |   8 +-
 .../hive/ql/txn/compactor/TestCleaner2.java |  14 ++
 .../hive/ql/txn/compactor/TestInitiator.java|   4 +
 .../hive/ql/txn/compactor/TestWorker.java   |  49 +++---
 .../hive/ql/txn/compactor/TestWorker2.java  |  16 ++
 28 files changed, 642 insertions(+), 140 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c30ab468/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index ed46bca..c959222 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -143,7 +143,9 @@ abstract class AbstractRecordWriter implements RecordWriter 
{
   .inspector(getSerde().getObjectInspector())
   .bucket(bucketId)
   .minimumTransactionId(minTxnId)
-  .maximumTransactionId(maxTxnID));
+  .maximumTransactionId(maxTxnID)
+  .statementId(-1)
+  .finalDestination(partitionPath));
 } catch (SerDeException e) {
   throw new SerializationError("Failed to get object inspector from Serde "
   + getSerde().getClass().getName(), e);

http://git-wip-us.apache.org/repos/asf/hive/blob/c30ab468/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index e04165b..d161503 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -986,6 +986,7 @@ public class Driver implements CommandProcessor {
 if (acidSinks != null) {
   for (FileSinkDesc desc : acidSinks) {
 desc.setTransactionId(txnId);
+desc.setStatementId(txnMgr.getStatementId());
   }
 }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c30ab468/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
index e1d2395..24506b7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
+++ 

[1/2] hive git commit: HIVE-11030 - Enhance storage layer to create one delta file per write (Eugene Koifman, reviewed by Alan Gates)

2015-07-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 16d1b7459 -> c30ab4686


http://git-wip-us.apache.org/repos/asf/hive/blob/c30ab468/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java 
b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
index bebac54..11e5333 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestWorker.java
@@ -281,7 +281,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_021_024")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 
24))) {
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -296,6 +296,10 @@ public class TestWorker extends CompactorTest {
 Assert.assertTrue(sawNewDelta);
   }
 
+  /**
+   * todo: fix https://issues.apache.org/jira/browse/HIVE-9995
+   * @throws Exception
+   */
   @Test
   public void minorWithOpenInMiddle() throws Exception {
 LOG.debug(Starting minorWithOpenInMiddle);
@@ -321,15 +325,18 @@ public class TestWorker extends CompactorTest {
 // There should still now be 5 directories in the location
 FileSystem fs = FileSystem.get(conf);
 FileStatus[] stat = fs.listStatus(new Path(t.getSd().getLocation()));
-Assert.assertEquals(5, stat.length);
+boolean is130 = this instanceof TestWorker2;
+Assert.assertEquals(is130 ? 5 : 4, stat.length);
 
 // Find the new delta file and make sure it has the right contents
 Arrays.sort(stat);
 Assert.assertEquals("base_20", stat[0].getPath().getName());
-Assert.assertEquals("delta_021_022", stat[1].getPath().getName());
-Assert.assertEquals("delta_21_22", stat[2].getPath().getName());
-Assert.assertEquals("delta_23_25", stat[3].getPath().getName());
-Assert.assertEquals("delta_26_27", stat[4].getPath().getName());
+if(is130) {//in1.3.0 orig delta is delta_00021_00022_ and compacted 
one is delta_00021_00022...
+  Assert.assertEquals(makeDeltaDirNameCompacted(21, 22), 
stat[1].getPath().getName());
+}
+Assert.assertEquals(makeDeltaDirName(21, 22), stat[1 + (is130 ? 1 : 
0)].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(23, 25), stat[2 + (is130 ? 1 : 
0)].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(26, 27), stat[3 + (is130 ? 1 : 
0)].getPath().getName());
   }
 
   @Test
@@ -362,10 +369,10 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 Arrays.sort(stat);
 Assert.assertEquals("base_20", stat[0].getPath().getName());
-Assert.assertEquals("delta_021_027", stat[1].getPath().getName());
-Assert.assertEquals("delta_21_22", stat[2].getPath().getName());
-Assert.assertEquals("delta_23_25", stat[3].getPath().getName());
-Assert.assertEquals("delta_26_27", stat[4].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(21, 22), stat[1].getPath().getName());
+Assert.assertEquals(makeDeltaDirNameCompacted(21, 27), 
stat[2].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(23, 25), stat[3].getPath().getName());
+Assert.assertEquals(makeDeltaDirName(26, 27), stat[4].getPath().getName());
   }
 
   @Test
@@ -398,7 +405,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_021_024")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(21, 
24))) {
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -441,7 +448,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if (stat[i].getPath().getName().equals("delta_001_004")) {
+  if (stat[i].getPath().getName().equals(makeDeltaDirNameCompacted(1, 4))) 
{
 sawNewDelta = true;
 FileStatus[] buckets = fs.listStatus(stat[i].getPath());
 Assert.assertEquals(2, buckets.length);
@@ -661,7 +668,7 @@ public class TestWorker extends CompactorTest {
 // Find the new delta file and make sure it has the right contents
 boolean sawNewDelta = false;
 for (int i = 0; i < stat.length; i++) {
-  if 
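
The assertions above swap hard-coded directory names for the
makeDeltaDirName/makeDeltaDirNameCompacted helpers. A minimal, self-contained
sketch of what such helpers are assumed to produce; the 7-digit txn padding
and the 4-digit statement-id suffix are assumptions, not the exact
CompactorTest constants.

// Hypothetical sketch of the delta directory naming used in the assertions
// above. Padding widths are assumptions, not CompactorTest's exact values.
public class DeltaNames {
  static String deltaSubdir(long min, long max) {
    return String.format("delta_%07d_%07d", min, max);
  }
  // Deltas written by a transaction carry a statement-id suffix.
  static String makeDeltaDirName(long min, long max) {
    return deltaSubdir(min, max) + String.format("_%04d", 0);
  }
  // Deltas produced by the compactor drop the statement-id suffix.
  static String makeDeltaDirNameCompacted(long min, long max) {
    return deltaSubdir(min, max);
  }
  public static void main(String[] args) {
    System.out.println(makeDeltaDirName(21, 22));          // delta_0000021_0000022_0000
    System.out.println(makeDeltaDirNameCompacted(21, 24)); // delta_0000021_0000024
  }
}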

hive git commit: HIVE-11228 - Mutation API should use semi-shared locks. (Elliot West, via Eugene Koifman)

2015-07-13 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 17f759d63 - 3301b92bc


HIVE-11228 - Mutation API should use semi-shared locks. (Elliot West, via 
Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3301b92b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3301b92b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3301b92b

Branch: refs/heads/master
Commit: 3301b92bcb2a1f779e76d174cd9ac6d83fc66938
Parents: 17f759d
Author: Eugene Koifman ekoif...@hortonworks.com
Authored: Mon Jul 13 09:42:07 2015 -0700
Committer: Eugene Koifman ekoif...@hortonworks.com
Committed: Mon Jul 13 09:42:26 2015 -0700

--
 .../streaming/mutate/client/MutatorClient.java  |  11 +-
 .../streaming/mutate/client/lock/Lock.java  |  73 +++
 .../hive/hcatalog/streaming/mutate/package.html |   8 +-
 .../streaming/mutate/client/lock/TestLock.java  | 121 ---
 4 files changed, 136 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3301b92b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
index 2724525..29b828d 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/MutatorClient.java
@@ -42,7 +42,16 @@ public class MutatorClient implements Closeable {
 .lockFailureListener(lockFailureListener == null ? 
LockFailureListener.NULL_LISTENER : lockFailureListener)
 .user(user);
 for (AcidTable table : tables) {
-  lockOptions.addTable(table.getDatabaseName(), table.getTableName());
+  switch (table.getTableType()) {
+  case SOURCE:
+lockOptions.addSourceTable(table.getDatabaseName(), 
table.getTableName());
+break;
+  case SINK:
+lockOptions.addSinkTable(table.getDatabaseName(), 
table.getTableName());
+break;
+  default:
+throw new IllegalArgumentException("Unknown TableType: " + 
table.getTableType());
+  }
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/3301b92b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
index 21604df..ad0b303 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/mutate/client/lock/Lock.java
@@ -2,6 +2,7 @@ package org.apache.hive.hcatalog.streaming.mutate.client.lock;
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
@@ -35,7 +36,8 @@ public class Lock {
   private final IMetaStoreClient metaStoreClient;
   private final HeartbeatFactory heartbeatFactory;
   private final LockFailureListener listener;
-  private final Collection<Table> tableDescriptors;
+  private final Collection<Table> sinks;
+  private final Collection<Table> tables = new HashSet<>();
   private final int lockRetries;
   private final int retryWaitSeconds;
   private final String user;
@@ -46,23 +48,26 @@ public class Lock {
   private Long transactionId;
 
   public Lock(IMetaStoreClient metaStoreClient, Options options) {
-this(metaStoreClient, new HeartbeatFactory(), options.hiveConf, 
options.listener, options.user,
-options.descriptors, options.lockRetries, options.retryWaitSeconds);
+this(metaStoreClient, new HeartbeatFactory(), options.hiveConf, 
options.listener, options.user, options.sources,
+options.sinks, options.lockRetries, options.retryWaitSeconds);
   }
 
   /** Visible for testing only. */
   Lock(IMetaStoreClient metaStoreClient, HeartbeatFactory heartbeatFactory, 
HiveConf hiveConf,
-  LockFailureListener listener, String user, Collection<Table> 
tableDescriptors, int lockRetries,
+  LockFailureListener listener, String user, Collection<Table> sources, 
Collection<Table> sinks, int lockRetries,
   int retryWaitSeconds) {
 this.metaStoreClient = metaStoreClient;
 this.heartbeatFactory = heartbeatFactory;
 this.hiveConf = hiveConf;
 this.user = user;
-this.tableDescriptors = tableDescriptors;
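
The change above splits the single table collection into sources and sinks so
the lock request can ask for different lock levels: sources take a shared
(read) lock, while sinks take a semi-shared lock that still admits concurrent
readers but coordinates with conflicting writers. A self-contained sketch of
that rule, using illustrative stand-in types rather than the Hive metastore
lock API:

import java.util.*;

// Illustrative sketch of the source/sink lock rule introduced above.
// The types here are stand-ins, not the Hive metastore lock API.
class MutationLockRequest {
  enum LockLevel { SHARED, SEMI_SHARED }

  static Map<String, LockLevel> build(Collection<String> sources, Collection<String> sinks) {
    Map<String, LockLevel> request = new LinkedHashMap<>();
    for (String table : sources) {
      request.put(table, LockLevel.SHARED);      // read-only participants
    }
    for (String table : sinks) {
      request.put(table, LockLevel.SEMI_SHARED); // mutated tables; sinks win on overlap
    }
    return request;
  }

  public static void main(String[] args) {
    System.out.println(build(Arrays.asList("db.dim"), Arrays.asList("db.fact")));
    // {db.dim=SHARED, db.fact=SEMI_SHARED}
  }
}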
 

hive git commit: HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates)

2015-10-24 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 24ec6beda -> e3ef96f2b


HIVE-11540 - Too many delta files during Compaction - OOM (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e3ef96f2
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e3ef96f2
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e3ef96f2

Branch: refs/heads/master
Commit: e3ef96f2b83ffa932dd59fc3df79dff8747309ba
Parents: 24ec6be
Author: Eugene Koifman 
Authored: Sat Oct 24 18:44:05 2015 -0700
Committer: Eugene Koifman 
Committed: Sat Oct 24 18:44:05 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java |  15 ++-
 .../hive/ql/txn/compactor/CompactorMR.java  |  96 ++-
 .../hadoop/hive/ql/txn/compactor/Worker.java|   6 +-
 .../hive/ql/txn/compactor/CompactorTest.java|   4 +
 .../hive/ql/txn/compactor/TestWorker.java   | 120 +--
 6 files changed, 201 insertions(+), 42 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index f065048..dc79415 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1551,6 +1551,8 @@ public class HiveConf extends Configuration {
 HIVE_COMPACTOR_DELTA_PCT_THRESHOLD("hive.compactor.delta.pct.threshold", 
0.1f,
 "Percentage (fractional) size of the delta files relative to the base 
that will trigger\n" +
 "a major compaction. (1.0 = 100%, so the default 0.1 = 10%.)"),
+COMPACTOR_MAX_NUM_DELTA("hive.compactor.max.num.delta", 500, "Maximum 
number of delta files that " +
+  "the compactor will attempt to handle in a single job."),
 
 HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD("hive.compactor.abortedtxn.threshold", 
1000,
 "Number of aborted transactions involving a given table or partition 
that will trigger\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e3ef96f2/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index 30db513..e8d070c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -132,6 +132,9 @@ public class AcidUtils {
 return deltaSubdir(min, max) + "_" + String.format(STATEMENT_DIGITS, 
statementId);
   }
 
+  public static String baseDir(long txnId) {
+return BASE_PREFIX + String.format(DELTA_DIGITS, txnId);
+  }
   /**
* Create a filename for a bucket file.
* @param directory the partition directory
@@ -221,14 +224,16 @@ public class AcidUtils {
 Path getBaseDirectory();
 
 /**
- * Get the list of original files.
+ * Get the list of original files.  Not {@code null}.
  * @return the list of original files (eg. 00_0)
  */
 List<FileStatus> getOriginalFiles();
 
 /**
  * Get the list of base and delta directories that are valid and not
- * obsolete.
+ * obsolete.  Not {@code null}.  List must be sorted in a specific way.
+ * See {@link 
org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta#compareTo(org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta)}
+ * for details.
  * @return the minimal list of current directories
  */
 List<ParsedDelta> getCurrentDirectories();
@@ -237,7 +242,7 @@ public class AcidUtils {
  * Get the list of obsolete directories. After filtering out bases and
  * deltas that are not selected by the valid transaction list, return the
  * list of original files, bases, and deltas that have been replaced by
- * more up to date ones.
+ * more up to date ones.  Not {@code null}.
  */
 List<FileStatus> getObsolete();
   }
@@ -284,6 +289,7 @@ public class AcidUtils {
  * happens in a different process; thus it's possible to have bases/deltas 
with
  * overlapping txnId boundaries.  The sort order helps figure out the 
"best" set of files
  * to use to get data.
+ * This sorts "wider" delta before "narrower" i.e. delta_5_20 sorts before 
delta_5_10 (and delta_11_20)
  */
 @Override
 public int compareTo(ParsedDelta parsedDelta) {
@@ -499,6 +505,9 @@ public class AcidUtils {
 }
 
 Collections.sort(working);
+//so now, 'working' should be sorted like delta_5_20 delta_5_10 
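
The compareTo javadoc added above pins down the ordering: ascending on the low
end of the txn range and, for equal lows, the wider delta first. A minimal
sketch of a comparator with that property; the Delta type and field names are
illustrative, not AcidUtils.ParsedDelta itself.

import java.util.*;

// Sketch of the "wider delta sorts first" ordering documented above:
// ascending minTxn; for equal minTxn, descending maxTxn, so delta_5_20
// precedes delta_5_10 and delta_11_20.
class DeltaOrdering {
  static final class Delta {
    final long min, max;
    Delta(long min, long max) { this.min = min; this.max = max; }
    @Override public String toString() { return "delta_" + min + "_" + max; }
  }

  static final Comparator<Delta> WIDEST_FIRST = (a, b) -> {
    if (a.min != b.min) return Long.compare(a.min, b.min);
    return Long.compare(b.max, a.max); // wider (larger max) first
  };

  public static void main(String[] args) {
    List<Delta> deltas = new ArrayList<>(Arrays.asList(
        new Delta(5, 10), new Delta(11, 20), new Delta(5, 20)));
    deltas.sort(WIDEST_FIRST);
    System.out.println(deltas); // [delta_5_20, delta_5_10, delta_11_20]
  }
}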

hive git commit: HIVE-11540 Too many delta files during Compaction - OOM (Eugene Koifman, reviewed by Alan Gates)

2015-10-24 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 f4020cfce -> e654efeb3


HIVE-11540 Too many delta files during Compaction - OOM (Eugene Koifman, 
reviewed by Alan Gates)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/e654efeb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/e654efeb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/e654efeb

Branch: refs/heads/branch-1
Commit: e654efeb32c62fb5cd56214b823526173cb009bb
Parents: f4020cf
Author: Eugene Koifman 
Authored: Sat Oct 24 22:01:20 2015 -0700
Committer: Eugene Koifman 
Committed: Sat Oct 24 22:01:20 2015 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   2 +
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java |  15 +-
 .../hive/ql/txn/compactor/CompactorMR.java  | 143 ---
 .../hadoop/hive/ql/txn/compactor/Worker.java|   6 +-
 .../hive/ql/txn/compactor/CompactorTest.java|   4 +
 .../hive/ql/txn/compactor/TestWorker.java   | 120 ++--
 6 files changed, 225 insertions(+), 65 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/e654efeb/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 2febd39..4724523 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1500,6 +1500,8 @@ public class HiveConf extends Configuration {
 HIVE_COMPACTOR_DELTA_PCT_THRESHOLD("hive.compactor.delta.pct.threshold", 
0.1f,
 "Percentage (fractional) size of the delta files relative to the base 
that will trigger\n" +
 "a major compaction. (1.0 = 100%, so the default 0.1 = 10%.)"),
+COMPACTOR_MAX_NUM_DELTA("hive.compactor.max.num.delta", 500, "Maximum 
number of delta files that " +
+  "the compactor will attempt to handle in a single job."),
 
 HIVE_COMPACTOR_ABORTEDTXN_THRESHOLD("hive.compactor.abortedtxn.threshold", 
1000,
 "Number of aborted transactions involving a given table or partition 
that will trigger\n" +

http://git-wip-us.apache.org/repos/asf/hive/blob/e654efeb/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
index c7e0780..8f60e9d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
@@ -129,6 +129,9 @@ public class AcidUtils {
 return deltaSubdir(min, max) + "_" + String.format(STATEMENT_DIGITS, 
statementId);
   }
 
+  public static String baseDir(long txnId) {
+return BASE_PREFIX + String.format(DELTA_DIGITS, txnId);
+  }
   /**
* Create a filename for a bucket file.
* @param directory the partition directory
@@ -218,14 +221,16 @@ public class AcidUtils {
 Path getBaseDirectory();
 
 /**
- * Get the list of original files.
+ * Get the list of original files.  Not {@code null}.
  * @return the list of original files (eg. 00_0)
  */
 List<FileStatus> getOriginalFiles();
 
 /**
  * Get the list of base and delta directories that are valid and not
- * obsolete.
+ * obsolete.  Not {@code null}.  List must be sorted in a specific way.
+ * See {@link 
org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta#compareTo(org.apache.hadoop.hive.ql.io.AcidUtils.ParsedDelta)}
+ * for details.
  * @return the minimal list of current directories
  */
 List<ParsedDelta> getCurrentDirectories();
@@ -234,7 +239,7 @@ public class AcidUtils {
  * Get the list of obsolete directories. After filtering out bases and
  * deltas that are not selected by the valid transaction list, return the
  * list of original files, bases, and deltas that have been replaced by
- * more up to date ones.
+ * more up to date ones.  Not {@code null}.
  */
 List<FileStatus> getObsolete();
   }
@@ -281,6 +286,7 @@ public class AcidUtils {
  * happens in a different process; thus it's possible to have bases/deltas 
with
  * overlapping txnId boundaries.  The sort order helps figure out the 
"best" set of files
  * to use to get data.
+ * This sorts "wider" delta before "narrower" i.e. delta_5_20 sorts before 
delta_5_10 (and delta_11_20)
  */
 @Override
 public int compareTo(ParsedDelta parsedDelta) {
@@ -493,6 +499,9 @@ public class AcidUtils {
 }
 
 Collections.sort(working);
+//so now, 'working' should be sorted like delta_5_20 delta_5_10 
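
The new hive.compactor.max.num.delta setting caps how many delta files a
single compactor job will open. A sketch of the batching idea behind such a
cap, compacting deltas in bounded chunks instead of all at once; this
illustrates the strategy only and is not CompactorMR's actual code.

import java.util.*;

// Illustration of bounding a compaction job's inputs: split N deltas into
// ceil(N / maxPerJob) batches so no single job opens more than the cap.
class DeltaBatching {
  static <T> List<List<T>> batches(List<T> deltas, int maxPerJob) {
    List<List<T>> out = new ArrayList<>();
    for (int i = 0; i < deltas.size(); i += maxPerJob) {
      out.add(deltas.subList(i, Math.min(i + maxPerJob, deltas.size())));
    }
    return out;
  }

  public static void main(String[] args) {
    List<Integer> deltas = new ArrayList<>();
    for (int i = 0; i < 1200; i++) deltas.add(i);
    // With the default cap of 500 this yields 3 bounded jobs rather than one
    // job that opens 1200 files and risks running out of memory.
    System.out.println(batches(deltas, 500).size()); // 3
  }
}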

hive git commit: HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)

2015-10-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 e075acd5a -> fdfd2cea6


HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fdfd2cea
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fdfd2cea
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fdfd2cea

Branch: refs/heads/branch-1
Commit: fdfd2cea6bc0e57441f515083624e4b768dc9274
Parents: e075acd
Author: Eugene Koifman 
Authored: Wed Oct 28 15:35:33 2015 -0700
Committer: Eugene Koifman 
Committed: Wed Oct 28 15:35:33 2015 -0700

--
 .../java/org/apache/hive/hcatalog/streaming/InvalidTable.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fdfd2cea/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
index 98ef688..d61dfbb 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
@@ -29,10 +29,10 @@ public class InvalidTable extends StreamingException {
   }
 
   public InvalidTable(String db, String table, String msg) {
-super(msg);
+super(makeMsg(db, table) + ": " + msg, null);
   }
 
   public InvalidTable(String db, String table, Exception inner) {
-super(inner.getMessage(), inner);
+super(makeMsg(db, table) + ": " + inner.getMessage(), inner);
   }
 }



hive git commit: HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)

2015-10-28 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 01580af2e -> 53fc31931


HIVE-12276 Fix messages in InvalidTable (Eugene Koifman, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/53fc3193
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/53fc3193
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/53fc3193

Branch: refs/heads/master
Commit: 53fc3193194a742429170a7c5a0a809ab3c5341f
Parents: 01580af
Author: Eugene Koifman 
Authored: Wed Oct 28 16:00:46 2015 -0700
Committer: Eugene Koifman 
Committed: Wed Oct 28 16:00:46 2015 -0700

--
 .../java/org/apache/hive/hcatalog/streaming/InvalidTable.java| 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/53fc3193/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
index 98ef688..d61dfbb 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/InvalidTable.java
@@ -29,10 +29,10 @@ public class InvalidTable extends StreamingException {
   }
 
   public InvalidTable(String db, String table, String msg) {
-super(msg);
+super(makeMsg(db, table) + ": " + msg, null);
   }
 
   public InvalidTable(String db, String table, Exception inner) {
-super(inner.getMessage(), inner);
+super(makeMsg(db, table) + ": " + inner.getMessage(), inner);
   }
 }
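
Both constructors now prefix the message with database/table context via a
makeMsg helper that this diff does not show. A hypothetical sketch of the
pattern; makeMsg's exact wording is an assumption.

// Hypothetical sketch of the message pattern above; makeMsg's wording is
// assumed, since the helper itself is not part of the diff.
class InvalidTableSketch extends Exception {
  static String makeMsg(String db, String table) {
    return "Invalid table db:" + db + ", table:" + table;
  }
  InvalidTableSketch(String db, String table, String msg) {
    super(makeMsg(db, table) + ": " + msg, null);
  }
  InvalidTableSketch(String db, String table, Exception inner) {
    super(makeMsg(db, table) + ": " + inner.getMessage(), inner);
  }
  public static void main(String[] args) {
    System.out.println(new InvalidTableSketch("db1", "t1", "no such table").getMessage());
    // Invalid table db:db1, table:t1: no such table
  }
}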



hive git commit: HIVE-12202 NPE thrown when reading legacy ACID delta files (missed a file) (Elliot West via Eugene Koifman)

2015-11-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master f21c58e83 -> f9d1436b2


HIVE-12202 NPE thrown when reading legacy ACID delta files (missed a 
file) (Elliot West via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f9d1436b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f9d1436b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f9d1436b

Branch: refs/heads/master
Commit: f9d1436b28e96e1435a2aa779ccc8d7ddf4514b2
Parents: f21c58e
Author: Eugene Koifman 
Authored: Tue Nov 3 10:46:36 2015 -0800
Committer: Eugene Koifman 
Committed: Tue Nov 3 10:46:36 2015 -0800

--
 .../hadoop/hive/ql/io/TestAcidInputFormat.java  | 88 
 1 file changed, 88 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f9d1436b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
new file mode 100644
index 000..6a77670
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
@@ -0,0 +1,88 @@
+package org.apache.hadoop.hive.ql.io;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.DataInput;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class TestAcidInputFormat {
+
+  @Mock
+  private DataInput mockDataInput;
+
+  @Test
+  public void testDeltaMetaDataReadFieldsNoStatementIds() throws Exception {
+when(mockDataInput.readLong()).thenReturn(1L, 2L);
+when(mockDataInput.readInt()).thenReturn(0);
+
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
+deltaMetaData.readFields(mockDataInput);
+
+verify(mockDataInput, times(1)).readInt();
+assertThat(deltaMetaData.getMinTxnId(), is(1L));
+assertThat(deltaMetaData.getMaxTxnId(), is(2L));
+assertThat(deltaMetaData.getStmtIds().isEmpty(), is(true));
+  }
+
+  @Test
+  public void testDeltaMetaDataReadFieldsWithStatementIds() throws Exception {
+when(mockDataInput.readLong()).thenReturn(1L, 2L);
+when(mockDataInput.readInt()).thenReturn(2, 100, 101);
+
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
+deltaMetaData.readFields(mockDataInput);
+
+verify(mockDataInput, times(3)).readInt();
+assertThat(deltaMetaData.getMinTxnId(), is(1L));
+assertThat(deltaMetaData.getMaxTxnId(), is(2L));
+assertThat(deltaMetaData.getStmtIds().size(), is(2));
+assertThat(deltaMetaData.getStmtIds().get(0), is(100));
+assertThat(deltaMetaData.getStmtIds().get(1), is(101));
+  }
+
+  @Test
+  public void testDeltaMetaConstructWithState() throws Exception {
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData(2000L, 
2001L, Arrays.asList(97, 98, 99));
+
+assertThat(deltaMetaData.getMinTxnId(), is(2000L));
+assertThat(deltaMetaData.getMaxTxnId(), is(2001L));
+assertThat(deltaMetaData.getStmtIds().size(), is(3));
+assertThat(deltaMetaData.getStmtIds().get(0), is(97));
+assertThat(deltaMetaData.getStmtIds().get(1), is(98));
+assertThat(deltaMetaData.getStmtIds().get(2), is(99));
+  }
+
+  @Test
+  public void testDeltaMetaDataReadFieldsWithStatementIdsResetsState() throws 
Exception {
+when(mockDataInput.readLong()).thenReturn(1L, 2L);
+when(mockDataInput.readInt()).thenReturn(2, 100, 101);
+
+List<Integer> statementIds = new ArrayList<>();
+statementIds.add(97);
+statementIds.add(98);
+statementIds.add(99);
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData(2000L, 
2001L, statementIds);
+deltaMetaData.readFields(mockDataInput);
+
+verify(mockDataInput, times(3)).readInt();
+assertThat(deltaMetaData.getMinTxnId(), is(1L));
+assertThat(deltaMetaData.getMaxTxnId(), is(2L));
+assertThat(deltaMetaData.getStmtIds().size(), is(2));
+assertThat(deltaMetaData.getStmtIds().get(0), is(100));
+assertThat(deltaMetaData.getStmtIds().get(1), is(101));
+  }
+
+}
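
The tests above stub DataInput with Mockito. The same wire layout can also be
exercised with real streams; a self-contained round trip of the format that
readFields consumes (two longs, an int count, then that many ints):

import java.io.*;
import java.util.*;

// Round trip of the DeltaMetaData wire layout exercised above:
// minTxnId (long), maxTxnId (long), statement count (int), then the ids.
public class DeltaMetaDataRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeLong(1L); // minTxnId
    out.writeLong(2L); // maxTxnId
    out.writeInt(2);   // number of statement ids
    out.writeInt(100);
    out.writeInt(101);

    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    System.out.println("min=" + in.readLong() + " max=" + in.readLong());
    int numStatements = in.readInt();
    List<Integer> stmtIds = new ArrayList<>();
    for (int i = 0; i < numStatements; i++) {
      stmtIds.add(in.readInt());
    }
    System.out.println("stmtIds=" + stmtIds); // [100, 101]
  }
}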



hive git commit: HIVE-12202 NPE thrown when reading legacy ACID delta files (Elliot West via Eugene Koifman)

2015-11-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 aca86 -> 703526bc3


HIVE-12202 NPE thrown when reading legacy ACID delta files (Elliot West via 
Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/703526bc
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/703526bc
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/703526bc

Branch: refs/heads/branch-1
Commit: 703526bc38d73affd1bf1450831927b5e9e24678
Parents: aca
Author: Eugene Koifman 
Authored: Tue Nov 3 10:56:43 2015 -0800
Committer: Eugene Koifman 
Committed: Tue Nov 3 10:56:43 2015 -0800

--
 .../hadoop/hive/ql/io/AcidInputFormat.java  | 14 ++--
 .../hadoop/hive/ql/io/TestAcidInputFormat.java  | 88 
 2 files changed, 93 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/703526bc/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
index 24506b7..7c7074d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
@@ -33,7 +33,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 
 /**
@@ -115,11 +114,14 @@ public interface AcidInputFormat
 private List<Integer> stmtIds;
 
 public DeltaMetaData() {
-  this(0,0,null);
+  this(0,0,new ArrayList<Integer>());
 }
 DeltaMetaData(long minTxnId, long maxTxnId, List<Integer> stmtIds) {
   this.minTxnId = minTxnId;
   this.maxTxnId = maxTxnId;
+  if (stmtIds == null) {
+throw new IllegalArgumentException("stmtIds == null");
+  }
   this.stmtIds = stmtIds;
 }
 long getMinTxnId() {
@@ -136,9 +138,6 @@ public interface AcidInputFormat
   out.writeLong(minTxnId);
   out.writeLong(maxTxnId);
   out.writeInt(stmtIds.size());
-  if(stmtIds == null) {
-return;
-  }
   for(Integer id : stmtIds) {
 out.writeInt(id);
   }
@@ -147,11 +146,8 @@ public interface AcidInputFormat
 public void readFields(DataInput in) throws IOException {
   minTxnId = in.readLong();
   maxTxnId = in.readLong();
+  stmtIds.clear();
   int numStatements = in.readInt();
-  if(numStatements <= 0) {
-return;
-  }
-  stmtIds = new ArrayList<>();
   for(int i = 0; i < numStatements; i++) {
 stmtIds.add(in.readInt());
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/703526bc/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
--
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
new file mode 100644
index 000..6a77670
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/TestAcidInputFormat.java
@@ -0,0 +1,88 @@
+package org.apache.hadoop.hive.ql.io;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.junit.Assert.assertThat;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.DataInput;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.io.AcidInputFormat.DeltaMetaData;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.runners.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class TestAcidInputFormat {
+
+  @Mock
+  private DataInput mockDataInput;
+
+  @Test
+  public void testDeltaMetaDataReadFieldsNoStatementIds() throws Exception {
+when(mockDataInput.readLong()).thenReturn(1L, 2L);
+when(mockDataInput.readInt()).thenReturn(0);
+
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
+deltaMetaData.readFields(mockDataInput);
+
+verify(mockDataInput, times(1)).readInt();
+assertThat(deltaMetaData.getMinTxnId(), is(1L));
+assertThat(deltaMetaData.getMaxTxnId(), is(2L));
+assertThat(deltaMetaData.getStmtIds().isEmpty(), is(true));
+  }
+
+  @Test
+  public void testDeltaMetaDataReadFieldsWithStatementIds() throws Exception {
+when(mockDataInput.readLong()).thenReturn(1L, 2L);
+when(mockDataInput.readInt()).thenReturn(2, 100, 101);
+
+DeltaMetaData deltaMetaData = new AcidInputFormat.DeltaMetaData();
+deltaMetaData.readFields(mockDataInput);
+
+

[1/2] hive git commit: HIVE-12266 When client exits abnormally, it doesn't release ACID locks (Wei Zheng, via Eugene Koifman)

2015-11-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 5a5f8e49f -> 89703e7d0


HIVE-12266 When client exits abnormally, it doesn't release ACID locks (Wei 
Zheng, via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/595fa998
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/595fa998
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/595fa998

Branch: refs/heads/master
Commit: 595fa9988fcb3e67b60845b44e1df4cc49ce38b2
Parents: 5a5f8e4
Author: Eugene Koifman 
Authored: Tue Nov 3 09:03:54 2015 -0800
Committer: Eugene Koifman 
Committed: Tue Nov 3 09:03:54 2015 -0800

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 43 +++-
 1 file changed, 32 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/595fa998/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 18052f3..93c7a54 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -121,12 +121,14 @@ import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.common.util.ShutdownHookManager;
 
 public class Driver implements CommandProcessor {
 
   static final private String CLASS_NAME = Driver.class.getName();
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
+  static final int SHUTDOWN_HOOK_PRIORITY = 0;
 
   private int maxRows = 100;
   ByteStream.Output bos = new ByteStream.Output();
@@ -390,7 +392,20 @@ public class Driver implements CommandProcessor {
 
 try {
   // Initialize the transaction manager.  This must be done before analyze 
is called.
-  SessionState.get().initTxnMgr(conf);
+  final HiveTxnManager txnManager = SessionState.get().initTxnMgr(conf);
+  // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to 
release locks
+  ShutdownHookManager.addShutdownHook(
+  new Runnable() {
+@Override
+public void run() {
+  try {
+releaseLocksAndCommitOrRollback(false, txnManager);
+  } catch (LockException e) {
+LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
+e.getMessage());
+  }
+}
+  }, SHUTDOWN_HOOK_PRIORITY);
 
   command = new VariableSubstitution(new HiveVariableSource() {
 @Override
@@ -537,7 +552,7 @@ public class Driver implements CommandProcessor {
*
* @param sem semantic analyzer for analyzed query
* @param plan query plan
-   * @param astStringTree AST tree dump
+   * @param astTree AST tree dump
* @throws java.io.IOException
*/
   private String getExplainOutput(BaseSemanticAnalyzer sem, QueryPlan plan,
@@ -1049,15 +1064,21 @@ public class Driver implements CommandProcessor {
   /**
* @param commit if there is an open transaction and if true, commit,
*   if false rollback.  If there is no open transaction this 
parameter is ignored.
+   * @param txnManager an optional existing transaction manager retrieved 
earlier from the session
*
**/
-  private void releaseLocksAndCommitOrRollback(boolean commit)
+  private void releaseLocksAndCommitOrRollback(boolean commit, HiveTxnManager 
txnManager)
   throws LockException {
 PerfLogger perfLogger = SessionState.getPerfLogger();
 perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RELEASE_LOCKS);
 
-SessionState ss = SessionState.get();
-HiveTxnManager txnMgr = ss.getTxnMgr();
+HiveTxnManager txnMgr;
+if (txnManager == null) {
+  SessionState ss = SessionState.get();
+  txnMgr = ss.getTxnMgr();
+} else {
+  txnMgr = txnManager;
+}
 // If we've opened a transaction we need to commit or rollback rather than 
explicitly
 // releasing the locks.
 if (txnMgr.isTxnOpen()) {
@@ -1206,7 +1227,7 @@ public class Driver implements CommandProcessor {
 }
 if (ret != 0) {
   try {
-releaseLocksAndCommitOrRollback(false);
+releaseLocksAndCommitOrRollback(false, null);
   } catch (LockException e) {
 LOG.warn("Exception in releasing locks. "
 + org.apache.hadoop.util.StringUtils.stringifyException(e));
@@ -1287,7 +1308,7 @@ public class Driver implements CommandProcessor {
 if(plan.getAutoCommitValue() && !txnManager.getAutoCommit()) {
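
The fix registers a shutdown hook so that a JVM killed by a double Ctrl-C
still releases its ACID locks. A minimal sketch of the pattern using the
plain JDK Runtime hook in place of Hive's ShutdownHookManager; TxnManager
here is a stand-in for HiveTxnManager, and rollback() stands in for
releaseLocksAndCommitOrRollback(false, txnManager).

// Minimal sketch of cleanup-on-shutdown with the plain JDK API.
public class ShutdownCleanup {
  interface TxnManager { void rollback(); }

  static void register(final TxnManager txnManager) {
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
      try {
        txnManager.rollback(); // release locks / roll back the open txn
      } catch (RuntimeException e) {
        System.err.println("Exception releasing locks in shutdown hook: " + e.getMessage());
      }
    }));
  }

  public static void main(String[] args) {
    register(() -> System.out.println("rolled back and released locks"));
  }
}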
   

[2/2] hive git commit: HIVE-12202 NPE thrown when reading legacy ACID delta files (Elliot West via Eugene Koifman)

2015-11-03 Thread ekoifman
HIVE-12202 NPE thrown when reading legacy ACID delta files (Elliot West via 
Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/89703e7d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/89703e7d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/89703e7d

Branch: refs/heads/master
Commit: 89703e7d0f385a5e93208f55703d4cbf85329fef
Parents: 595fa99
Author: Eugene Koifman 
Authored: Tue Nov 3 09:06:19 2015 -0800
Committer: Eugene Koifman 
Committed: Tue Nov 3 09:06:19 2015 -0800

--
 .../org/apache/hadoop/hive/ql/io/AcidInputFormat.java | 14 +-
 1 file changed, 5 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/89703e7d/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
index 24506b7..7c7074d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/AcidInputFormat.java
@@ -33,7 +33,6 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 
 /**
@@ -115,11 +114,14 @@ public interface AcidInputFormat
 private List<Integer> stmtIds;
 
 public DeltaMetaData() {
-  this(0,0,null);
+  this(0,0,new ArrayList<Integer>());
 }
 DeltaMetaData(long minTxnId, long maxTxnId, List<Integer> stmtIds) {
   this.minTxnId = minTxnId;
   this.maxTxnId = maxTxnId;
+  if (stmtIds == null) {
+throw new IllegalArgumentException("stmtIds == null");
+  }
   this.stmtIds = stmtIds;
 }
 long getMinTxnId() {
@@ -136,9 +138,6 @@ public interface AcidInputFormat
   out.writeLong(minTxnId);
   out.writeLong(maxTxnId);
   out.writeInt(stmtIds.size());
-  if(stmtIds == null) {
-return;
-  }
   for(Integer id : stmtIds) {
 out.writeInt(id);
   }
@@ -147,11 +146,8 @@ public interface AcidInputFormat
 public void readFields(DataInput in) throws IOException {
   minTxnId = in.readLong();
   maxTxnId = in.readLong();
+  stmtIds.clear();
   int numStatements = in.readInt();
-  if(numStatements <= 0) {
-return;
-  }
-  stmtIds = new ArrayList<>();
   for(int i = 0; i < numStatements; i++) {
 stmtIds.add(in.readInt());
   }



hive git commit: HIVE-12252 Streaming API HiveEndPoint can be created w/o partitionVals for partitioned table (Wei Zheng via Eugene Koifman)

2015-11-05 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 6d936b533 -> 0918ff959


HIVE-12252 Streaming API HiveEndPoint can be created w/o partitionVals for 
partitioned table (Wei Zheng via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0918ff95
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0918ff95
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0918ff95

Branch: refs/heads/master
Commit: 0918ff959e6b0fd67a6b8b478290436af9532a31
Parents: 6d936b5
Author: Eugene Koifman 
Authored: Thu Nov 5 10:07:30 2015 -0800
Committer: Eugene Koifman 
Committed: Thu Nov 5 10:07:30 2015 -0800

--
 .../hcatalog/streaming/ConnectionError.java |  4 ++
 .../hive/hcatalog/streaming/HiveEndPoint.java   | 51 +++-
 .../hive/hcatalog/streaming/TestStreaming.java  | 35 +++---
 3 files changed, 71 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0918ff95/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
index 1aeef76..ffa51c9 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
@@ -20,6 +20,10 @@ package org.apache.hive.hcatalog.streaming;
 
 public class ConnectionError extends StreamingException {
 
+  public ConnectionError(String msg) {
+super(msg);
+  }
+
   public ConnectionError(String msg, Exception innerEx) {
 super(msg, innerEx);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/0918ff95/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 306c93d..2f2d44a 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -279,23 +279,48 @@ public class HiveEndPoint {
   }
 }
 
-private void checkEndPoint(HiveEndPoint endPoint, IMetaStoreClient 
msClient) throws InvalidTable {
-  // 1 - check if TBLPROPERTIES ('transactional'='true') is set on table
+/**
+ * Checks the validity of endpoint
+ * @param endPoint the HiveEndPoint to be checked
+ * @param msClient the metastore client
+ * @throws InvalidTable
+ */
+private void checkEndPoint(HiveEndPoint endPoint, IMetaStoreClient 
msClient)
+throws InvalidTable, ConnectionError {
+  Table t;
   try {
-Table t = msClient.getTable(endPoint.database, endPoint.table);
-Map<String, String> params = t.getParameters();
-if(params != null) {
-  String transactionalProp = params.get("transactional");
-  if (transactionalProp != null && 
transactionalProp.equalsIgnoreCase("true")) {
-return;
-  }
-}
-LOG.error("'transactional' property is not set on Table " + endPoint);
-throw new InvalidTable(endPoint.database, endPoint.table, 
"\'transactional\' property is not set on Table");
+t = msClient.getTable(endPoint.database, endPoint.table);
   } catch (Exception e) {
-LOG.warn("Unable to check if Table is transactional. " + endPoint, e);
+LOG.warn("Unable to check the endPoint: " + endPoint, e);
 throw new InvalidTable(endPoint.database, endPoint.table, e);
   }
+
+  // 1 - check if TBLPROPERTIES ('transactional'='true') is set on table
+  Map<String, String> params = t.getParameters();
+  if (params != null) {
+String transactionalProp = params.get("transactional");
+if (transactionalProp == null || 
!transactionalProp.equalsIgnoreCase("true")) {
+  LOG.error("'transactional' property is not set on Table " + 
endPoint);
+  throw new InvalidTable(endPoint.database, endPoint.table, 
"\'transactional\' property" +
+  " is not set on Table");  }
+  }
+
+  // 2 - check if partitionvals are legitimate
+  if (t.getPartitionKeys() != null && !t.getPartitionKeys().isEmpty()
+  && endPoint.partitionVals.isEmpty()) {
+// Invalid if table is partitioned, but endPoint's partitionVals is 
empty
+String errMsg = "HiveEndPoint " + endPoint + " doesn't 
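
checkEndPoint now performs two validations before a connection is handed out:
the table must be transactional, and a partitioned table must come with
partition values. A condensed sketch of both checks, with plain stand-in
arguments in place of the metastore Table and the HiveEndPoint:

import java.util.*;

// Condensed sketch of the two validations above, using stand-in arguments.
class EndPointChecks {
  static void check(Map<String, String> tableParams,
                    List<String> partitionKeys,
                    List<String> partitionVals) {
    // 1 - the table must carry TBLPROPERTIES ('transactional'='true')
    String transactional = tableParams == null ? null : tableParams.get("transactional");
    if (!"true".equalsIgnoreCase(transactional)) {
      throw new IllegalArgumentException("'transactional' property is not set on Table");
    }
    // 2 - a partitioned table needs partition values on the endpoint
    if (partitionKeys != null && !partitionKeys.isEmpty() && partitionVals.isEmpty()) {
      throw new IllegalArgumentException("partitionVals required for a partitioned table");
    }
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<>();
    params.put("transactional", "true");
    check(params, Arrays.asList("ds"), Arrays.asList("2015-11-05")); // passes
  }
}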

hive git commit: HIVE-12252 Streaming API HiveEndPoint can be created w/o partitionVals for partitioned table (Wei Zheng via Eugene Koifman)

2015-11-05 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 967f5c201 -> 33e818aad


HIVE-12252 Streaming API HiveEndPoint can be created w/o partitionVals for 
partitioned table (Wei Zheng via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/33e818aa
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/33e818aa
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/33e818aa

Branch: refs/heads/branch-1
Commit: 33e818aad84c8e22dba62ad81dc0967bc128ea83
Parents: 967f5c2
Author: Eugene Koifman 
Authored: Thu Nov 5 10:17:05 2015 -0800
Committer: Eugene Koifman 
Committed: Thu Nov 5 10:17:05 2015 -0800

--
 .../hcatalog/streaming/ConnectionError.java |  4 ++
 .../hive/hcatalog/streaming/HiveEndPoint.java   | 51 +++-
 .../hive/hcatalog/streaming/TestStreaming.java  | 35 +++---
 3 files changed, 71 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/33e818aa/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
index 1aeef76..ffa51c9 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/ConnectionError.java
@@ -20,6 +20,10 @@ package org.apache.hive.hcatalog.streaming;
 
 public class ConnectionError extends StreamingException {
 
+  public ConnectionError(String msg) {
+super(msg);
+  }
+
   public ConnectionError(String msg, Exception innerEx) {
 super(msg, innerEx);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/33e818aa/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
--
diff --git 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index 5de3f1d..59bb272 100644
--- 
a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ 
b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -279,23 +279,48 @@ public class HiveEndPoint {
   }
 }
 
-private void checkEndPoint(HiveEndPoint endPoint, IMetaStoreClient 
msClient) throws InvalidTable {
-  // 1 - check if TBLPROPERTIES ('transactional'='true') is set on table
+/**
+ * Checks the validity of endpoint
+ * @param endPoint the HiveEndPoint to be checked
+ * @param msClient the metastore client
+ * @throws InvalidTable
+ */
+private void checkEndPoint(HiveEndPoint endPoint, IMetaStoreClient 
msClient)
+throws InvalidTable, ConnectionError {
+  Table t;
   try {
-Table t = msClient.getTable(endPoint.database, endPoint.table);
-Map<String, String> params = t.getParameters();
-if(params != null) {
-  String transactionalProp = params.get("transactional");
-  if (transactionalProp != null && 
transactionalProp.equalsIgnoreCase("true")) {
-return;
-  }
-}
-LOG.error("'transactional' property is not set on Table " + endPoint);
-throw new InvalidTable(endPoint.database, endPoint.table, 
"\'transactional\' property is not set on Table");
+t = msClient.getTable(endPoint.database, endPoint.table);
   } catch (Exception e) {
-LOG.warn("Unable to check if Table is transactional. " + endPoint, e);
+LOG.warn("Unable to check the endPoint: " + endPoint, e);
 throw new InvalidTable(endPoint.database, endPoint.table, e);
   }
+
+  // 1 - check if TBLPROPERTIES ('transactional'='true') is set on table
+  Map<String, String> params = t.getParameters();
+  if (params != null) {
+String transactionalProp = params.get("transactional");
+if (transactionalProp == null || 
!transactionalProp.equalsIgnoreCase("true")) {
+  LOG.error("'transactional' property is not set on Table " + 
endPoint);
+  throw new InvalidTable(endPoint.database, endPoint.table, 
"\'transactional\' property" +
+  " is not set on Table");  }
+  }
+
+  // 2 - check if partitionvals are legitimate
+  if (t.getPartitionKeys() != null && !t.getPartitionKeys().isEmpty()
+  && endPoint.partitionVals.isEmpty()) {
+// Invalid if table is partitioned, but endPoint's partitionVals is 
empty
+String errMsg = "HiveEndPoint " + endPoint + " 

hive git commit: HIVE-12266 When client exits abnormally, it doesn't release ACID locks (Wei Zheng, via Eugene Koifman)

2015-11-03 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/branch-1 703526bc3 -> 87e5b4ef2


HIVE-12266 When client exits abnormally, it doesn't release ACID locks (Wei 
Zheng, via Eugene Koifman)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/87e5b4ef
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/87e5b4ef
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/87e5b4ef

Branch: refs/heads/branch-1
Commit: 87e5b4ef2f3a05f1c902b85588d1d96f8fe560b9
Parents: 703526b
Author: Eugene Koifman 
Authored: Tue Nov 3 11:47:52 2015 -0800
Committer: Eugene Koifman 
Committed: Tue Nov 3 11:47:52 2015 -0800

--
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 43 +++-
 1 file changed, 32 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/87e5b4ef/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 9b6104e..33c6ab5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -119,12 +119,14 @@ import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.mapred.ClusterStatus;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hive.common.util.ShutdownHookManager;
 
 public class Driver implements CommandProcessor {
 
   static final private String CLASS_NAME = Driver.class.getName();
   static final private Log LOG = LogFactory.getLog(CLASS_NAME);
   static final private LogHelper console = new LogHelper(LOG);
+  static final int SHUTDOWN_HOOK_PRIORITY = 0;
 
   private static final Object compileMonitor = new Object();
 
@@ -387,7 +389,20 @@ public class Driver implements CommandProcessor {
 
 try {
   // Initialize the transaction manager.  This must be done before analyze 
is called.
-  SessionState.get().initTxnMgr(conf);
+  final HiveTxnManager txnManager = SessionState.get().initTxnMgr(conf);
+  // In case when user Ctrl-C twice to kill Hive CLI JVM, we want to 
release locks
+  ShutdownHookManager.addShutdownHook(
+  new Runnable() {
+@Override
+public void run() {
+  try {
+releaseLocksAndCommitOrRollback(ctx.getHiveLocks(), false, 
txnManager);
+  } catch (LockException e) {
+LOG.warn("Exception when releasing locks in ShutdownHook for 
Driver: " +
+e.getMessage());
+  }
+}
+  }, SHUTDOWN_HOOK_PRIORITY);
 
   command = new VariableSubstitution().substitute(conf, command);
   ctx = new Context(conf);
@@ -1037,16 +1052,22 @@ public class Driver implements CommandProcessor {
*  list of hive locks to be released Release all the locks 
specified. If some of the
*  locks have already been released, ignore them
* @param commit if there is an open transaction and if true, commit,
-   *   if false rollback.  If there is no open transaction this 
parameter is ignored.
+   * @param txnManager an optional existing transaction manager retrieved 
earlier from the session
*
**/
-  private void releaseLocksAndCommitOrRollback(List<HiveLock> hiveLocks, 
boolean commit)
+  private void releaseLocksAndCommitOrRollback(List<HiveLock> hiveLocks, 
boolean commit,
+   HiveTxnManager txnManager)
   throws LockException {
 PerfLogger perfLogger = PerfLogger.getPerfLogger();
 perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.RELEASE_LOCKS);
 
-SessionState ss = SessionState.get();
-HiveTxnManager txnMgr = ss.getTxnMgr();
+HiveTxnManager txnMgr;
+if (txnManager == null) {
+  SessionState ss = SessionState.get();
+  txnMgr = ss.getTxnMgr();
+} else {
+  txnMgr = txnManager;
+}
 // If we've opened a transaction we need to commit or rollback rather than 
explicitly
 // releasing the locks.
 if (txnMgr.isTxnOpen()) {
@@ -1146,7 +1167,7 @@ public class Driver implements CommandProcessor {
 }
 if (ret != 0) {
   try {
-releaseLocksAndCommitOrRollback(ctx.getHiveLocks(), false);
+releaseLocksAndCommitOrRollback(ctx.getHiveLocks(), false, null);
   } catch (LockException e) {
 LOG.warn("Exception in releasing locks. "
 + org.apache.hadoop.util.StringUtils.stringifyException(e));
@@ -1231,7 +1252,7 @@ public class Driver implements CommandProcessor {
 if(plan.getAutoCommitValue() && !txnManager.getAutoCommit()) {
   /*here, if there is an open txn, we want to commit it; this 
