svn commit: r1537799 - in /hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql: io/orc/OrcInputFormat.java io/orc/ReaderImpl.java log/PerfLogger.java

2013-10-31 Thread gunther
Author: gunther
Date: Fri Nov  1 01:16:12 2013
New Revision: 1537799

URL: http://svn.apache.org/r1537799
Log:
HIVE-5719: Remove some overly noisy perflogger statements from Tez codepath 
(Gunther Hagleitner)

Modified:

hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java

hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java

Modified: 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java?rev=1537799&r1=1537798&r2=1537799&view=diff
==
--- 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
 (original)
+++ 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
 Fri Nov  1 01:16:12 2013
@@ -43,6 +43,8 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedInputFormatInterface;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.io.InputFormatChecker;
+import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.FileGenerator;
+import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat.SplitGenerator;
 import org.apache.hadoop.hive.ql.io.orc.Reader.FileMetaInfo;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.log.PerfLogger;
@@ -457,7 +459,6 @@ public class OrcInputFormat  implements 
 @Override
 public void run() {
   try {
-perfLogger.PerfLogBegin(CLASS_NAME, 
PerfLogger.ORC_GET_BLOCK_LOCATIONS);
 Iterator itr = context.shims.listLocatedStatus(fs, dir,
 hiddenFileFilter);
 while (itr.hasNext()) {
@@ -470,8 +471,6 @@ public class OrcInputFormat  implements 
 context.schedule(new SplitGenerator(context, fs, file, fileInfo));
   }
 }
-// mark the fact that we are done
-perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.ORC_GET_BLOCK_LOCATIONS);
   } catch (Throwable th) {
 if (!(th instanceof IOException)) {
   LOG.error("Unexpected Exception", th);
@@ -634,7 +633,6 @@ public class OrcInputFormat  implements 
  */
 @Override
 public void run() {
-  perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.CREATE_ORC_SPLITS);
   try {
 populateAndCacheStripeDetails();
 long currentOffset = -1;
@@ -675,7 +673,6 @@ public class OrcInputFormat  implements 
   } finally {
 context.decrementSchedulers();
   }
-  perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.CREATE_ORC_SPLITS);
 }
 
 

Modified: 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java?rev=1537799&r1=1537798&r2=1537799&view=diff
==
--- 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java 
(original)
+++ 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java 
Fri Nov  1 01:16:12 2013
@@ -288,7 +288,6 @@ final class ReaderImpl implements Reader
* @throws IOException
*/
   ReaderImpl(FileSystem fs, Path path) throws IOException {
-perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.INIT_ORC_RECORD_READER);
 this.fileSystem = fs;
 this.path = path;
 
@@ -303,7 +302,6 @@ final class ReaderImpl implements Reader
 this.bufferSize = rInfo.bufferSize;
 this.footer = rInfo.footer;
 this.inspector = rInfo.inspector;
-perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.INIT_ORC_RECORD_READER);
   }
 
 
@@ -317,7 +315,6 @@ final class ReaderImpl implements Reader
*/
   ReaderImpl(FileSystem fs, Path path, FileMetaInfo fMetaInfo)
   throws IOException {
-perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.INIT_ORC_RECORD_READER);
 this.fileSystem = fs;
 this.path = path;
 
@@ -332,8 +329,6 @@ final class ReaderImpl implements Reader
 this.bufferSize = rInfo.bufferSize;
 this.footer = rInfo.footer;
 this.inspector = rInfo.inspector;
-perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.INIT_ORC_RECORD_READER);
-
   }
 
 

Modified: 
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java?rev=1537799&r1=1537798&r2=1537799&view=diff
==
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
(original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
Fri Nov  1 01:16:12 2013
@@ -63,10 +63,7 @@ public class 

svn commit: r1537786 - in /hive/branches/tez/ql: build.xml src/test/org/apache/hadoop/hive/ql/QTestUtil.java

2013-10-31 Thread gunther
Author: gunther
Date: Fri Nov  1 00:48:55 2013
New Revision: 1537786

URL: http://svn.apache.org/r1537786
Log:
HIVE-5703: While using tez, QTest needs to close the session before creating a new 
one (Vikram Dixit K via Gunther Hagleitner)

Modified:
hive/branches/tez/ql/build.xml
hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

Modified: hive/branches/tez/ql/build.xml
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/ql/build.xml?rev=1537786&r1=1537785&r2=1537786&view=diff
==
--- hive/branches/tez/ql/build.xml (original)
+++ hive/branches/tez/ql/build.xml Fri Nov  1 00:48:55 2013
@@ -139,7 +139,6 @@
 
   
   
-
 http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1537786&r1=1537785&r2=1537786&view=diff
==
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java 
(original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Fri 
Nov  1 00:48:55 2013
@@ -114,6 +114,7 @@ public class QTestUtil {
 "src_sequencefile", "srcpart", "alltypesorc"
   }));
 
+  private static MiniClusterType clusterType = MiniClusterType.none;
   private ParseDriver pd;
   private Hive db;
   protected HiveConf conf;
@@ -313,6 +314,7 @@ public class QTestUtil {
 qMap = new TreeMap();
 qSkipSet = new HashSet();
 qSortSet = new HashSet();
+this.clusterType = clusterType;
 
 HadoopShims shims = null;
 switch (clusterType) {
@@ -790,6 +792,11 @@ public class QTestUtil {
 ss.err = new CachingPrintStream(fo, true, "UTF-8");
 ss.setIsSilent(true);
 SessionState oldSs = SessionState.get();
+
+if (oldSs != null && clusterType == MiniClusterType.tez) {
+  oldSs.close();
+}
+
 if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
   oldSs.out.close();
 }




svn commit: r1537781 - in /hive/branches/tez: hbase-handler/src/test/org/apache/hadoop/hive/hbase/ hbase-handler/src/test/templates/ ql/src/test/org/apache/hadoop/hive/ql/ ql/src/test/templates/

2013-10-31 Thread gunther
Author: gunther
Date: Fri Nov  1 00:38:11 2013
New Revision: 1537781

URL: http://svn.apache.org/r1537781
Log:
HIVE-5688: TestCliDriver compilation fails on tez branch. (Vikram Dixit K via 
Gunther Hagleitner)

Modified:

hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
hive/branches/tez/hbase-handler/src/test/templates/TestHBaseCliDriver.vm

hive/branches/tez/hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/TestLocationQueries.java
hive/branches/tez/ql/src/test/templates/TestCliDriver.vm
hive/branches/tez/ql/src/test/templates/TestNegativeCliDriver.vm
hive/branches/tez/ql/src/test/templates/TestParse.vm
hive/branches/tez/ql/src/test/templates/TestParseNegative.vm

Modified: 
hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java?rev=1537781&r1=1537780&r2=1537781&view=diff
==
--- 
hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
 (original)
+++ 
hive/branches/tez/hbase-handler/src/test/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
 Fri Nov  1 00:38:11 2013
@@ -18,13 +18,14 @@
 package org.apache.hadoop.hive.hbase;
 
 import org.apache.hadoop.hive.ql.QTestUtil;
+import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 
 /**
  * HBaseQTestUtil initializes HBase-specific test fixtures.
  */
 public class HBaseQTestUtil extends QTestUtil {
   public HBaseQTestUtil(
-String outDir, String logDir, String miniMr, HBaseTestSetup setup)
+String outDir, String logDir, MiniClusterType miniMr, HBaseTestSetup setup)
 throws Exception {
 
 super(outDir, logDir, miniMr, null);

Modified: 
hive/branches/tez/hbase-handler/src/test/templates/TestHBaseCliDriver.vm
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/templates/TestHBaseCliDriver.vm?rev=1537781&r1=1537780&r2=1537781&view=diff
==
--- hive/branches/tez/hbase-handler/src/test/templates/TestHBaseCliDriver.vm 
(original)
+++ hive/branches/tez/hbase-handler/src/test/templates/TestHBaseCliDriver.vm 
Fri Nov  1 00:38:11 2013
@@ -24,6 +24,7 @@ import junit.framework.TestSuite;
 import java.io.*;
 import java.util.*;
 
+import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import org.apache.hadoop.hive.hbase.HBaseQTestUtil;
 import org.apache.hadoop.hive.hbase.HBaseTestSetup;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -42,10 +43,11 @@ public class $className extends TestCase
 
   @Override
   protected void setUp() {
+
+MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
+
 try {
-  String miniMR = "$clusterMode";
   qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + 
"$logDir"), miniMR, setup);
-
 } catch (Exception e) {
   System.err.println("Exception: " + e.getMessage());
   e.printStackTrace();

Modified: 
hive/branches/tez/hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm?rev=1537781&r1=1537780&r2=1537781&view=diff
==
--- 
hive/branches/tez/hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
 (original)
+++ 
hive/branches/tez/hbase-handler/src/test/templates/TestHBaseNegativeCliDriver.vm
 Fri Nov  1 00:38:11 2013
@@ -25,6 +25,7 @@ import junit.framework.TestSuite;
 import java.io.*;
 import java.util.*;
 
+import org.apache.hadoop.hive.ql.QTestUtil.MiniClusterType;
 import org.apache.hadoop.hive.hbase.HBaseQTestUtil;
 import org.apache.hadoop.hive.hbase.HBaseTestSetup;
 
@@ -42,11 +43,11 @@ public class $className extends TestCase
 
   @Override
   protected void setUp() {
-try {
-  String miniMR = "$clusterMode";
 
-  qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + 
"$logDir"), miniMR, setup);
+MiniClusterType miniMR = MiniClusterType.valueForString("$clusterMode");
 
+try {
+  qt = new HBaseQTestUtil((HIVE_ROOT + "$resultsDir"), (HIVE_ROOT + 
"$logDir"), miniMR, setup);
 } catch (Exception e) {
   System.err.println("Exception: " + e.getMessage());
   e.printStackTrace();

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: 
http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1537781&r1=1537780&r2=1537781&view=diff
==
--- hive/branches/tez/ql/src/

svn commit: r1537742 - in /hive/trunk/ql/src/test: queries/clientpositive/ results/clientpositive/

2013-10-31 Thread brock
Author: brock
Date: Thu Oct 31 22:29:29 2013
New Revision: 1537742

URL: http://svn.apache.org/r1537742
Log:
HIVE-5716 - Fix broken tests after maven merge (1) (Brock Noland reviewed by 
Thejas M Nair and Ashutosh Chauhan)

Modified:
hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q
hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q
hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q
hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q
hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out
hive/trunk/ql/src/test/results/clientpositive/metadata_only_queries.q.out
hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q?rev=1537742&r1=1537741&r2=1537742&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q 
(original)
+++ hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_1.q Thu 
Oct 31 22:29:29 2013
@@ -13,7 +13,7 @@ CREATE TABLE part( 
 p_comment STRING
 );
 
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table 
part;
 
 
 

Modified: hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q?rev=1537742&r1=1537741&r2=1537742&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q 
(original)
+++ hive/trunk/ql/src/test/queries/clientpositive/join_cond_pushdown_2.q Thu 
Oct 31 22:29:29 2013
@@ -13,7 +13,7 @@ CREATE TABLE part( 
 p_comment STRING
 );
 
-LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite into table part;
+LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' overwrite into table 
part;
 
 
 explain select *

Modified: hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q?rev=1537742&r1=1537741&r2=1537742&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q 
(original)
+++ hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q Thu 
Oct 31 22:29:29 2013
@@ -15,7 +15,7 @@ create table over10k(
row format delimited
fields terminated by '|';
 
-load data local inpath '../data/files/over10k' into table over10k;
+load data local inpath '../../data/files/over10k' into table over10k;
 
 create table stats_tbl(
t tinyint,

Modified: hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q?rev=1537742&r1=1537741&r2=1537742&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q Thu Oct 31 
22:29:29 2013
@@ -6,7 +6,7 @@ CREATE TABLE stats_null(a double, b int,
 
 CREATE TABLE stats_null_part(a double, b int, c STRING, d smallint) 
partitioned by (dt string) STORED AS TEXTFILE; 
 
-LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE temps_null;
+LOAD DATA LOCAL INPATH '../../data/files/null.txt' INTO TABLE temps_null;
 
 insert overwrite table stats_null select * from temps_null;
 insert overwrite table stats_null_part partition(dt='2010') select * from 
temps_null where d <=5;

Modified: 
hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out?rev=1537742&r1=1537741&r2=1537742&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out 
(original)
+++ hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out 
Thu Oct 31 22:29:29 2013
@@ -29,10 +29,10 @@ CREATE TABLE part( 
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@part
-PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite 
into table part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../../data/files/part_tiny.txt' 
overwrite into table part
 PREHOOK: type: LOAD
 PREHOOK: Output: default@part
-POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' 
overwrite into tab

svn commit: r1537667 [3/3] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/metadata/ ql/src/java/

2013-10-31 Thread thejas
Added: hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out?rev=1537667&view=auto
==
--- hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out Thu Oct 
31 21:22:02 2013
@@ -0,0 +1,499 @@
+PREHOOK: query: CREATE TABLE temps_null(a double, b int, c STRING, d smallint) 
STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE temps_null(a double, b int, c STRING, d 
smallint) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@temps_null
+PREHOOK: query: CREATE TABLE stats_null(a double, b int, c STRING, d smallint) 
STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE stats_null(a double, b int, c STRING, d 
smallint) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@stats_null
+PREHOOK: query: CREATE TABLE stats_null_part(a double, b int, c STRING, d 
smallint) partitioned by (dt string) STORED AS TEXTFILE
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE stats_null_part(a double, b int, c STRING, d 
smallint) partitioned by (dt string) STORED AS TEXTFILE
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@stats_null_part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE 
temps_null
+PREHOOK: type: LOAD
+PREHOOK: Output: default@temps_null
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/null.txt' INTO TABLE 
temps_null
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@temps_null
+PREHOOK: query: insert overwrite table stats_null select * from temps_null
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temps_null
+PREHOOK: Output: default@stats_null
+POSTHOOK: query: insert overwrite table stats_null select * from temps_null
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temps_null
+POSTHOOK: Output: default@stats_null
+POSTHOOK: Lineage: stats_null.a SIMPLE 
[(temps_null)temps_null.FieldSchema(name:a, type:double, comment:null), ]
+POSTHOOK: Lineage: stats_null.b SIMPLE 
[(temps_null)temps_null.FieldSchema(name:b, type:int, comment:null), ]
+POSTHOOK: Lineage: stats_null.c SIMPLE 
[(temps_null)temps_null.FieldSchema(name:c, type:string, comment:null), ]
+POSTHOOK: Lineage: stats_null.d SIMPLE 
[(temps_null)temps_null.FieldSchema(name:d, type:smallint, comment:null), ]
+PREHOOK: query: insert overwrite table stats_null_part partition(dt='2010') 
select * from temps_null where d <=5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temps_null
+PREHOOK: Output: default@stats_null_part@dt=2010
+POSTHOOK: query: insert overwrite table stats_null_part partition(dt='2010') 
select * from temps_null where d <=5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temps_null
+POSTHOOK: Output: default@stats_null_part@dt=2010
+POSTHOOK: Lineage: stats_null.a SIMPLE 
[(temps_null)temps_null.FieldSchema(name:a, type:double, comment:null), ]
+POSTHOOK: Lineage: stats_null.b SIMPLE 
[(temps_null)temps_null.FieldSchema(name:b, type:int, comment:null), ]
+POSTHOOK: Lineage: stats_null.c SIMPLE 
[(temps_null)temps_null.FieldSchema(name:c, type:string, comment:null), ]
+POSTHOOK: Lineage: stats_null.d SIMPLE 
[(temps_null)temps_null.FieldSchema(name:d, type:smallint, comment:null), ]
+POSTHOOK: Lineage: stats_null_part PARTITION(dt=2010).a SIMPLE 
[(temps_null)temps_null.FieldSchema(name:a, type:double, comment:null), ]
+POSTHOOK: Lineage: stats_null_part PARTITION(dt=2010).b SIMPLE 
[(temps_null)temps_null.FieldSchema(name:b, type:int, comment:null), ]
+POSTHOOK: Lineage: stats_null_part PARTITION(dt=2010).c SIMPLE 
[(temps_null)temps_null.FieldSchema(name:c, type:string, comment:null), ]
+POSTHOOK: Lineage: stats_null_part PARTITION(dt=2010).d SIMPLE 
[(temps_null)temps_null.FieldSchema(name:d, type:smallint, comment:null), ]
+PREHOOK: query: insert overwrite table stats_null_part partition(dt='2011') 
select * from temps_null where d > 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@temps_null
+PREHOOK: Output: default@stats_null_part@dt=2011
+POSTHOOK: query: insert overwrite table stats_null_part partition(dt='2011') 
select * from temps_null where d > 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@temps_null
+POSTHOOK: Output: default@stats_null_part@dt=2011
+POSTHOOK: Lineage: stats_null.a SIMPLE 
[(temps_null)temps_null.FieldSchema(name:a, type:double, comment:null), ]
+POSTHOOK: Lineage: stats_null.b SIMPLE 
[(temps_null)temps_null.FieldSchema(name:b, type:int, comment:null), ]
+POSTHOOK: Lineage: stats_null.c SIMPLE 
[(temps_null)temps_null.FieldSchema(name:c, type:string, comment:null), ]
+POSTHOOK: Lineage: stats_null.d SIMPLE 
[(temps_null)temps_null.FieldSchema(name:d, type:smallint, comment:null), ]
+POSTHOOK: Lineage: stats_null_part PARTITION(dt=2010).a SIMPLE 
[(temps_null)temps_null.Field

svn commit: r1537667 [1/3] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/metadata/ ql/src/java/

2013-10-31 Thread thejas
Author: thejas
Date: Thu Oct 31 21:22:02 2013
New Revision: 1537667

URL: http://svn.apache.org/r1537667
Log:
HIVE-5483 : use metastore statistics to optimize max/min/etc. queries (Ashutosh 
Chauhan via Thejas Nair)

Added:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
hive/trunk/ql/src/test/queries/clientpositive/metadata_only_queries.q
hive/trunk/ql/src/test/queries/clientpositive/stats_only_null.q
hive/trunk/ql/src/test/results/clientpositive/metadata_only_queries.q.out
hive/trunk/ql/src/test/results/clientpositive/stats_only_null.q.out
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/conf/hive-default.xml.template
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1537667&r1=1537666&r2=1537667&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu 
Oct 31 21:22:02 2013
@@ -658,6 +658,8 @@ public class HiveConf extends Configurat
 
 HIVEFETCHTASKAGGR("hive.fetch.task.aggr", false),
 
+HIVEOPTIMIZEMETADATAQUERIES("hive.compute.query.using.stats", false),
+
 // Serde for FetchTask
 HIVEFETCHOUTPUTSERDE("hive.fetch.output.serde", 
"org.apache.hadoop.hive.serde2.DelimitedJSONSerDe"),
 

Modified: hive/trunk/conf/hive-default.xml.template
URL: 
http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1537667&r1=1537666&r2=1537667&view=diff
==
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Thu Oct 31 21:22:02 2013
@@ -2032,6 +2032,17 @@
 
 
 
+  hive.compute.query.using.stats
+  false
+  
+  When set to true, Hive will answer a few queries, such as count(1), purely using 
stats
+  stored in the metastore. For basic stats collection, turn on the config 
hive.stats.autogather by setting it to true.
+  For more advanced stats collection, you need to run analyze table queries.
+  
+
+
+
+
   hive.metastore.schema.verification
   false


Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java?rev=1537667&r1=1537666&r2=1537667&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FetchOperator.java 
Thu Oct 31 21:22:02 2013
@@ -498,6 +498,13 @@ public class FetchOperator implements Se
* Currently only used by FetchTask.
**/
   public boolean pushRow() throws IOException, HiveException {
+if(work.getRowsComputedUsingStats() != null) {
+  for (List row : work.getRowsComputedUsingStats()) {
+operator.process(row, 0);
+  }
+  operator.flush();
+  return true;
+}
 InspectableObject row = getNextRow();
 if (row != null) {
   pushRow(row);
@@ -609,6 +616,9 @@ public class FetchOperator implements Se
* returns output ObjectInspector, never null
*/
   public ObjectInspector getOutputObjectInspector() throws HiveException {
+if(null != work.getStatRowOI()) {
+  return work.getStatRowOI();
+}
 try {
   if (work.isNotPartitioned()) {
 return getRowInspectorFromTable(work.getTblDesc());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1537667&r1=1537666&r2=1537667&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Thu Oct 
31 21:22:02 2013
@@ -48,6 +48,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.FileUtils;
+import 
org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
+import 
org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;

svn commit: r1537610 - in /hive/trunk/hcatalog: src/test/e2e/templeton/drivers/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ webhcat/svr/src/main/java/or

2013-10-31 Thread daijy
Author: daijy
Date: Thu Oct 31 20:02:30 2013
New Revision: 1537610

URL: http://svn.apache.org/r1537610
Log:
HIVE-5510: [WebHCat] GET job/queue returns wrong job information

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf

hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/ListDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/QueueStatusBean.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/StatusDelegator.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm?rev=1537610&r1=1537609&r2=1537610&view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm 
(original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm Thu 
Oct 31 20:02:30 2013
@@ -647,19 +647,16 @@ sub compare
   foreach my $key (keys %$json_matches) {
 my $regex_expected_value = $json_matches->{$key};
 my $path = JSON::Path->new($key);
-my $value; 
-# when filter_job_status is defined 
-if (defined $testCmd->{'filter_job_status'}) {
-   # decode $testResult->{'body'} to an array of hash
-   my $body = decode_json $testResult->{'body'};
-   # in the tests, we run this case with jobName = 
"PigLatin:loadstore.pig"
-   # filter $body to leave only records with this jobName
-   my @filtered_body = grep {($_->{detail}{profile}{jobName} eq 
"PigLatin:loadstore.pig")}  @$body;
-   my @sorted_filtered_body = sort { $a->{id} cmp $b->{id} 
} @filtered_body;
-   $value = $path->value(\@sorted_filtered_body);
+
+# decode $testResult->{'body'} to an array of hash
+my $body = decode_json $testResult->{'body'};
+my @sorted_body;
+if (ref @$body[0] eq 'HASH') {
+  @sorted_body = sort { $a->{id} cmp $b->{id} } @$body;
 } else {
-   $value = $path->value($testResult->{'body'});
+  @sorted_body = sort { $a cmp $b } @$body;
 }
+my $value = $path->value(\@sorted_body);
 
 if ($value !~ /$regex_expected_value/s) {
   print $log "$0::$subName INFO check failed:"

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf?rev=1537610&r1=1537609&r2=1537610&view=diff
==
--- hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobstatus.conf Thu Oct 31 
20:02:30 2013
@@ -89,7 +89,6 @@ $cfg = 
  'json_path' => {'$[-1:].detail.status.username' => ':UNAME_OTHER:', 
'$[-2:].detail.status.username' => ':UNAME_OTHER:', 
'$[-3:].detail.status.username' => ':UNAME:',
  '$[-4:].detail.status.username' => ':UNAME:', 
'$[-5:].detail.status.username' => ':UNAME_OTHER:', 
'$[-6:].detail.status.username' => ':UNAME_OTHER:'},
  'status_code' => 200,
- 'filter_job_status' => 1, 
 },
 {
  # GET jobs?user.name=UNAME_OTHER&fields=*, should get only jobs launched 
as UNAME_OTHER
@@ -134,11 +133,11 @@ $cfg = 
  '$[-1:].detail.profile.jobId' => 'job_.*',
  '$[-1:].detail.id' => 'job_.*',
  '$[-1:].detail.parentId' => 'job_.*',
- '$[-1:].detail.percentComplete' => '100%',
- '$[-1:].detail.exitValue' => '0',
- '$[-1:].detail.user' => ':UNAME_OTHER:',
- '$[-1:].detail.callback' => '^.+$',
- '$[-1:].detail.completed' => 'done',
+ '$[-2:].detail.exitValue' => '0',
+ '$[-2:].detail.user' => ':UNAME_OTHER:',
+ '$[-2:].detail.callback' => '^.+$',
+ '$[-2:].detail.completed' => 'done',
+ '$[-2:].detail.percentComplete' => '100%',
 },
  'status_code' => 200,
 },

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission_streaming.conf
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatal

svn commit: r1537585 - in /hive/trunk: ./ ant/ beeline/ cli/ common/ contrib/ eclipse-templates/ hbase-handler/ hcatalog/ hcatalog/build-support/ant/ hcatalog/core/ hcatalog/hcatalog-pig-adapter/ hcat

2013-10-31 Thread brock
Author: brock
Date: Thu Oct 31 18:52:58 2013
New Revision: 1537585

URL: http://svn.apache.org/r1537585
Log:
HIVE-5610 - Merge maven branch into trunk (delete ant)

Removed:
hive/trunk/ant/build.xml
hive/trunk/ant/ivy.xml
hive/trunk/beeline/build.xml
hive/trunk/beeline/ivy.xml
hive/trunk/build-common.xml
hive/trunk/build-offline.xml
hive/trunk/build.properties
hive/trunk/build.xml
hive/trunk/cli/build.xml
hive/trunk/cli/ivy.xml
hive/trunk/common/build.xml
hive/trunk/common/ivy.xml
hive/trunk/contrib/build.xml
hive/trunk/contrib/ivy.xml
hive/trunk/eclipse-templates/
hive/trunk/hbase-handler/build.xml
hive/trunk/hbase-handler/ivy.xml
hive/trunk/hcatalog/build-support/ant/build-common.xml
hive/trunk/hcatalog/build-support/ant/deploy.xml
hive/trunk/hcatalog/build-support/ant/test.xml
hive/trunk/hcatalog/build.xml
hive/trunk/hcatalog/core/build.xml
hive/trunk/hcatalog/core/pom-old.xml
hive/trunk/hcatalog/hcatalog-pig-adapter/build.xml
hive/trunk/hcatalog/hcatalog-pig-adapter/pom-old.xml
hive/trunk/hcatalog/ivy.xml
hive/trunk/hcatalog/pom-old.xml
hive/trunk/hcatalog/server-extensions/build.xml
hive/trunk/hcatalog/server-extensions/pom-old.xml
hive/trunk/hcatalog/storage-handlers/hbase/build.xml
hive/trunk/hcatalog/storage-handlers/hbase/pom-old.xml
hive/trunk/hcatalog/webhcat/java-client/build.xml
hive/trunk/hcatalog/webhcat/java-client/pom-old.xml
hive/trunk/hcatalog/webhcat/svr/build.xml
hive/trunk/hcatalog/webhcat/svr/pom-old.xml
hive/trunk/hwi/build.xml
hive/trunk/hwi/ivy.xml
hive/trunk/ivy/
hive/trunk/ivy.xml
hive/trunk/jdbc/build.xml
hive/trunk/jdbc/ivy.xml
hive/trunk/maven-delete-ant.sh
hive/trunk/maven-rollback.sh
hive/trunk/maven-rollforward.sh
hive/trunk/metastore/build.xml
hive/trunk/metastore/ivy.xml
hive/trunk/odbc/build.xml
hive/trunk/odbc/ivy.xml
hive/trunk/ql/build.xml
hive/trunk/ql/ivy.xml
hive/trunk/serde/build.xml
hive/trunk/serde/ivy.xml
hive/trunk/service/build.xml
hive/trunk/service/ivy.xml
hive/trunk/shims/build.xml
hive/trunk/shims/ivy.xml
hive/trunk/testutils/build.xml
hive/trunk/testutils/ivy.xml



svn commit: r1537581 - in /hive/trunk: ./ beeline/src/java/org/apache/hive/beeline/ beeline/src/main/ beeline/src/main/resources/ beeline/src/test/org/apache/hive/beeline/src/test/ common/src/java/con

2013-10-31 Thread brock
Author: brock
Date: Thu Oct 31 18:41:45 2013
New Revision: 1537581

URL: http://svn.apache.org/r1537581
Log:
HIVE-5610 - Merge maven branch into trunk (maven rollforward)

Added:
hive/trunk/beeline/src/main/
hive/trunk/beeline/src/main/resources/
hive/trunk/beeline/src/main/resources/BeeLine.properties
  - copied unchanged from r1537580, 
hive/trunk/beeline/src/java/org/apache/hive/beeline/BeeLine.properties
hive/trunk/beeline/src/main/resources/sql-keywords.properties
  - copied unchanged from r1537580, 
hive/trunk/beeline/src/java/org/apache/hive/beeline/sql-keywords.properties
hive/trunk/common/src/main/
hive/trunk/common/src/main/resources/
hive/trunk/common/src/main/resources/hive-log4j.properties
  - copied unchanged from r1537580, 
hive/trunk/common/src/java/conf/hive-log4j.properties
hive/trunk/data/conf/hive-log4j-old.properties
  - copied unchanged from r1537580, 
hive/trunk/data/conf/hive-log4j.properties
hive/trunk/data/conf/hive-log4j.properties
  - copied unchanged from r1537580, 
hive/trunk/data/conf/hive-log4j-new.properties
hive/trunk/data/conf/hive-site-old.xml
  - copied unchanged from r1537580, hive/trunk/data/conf/hive-site.xml
hive/trunk/data/conf/hive-site.xml
  - copied unchanged from r1537580, hive/trunk/data/conf/hive-site-new.xml
hive/trunk/hcatalog/core/pom-old.xml
  - copied unchanged from r1537580, hive/trunk/hcatalog/core/pom.xml
hive/trunk/hcatalog/core/pom.xml
  - copied unchanged from r1537580, hive/trunk/hcatalog/core/pom-new.xml
hive/trunk/hcatalog/hcatalog-pig-adapter/pom-old.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml
hive/trunk/hcatalog/hcatalog-pig-adapter/pom.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/hcatalog-pig-adapter/pom-new.xml
hive/trunk/hcatalog/pom-old.xml
  - copied unchanged from r1537580, hive/trunk/hcatalog/pom.xml
hive/trunk/hcatalog/pom.xml
  - copied unchanged from r1537580, hive/trunk/hcatalog/pom-new.xml
hive/trunk/hcatalog/server-extensions/pom-old.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/server-extensions/pom.xml
hive/trunk/hcatalog/server-extensions/pom.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/server-extensions/pom-new.xml
hive/trunk/hcatalog/storage-handlers/hbase/pom-old.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/storage-handlers/hbase/pom.xml
hive/trunk/hcatalog/storage-handlers/hbase/pom.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/storage-handlers/hbase/pom-new.xml
hive/trunk/hcatalog/webhcat/java-client/pom-old.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/webhcat/java-client/pom.xml
hive/trunk/hcatalog/webhcat/java-client/pom.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/webhcat/java-client/pom-new.xml
hive/trunk/hcatalog/webhcat/svr/pom-old.xml
  - copied unchanged from r1537580, hive/trunk/hcatalog/webhcat/svr/pom.xml
hive/trunk/hcatalog/webhcat/svr/pom.xml
  - copied unchanged from r1537580, 
hive/trunk/hcatalog/webhcat/svr/pom-new.xml
hive/trunk/itests/custom-serde/src/
hive/trunk/itests/custom-serde/src/main/
hive/trunk/itests/custom-serde/src/main/java/
hive/trunk/itests/custom-serde/src/main/java/org/
hive/trunk/itests/custom-serde/src/main/java/org/apache/
hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/
hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/
hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java
  - copied unchanged from r1537580, 
hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableListObjectInspector1.java

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java
  - copied unchanged from r1537580, 
hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableStructObjectInspector1.java

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java
  - copied unchanged from r1537580, 
hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/CustomNonSettableUnionObjectInspector1.java

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe1.java
  - copied unchanged from r1537580, 
hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe1.java

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe2.java
  - copied unchanged from r1537580, 
hive/trunk/ql/src/test/org/apache/hadoop/hive/serde2/CustomSerDe2.java

hive/trunk/itests/custom-serde/src/main/java/org/apache/hadoop/hive/serde2/CustomSerDe3.java
  - copied

svn commit: r1537576 [23/23] - in /hive/trunk: ./ ant/ ant/src/org/apache/hadoop/hive/ant/ beeline/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/src/test/ cli/ co

2013-10-31 Thread brock
Added: hive/trunk/service/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/service/pom.xml?rev=1537576&view=auto
==
--- hive/trunk/service/pom.xml (added)
+++ hive/trunk/service/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,161 @@
+
+
+http://maven.apache.org/POM/4.0.0";
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  4.0.0
+  
+org.apache.hive
+hive
+0.13.0-SNAPSHOT
+../pom.xml
+  
+
+  hive-service
+  jar
+  Hive Service
+
+  
+..
+  
+
+  
+
+
+  org.apache.hive
+  hive-exec
+  ${project.version}
+
+
+
+  commons-cli
+  commons-cli
+  ${commons-cli.version}
+
+
+  commons-logging
+  commons-logging
+  ${commons-logging.version}
+
+
+  commons-io
+  commons-io
+  ${commons-io.version}
+
+
+  org.apache.thrift
+  libfb303
+  ${libfb303.version}
+
+
+  org.apache.thrift
+  libthrift
+  ${libthrift.version}
+
+
+
+  org.apache.hive
+  hive-exec
+  ${project.version}
+  test
+  tests
+
+
+
+  junit
+  junit
+  ${junit.version}
+  test
+
+
+  org.mockito
+  mockito-all
+  ${mockito-all.version}
+  test
+
+  
+
+  
+
+  hadoop-1
+  
+true
+  
+  
+
+  org.apache.hadoop
+  hadoop-core
+  ${hadoop-20S.version}
+ true
+
+  
+
+   
+  hadoop-2
+  
+
+  org.apache.hadoop
+  hadoop-common
+  ${hadoop-23.version}
+  true
+
+
+  org.apache.hadoop
+  hadoop-mapreduce-client-core
+  ${hadoop-23.version}
+  true
+
+  
+
+  
+
+  
+${basedir}/src/java
+${basedir}/src/test
+
+  
+org.codehaus.mojo
+build-helper-maven-plugin
+
+  
+add-source
+generate-sources
+
+  add-source
+
+
+  
+src/model
+src/gen/thrift/gen-javabean
+  
+
+  
+
+  
+  
+org.apache.maven.plugins
+maven-jar-plugin
+
+  
+
+  test-jar
+
+  
+
+  
+
+  
+
+

Added: hive/trunk/shims/0.20/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/0.20/pom.xml?rev=1537576&view=auto
==
--- hive/trunk/shims/0.20/pom.xml (added)
+++ hive/trunk/shims/0.20/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,67 @@
+
+
+http://maven.apache.org/POM/4.0.0";
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  4.0.0
+  
+org.apache.hive
+hive
+0.13.0-SNAPSHOT
+../../pom.xml
+  
+
+  org.apache.hive.shims
+  hive-shims-0.20
+  jar
+  Hive Shims 0.20
+
+  
+../..
+  
+
+  
+
+
+  org.apache.hive.shims
+  hive-shims-common
+  ${project.version}
+
+
+
+  org.apache.hadoop
+  hadoop-core
+  ${hadoop-20.version}
+  true
+
+
+  org.apache.hadoop
+  hadoop-test
+  ${hadoop-20.version}
+  true
+
+
+  org.apache.hadoop
+  hadoop-tools
+  ${hadoop-20.version}
+  true
+
+
+  org.mortbay.jetty
+  jetty
+  ${jetty.version}
+
+  
+

Added: hive/trunk/shims/0.20S/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/0.20S/pom.xml?rev=1537576&view=auto
==
--- hive/trunk/shims/0.20S/pom.xml (added)
+++ hive/trunk/shims/0.20S/pom.xml Thu Oct 31 18:27:31 2013
@@ -0,0 +1,61 @@
+
+
+http://maven.apache.org/POM/4.0.0";
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  4.0.0
+  
+org.apache.hive
+hive
+0.13.0-SNAPSHOT
+../../pom.xml
+  
+
+  org.apache.hive.shims
+  hive-shims-0.20S
+  jar
+  Hive Shims 0.20S
+
+  
+../..
+  
+
+  
+
+
+  org.apache.hive.shims
+  hive-shims-common-secure
+  ${project.version}
+
+
+
+  org.apache.hadoop
+  hadoop-core
+  ${hadoop-20S.version}
+  true
+
+
+  org.apache.hadoop
+  hadoop-test
+  ${hadoop-20S.version}
+  true
+
+
+  org.mortbay.jetty
+  jetty
+  ${jetty.version}
+
+  
+

Added: hive/trunk/shims/0.23/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/0.23/pom.xm

svn commit: r1537501 - in /hive/trunk: ant/src/org/apache/hadoop/hive/ant/ ql/src/gen/vectorization/ExpressionTemplates/ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/ ql/src/java/org/apache/hadoo

2013-10-31 Thread hashutosh
Author: hashutosh
Date: Thu Oct 31 14:19:01 2013
New Revision: 1537501

URL: http://svn.apache.org/r1537501
Log:
HIVE-5582 : Implement BETWEEN filter in vectorized mode (Eric Hanson via 
Ashutosh Chauhan)

Added:

hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/FilterColumnBetween.txt

hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/FilterStringColumnBetween.txt
Modified:
hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorizationContext.java

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorFilterExpressions.java

Modified: hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java?rev=1537501&r1=1537500&r2=1537501&view=diff
==
--- hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java (original)
+++ hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java Thu Oct 31 
14:19:01 2013
@@ -215,6 +215,9 @@ public class GenVectorCode extends Task 
   {"FilterStringColumnCompareScalar", "Greater", ">"},
   {"FilterStringColumnCompareScalar", "GreaterEqual", ">="},
 
+  {"FilterStringColumnBetween", ""},
+  {"FilterStringColumnBetween", "!"},
+
   {"StringColumnCompareScalar", "Equal", "=="},
   {"StringColumnCompareScalar", "NotEqual", "!="},
   {"StringColumnCompareScalar", "Less", "<"},
@@ -276,6 +279,11 @@ public class GenVectorCode extends Task 
 {"FilterColumnCompareColumn", "GreaterEqual", "long", "long", ">="},
 {"FilterColumnCompareColumn", "GreaterEqual", "double", "long", ">="},
 
+  {"FilterColumnBetween", "long", ""},
+  {"FilterColumnBetween", "double", ""},
+  {"FilterColumnBetween", "long", "!"},
+  {"FilterColumnBetween", "double", "!"},
+
   {"ColumnCompareColumn", "Equal", "long", "double", "=="},
   {"ColumnCompareColumn", "Equal", "double", "double", "=="},
   {"ColumnCompareColumn", "NotEqual", "long", "double", "!="},
@@ -511,6 +519,8 @@ public class GenVectorCode extends Task 
 generateFilterColumnCompareScalar(tdesc);
   } else if (tdesc[0].equals("FilterScalarCompareColumn")) {
 generateFilterScalarCompareColumn(tdesc);
+  } else if (tdesc[0].equals("FilterColumnBetween")) {
+generateFilterColumnBetween(tdesc);
   } else if (tdesc[0].equals("ScalarArithmeticColumn")) {
 generateScalarArithmeticColumn(tdesc);
   } else if (tdesc[0].equals("FilterColumnCompareColumn")) {
@@ -535,6 +545,8 @@ public class GenVectorCode extends Task 
 generateVectorUDAFVar(tdesc);
   } else if (tdesc[0].equals("FilterStringColumnCompareScalar")) {
 generateFilterStringColumnCompareScalar(tdesc);
+  } else if (tdesc[0].equals("FilterStringColumnBetween")) {
+generateFilterStringColumnBetween(tdesc);
   } else if (tdesc[0].equals("StringColumnCompareScalar")) {
 generateStringColumnCompareScalar(tdesc);
   } else if (tdesc[0].equals("FilterStringScalarCompareColumn")) {
@@ -553,6 +565,40 @@ public class GenVectorCode extends Task 
 testCodeGen.generateTestSuites();
   }
 
+  private void generateFilterStringColumnBetween(String[] tdesc) throws 
IOException {
+String optionalNot = tdesc[1];
+String className = "FilterStringColumn" + (optionalNot.equals("!") ? "Not" 
: "")
++ "Between";
+String outputFile = joinPath(this.expressionOutputDirectory, className + 
".java");
+
+// Read the template into a string, expand it, and write it.
+String templateFile = joinPath(this.expressionTemplateDirectory, tdesc[0] 
+ ".txt");
+String templateString = readFile(templateFile);
+templateString = templateString.replaceAll("", className);
+templateString = templateString.replaceAll("", optionalNot);
+writeFile(outputFile, templateString);
+  }
+
+  private void generateFilterColumnBetween(String[] tdesc) throws IOException {
+String operandType = tdesc[1];
+String optionalNot = tdesc[2];
+
+String className = "Filter" + getCamelCaseType(operandType) + "Column" +
+  (optionalNot.equals("!") ? "Not" : "") + "Between";
+String inputColumnVectorType = getColumnVectorType(operandType);
+String outputFile = joinPath(this.expressionOutputDirectory, className + 
".java");
+
+// Read the template into a string, expand it, and write it.
+String templateFile = joinPath(this.expressionTemplateDirectory, tdesc[0] 
+ ".txt");
+String templateString = readFile(templateFile);
+templateString = templateString.replaceAll("", className);
+templateString = templateS

svn commit: r1537500 [2/2] - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/ test/org/apache/hadoop/hive/ql/parse/ test/queries/clientpositive/ test/results/clientpositive/

2013-10-31 Thread hashutosh
Added: hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out?rev=1537500&view=auto
==
--- hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out 
(added)
+++ hive/trunk/ql/src/test/results/clientpositive/join_cond_pushdown_2.q.out 
Thu Oct 31 14:10:54 2013
@@ -0,0 +1,683 @@
+PREHOOK: query: DROP TABLE part
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE part
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: -- data setup
+CREATE TABLE part( 
+p_partkey INT,
+p_name STRING,
+p_mfgr STRING,
+p_brand STRING,
+p_type STRING,
+p_size INT,
+p_container STRING,
+p_retailprice DOUBLE,
+p_comment STRING
+)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- data setup
+CREATE TABLE part( 
+p_partkey INT,
+p_name STRING,
+p_mfgr STRING,
+p_brand STRING,
+p_type STRING,
+p_size INT,
+p_container STRING,
+p_retailprice DOUBLE,
+p_comment STRING
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@part
+PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' overwrite 
into table part
+PREHOOK: type: LOAD
+PREHOOK: Output: default@part
+POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/part_tiny.txt' 
overwrite into table part
+POSTHOOK: type: LOAD
+POSTHOOK: Output: default@part
+PREHOOK: query: explain select *
+from part p1 join part p2 join part p3 on p1.p_name = p2.p_name join part p4 
on p2.p_name = p3.p_name and p1.p_name = p4.p_name
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select *
+from part p1 join part p2 join part p3 on p1.p_name = p2.p_name join part p4 
on p2.p_name = p3.p_name and p1.p_name = p4.p_name
+POSTHOOK: type: QUERY
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_JOIN (TOK_TABREF (TOK_TABNAME 
part) p1) (TOK_TABREF (TOK_TABNAME part) p2)) (TOK_TABREF (TOK_TABNAME part) 
p3) (= (. (TOK_TABLE_OR_COL p1) p_name) (. (TOK_TABLE_OR_COL p2) p_name))) 
(TOK_TABREF (TOK_TABNAME part) p4) (and (= (. (TOK_TABLE_OR_COL p2) p_name) (. 
(TOK_TABLE_OR_COL p3) p_name)) (= (. (TOK_TABLE_OR_COL p1) p_name) (. 
(TOK_TABLE_OR_COL p4) p_name) (TOK_INSERT (TOK_DESTINATION (TOK_DIR 
TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+Map Reduce
+  Alias -> Map Operator Tree:
+p1 
+  TableScan
+alias: p1
+Reduce Output Operator
+  key expressions:
+expr: p_name
+type: string
+  sort order: +
+  Map-reduce partition columns:
+expr: p_name
+type: string
+  tag: 0
+  value expressions:
+expr: p_partkey
+type: int
+expr: p_name
+type: string
+expr: p_mfgr
+type: string
+expr: p_brand
+type: string
+expr: p_type
+type: string
+expr: p_size
+type: int
+expr: p_container
+type: string
+expr: p_retailprice
+type: double
+expr: p_comment
+type: string
+p2 
+  TableScan
+alias: p2
+Reduce Output Operator
+  key expressions:
+expr: p_name
+type: string
+  sort order: +
+  Map-reduce partition columns:
+expr: p_name
+type: string
+  tag: 1
+  value expressions:
+expr: p_partkey
+type: int
+expr: p_name
+type: string
+expr: p_mfgr
+type: string
+expr: p_brand
+type: string
+expr: p_type
+type: string
+expr: p_size
+type: int
+expr: p_container
+type: string
+expr: p_retailprice
+type: double
+expr: p_comment
+type: string
+p3 
+  TableScan
+alias: p3
+Reduce Output Operator
+  key expressions:
+expr: p_name
+type: string
+  sort order: +
+  Map-reduce partition columns:
+expr: p_name
+type: string
+  tag: 2
+  value expressions: