svn commit: r1552339 - in /hive/trunk: ql/pom.xml shims/aggregator/ shims/aggregator/pom.xml shims/assembly/pom.xml shims/assembly/src/assemble/uberjar.xml shims/pom.xml

2013-12-19 Thread brock
Author: brock
Date: Thu Dec 19 16:24:18 2013
New Revision: 1552339

URL: http://svn.apache.org/r1552339
Log:
HIVE-5966 - Fix eclipse:eclipse post shim aggregation changes (Szehon Ho via Brock Noland)

Added:
hive/trunk/shims/aggregator/
hive/trunk/shims/aggregator/pom.xml
Removed:
hive/trunk/shims/assembly/pom.xml
hive/trunk/shims/assembly/src/assemble/uberjar.xml
Modified:
hive/trunk/ql/pom.xml
hive/trunk/shims/pom.xml

Modified: hive/trunk/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/pom.xml?rev=1552339&r1=1552338&r2=1552339&view=diff
==============================================================================
--- hive/trunk/ql/pom.xml (original)
+++ hive/trunk/ql/pom.xml Thu Dec 19 16:24:18 2013
@@ -356,7 +356,12 @@
           <include>commons-lang:commons-lang</include>
           <include>org.json:json</include>
           <include>org.apache.avro:arvro-mapred</include>
-          <include>org.apache.hive:hive-shims</include>
+          <include>org.apache.hive.shims:hive-shims-0.20</include>
+          <include>org.apache.hive.shims:hive-shims-0.20S</include>
+          <include>org.apache.hive.shims:hive-shims-0.23</include>
+          <include>org.apache.hive.shims:hive-shims-0.23</include>
+          <include>org.apache.hive.shims:hive-shims-common</include>
+          <include>org.apache.hive.shims:hive-shims-common-secure</include>
           <include>com.googlecode.javaewah:JavaEWAH</include>
           <include>javolution:javolution</include>
           <include>com.google.protobuf:protobuf-java</include>

Added: hive/trunk/shims/aggregator/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/aggregator/pom.xml?rev=1552339&view=auto
==============================================================================
--- hive/trunk/shims/aggregator/pom.xml (added)
+++ hive/trunk/shims/aggregator/pom.xml Thu Dec 19 16:24:18 2013
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hive</groupId>
+    <artifactId>hive</artifactId>
+    <version>0.13.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>hive-shims</artifactId>
+  <packaging>jar</packaging>
+  <name>Hive Shims</name>
+
+  <properties>
+    <hive.path.to.root>../..</hive.path.to.root>
+  </properties>
+
+  <dependencies>
+    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
+    <dependency>
+      <groupId>org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-common</artifactId>
+      <version>${project.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-0.20</artifactId>
+      <version>${project.version}</version>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-common-secure</artifactId>
+      <version>${project.version}</version>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-0.20S</artifactId>
+      <version>${project.version}</version>
+      <scope>runtime</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive.shims</groupId>
+      <artifactId>hive-shims-0.23</artifactId>
+      <version>${project.version}</version>
+      <scope>runtime</scope>
+    </dependency>
+  </dependencies>
+</project>

Modified: hive/trunk/shims/pom.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/pom.xml?rev=1552339&r1=1552338&r2=1552339&view=diff
==============================================================================
--- hive/trunk/shims/pom.xml (original)
+++ hive/trunk/shims/pom.xml Thu Dec 19 16:24:18 2013
@@ -37,6 +37,6 @@
     <module>common-secure</module>
     <module>0.20S</module>
     <module>0.23</module>
-    <module>assembly</module>
+    <module>aggregator</module>
   </modules>
 </project>




svn commit: r1552375 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/optimizer/physical/ java/org/apache/hadoop/hive/ql/plan/ test/queries/clientpositive/ test/results/clientpositive/

2013-12-19 Thread hashutosh
Author: hashutosh
Date: Thu Dec 19 17:41:25 2013
New Revision: 1552375

URL: http://svn.apache.org/r1552375
Log:
HIVE-6041 : Incorrect task dependency graph for skewed join optimization (Navis via Ashutosh Chauhan)

Added:
hive/trunk/ql/src/test/queries/clientpositive/skewjoin_noskew.q
hive/trunk/ql/src/test/results/clientpositive/skewjoin_noskew.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
hive/trunk/ql/src/test/results/clientpositive/skewjoin.q.out

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java?rev=1552375&r1=1552374&r2=1552375&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/GenMRSkewJoinProcessor.java Thu Dec 19 17:41:25 2013
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin;
+import org.apache.hadoop.hive.ql.plan.ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx;
 import org.apache.hadoop.hive.ql.plan.ConditionalWork;
 import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -114,6 +115,14 @@ public final class GenMRSkewJoinProcesso
       return;
     }
 
+    List<Task<? extends Serializable>> children = currTask.getChildTasks();
+    if (children != null && children.size() > 1) {
+      throw new SemanticException("Should not happened");
+    }
+
+    Task<? extends Serializable> child =
+        children != null && children.size() == 1 ? children.get(0) : null;
+
     String baseTmpDir = parseCtx.getContext().getMRTmpFileURI();
 
     JoinDesc joinDescriptor = joinOp.getConf();
@@ -333,25 +342,27 @@ public final class GenMRSkewJoinProcesso
       listWorks.add(skewJoinMapJoinTask.getWork());
       listTasks.add(skewJoinMapJoinTask);
     }
+    if (children != null) {
+      for (Task<? extends Serializable> tsk : listTasks) {
+        for (Task<? extends Serializable> oldChild : children) {
+          tsk.addDependentTask(oldChild);
+        }
+      }
+    }
+    if (child != null) {
+      listTasks.add(child);
+    }
+    ConditionalResolverSkewJoinCtx context =
+        new ConditionalResolverSkewJoinCtx(bigKeysDirToTaskMap, child);
 
     ConditionalWork cndWork = new ConditionalWork(listWorks);
     ConditionalTask cndTsk = (ConditionalTask) TaskFactory.get(cndWork, parseCtx.getConf());
     cndTsk.setListTasks(listTasks);
     cndTsk.setResolver(new ConditionalResolverSkewJoin());
-    cndTsk
-        .setResolverCtx(new ConditionalResolverSkewJoin.ConditionalResolverSkewJoinCtx(
-            bigKeysDirToTaskMap));
-    List<Task<? extends Serializable>> oldChildTasks = currTask.getChildTasks();
+    cndTsk.setResolverCtx(context);
     currTask.setChildTasks(new ArrayList<Task<? extends Serializable>>());
     currTask.addDependentTask(cndTsk);
 
-    if (oldChildTasks != null) {
-      for (Task<? extends Serializable> tsk : cndTsk.getListTasks()) {
-        for (Task<? extends Serializable> oldChild : oldChildTasks) {
-          tsk.addDependentTask(oldChild);
-        }
-      }
-    }
     return;
   }
 
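The other half of the fix lives in the resolver context changed below: it
now carries the original child task ("noSkewTask"), so that when no big-key
directory receives rows at runtime the conditional resolver can still
schedule the original plan instead of leaving it orphaned. The following is
a minimal sketch of that selection logic, with tasks simplified to Strings
and dirHasData() standing in for the HDFS check; the names are assumptions
for illustration, not Hive's actual resolver code:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.function.Predicate;

    class SkewResolverSketch {
      // Tasks simplified to Strings; dirHasData stands in for the check
      // that a big-key directory actually received skewed rows.
      static List<String> resolve(HashMap<String, String> dirToTaskMap,
          String noSkewTask, Predicate<String> dirHasData) {
        List<String> resolved = new ArrayList<String>();
        for (Map.Entry<String, String> e : dirToTaskMap.entrySet()) {
          if (dirHasData.test(e.getKey())) {
            resolved.add(e.getValue()); // skew spilled here: run its map-join task
          }
        }
        if (resolved.isEmpty() && noSkewTask != null) {
          resolved.add(noSkewTask);     // no skew at runtime: run the plain join
        }
        return resolved;
      }
    }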

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java?rev=1552375&r1=1552374&r2=1552375&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverSkewJoin.java Thu Dec 19 17:41:25 2013
@@ -50,7 +50,8 @@ public class ConditionalResolverSkewJoin
 // tables into corresponding different dirs (one dir per table).
 // this map stores mapping from big key dir to its corresponding mapjoin
 // task.
-    HashMap<String, Task<? extends Serializable>> dirToTaskMap;
+    private HashMap<String, Task<? extends Serializable>> dirToTaskMap;
+    private Task<? extends Serializable> noSkewTask;
 
 /**
  * For serialization use only.
@@ -59,9 +60,11 @@ public class ConditionalResolverSkewJoin
 }
 
     public ConditionalResolverSkewJoinCtx(
-        HashMap<String, Task<? extends Serializable>> dirToTaskMap) {
+        HashMap<String, Task<? extends Serializable>> dirToTaskMap,
+        Task<? extends Serializable> 

svn commit: r1552393 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

2013-12-19 Thread daijy
Author: daijy
Date: Thu Dec 19 18:38:03 2013
New Revision: 1552393

URL: http://svn.apache.org/r1552393
Log:
HIVE-5540: webhcat e2e test failures: "Expect 1 jobs in logs, but get 1"

Modified:
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java

Modified: hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java?rev=1552393&r1=1552392&r2=1552393&view=diff
==============================================================================
--- hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java (original)
+++ hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java Thu Dec 19 18:38:03 2013
@@ -241,6 +241,7 @@ public class LaunchMapper extends Mapper
 private OutputStream out;
 private final JobID jobid;
 private final Configuration conf;
+boolean needCloseOutput = false;
 
     public Watcher(Configuration conf, JobID jobid, InputStream in, String statusdir, String name)
   throws IOException {
@@ -258,16 +259,18 @@ public class LaunchMapper extends Mapper
 Path p = new Path(statusdir, name);
 FileSystem fs = p.getFileSystem(conf);
 out = fs.create(p);
+needCloseOutput = true;
         LOG.info("templeton: Writing status to " + p);
   }
 }
 
 @Override
 public void run() {
+  PrintWriter writer = null;
   try {
 InputStreamReader isr = new InputStreamReader(in);
 BufferedReader reader = new BufferedReader(isr);
-PrintWriter writer = new PrintWriter(out);
+writer = new PrintWriter(out);
 
 String line;
 while ((line = reader.readLine()) != null) {
@@ -308,6 +311,15 @@ public class LaunchMapper extends Mapper
 }
   } catch (IOException e) {
         LOG.error("templeton: execute error: ", e);
+  } finally {
+// Need to close() because in some FileSystem
+// implementations flush() is no-op.
+// Close the file handle if it is a hdfs file.
+// But if it is stderr/stdout, skip it since
+// WebHCat is not supposed to close it
+        if (needCloseOutput && writer!=null) {
+  writer.close();
+}
   }
 }
   }
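
The pattern the finally block above enforces, shown standalone: close the
output only if this code opened it (the HDFS status file), never when it
was handed stdout/stderr, and close rather than merely flush because
flush() can be a no-op in some FileSystem implementations. A self-contained
sketch under those assumptions, not the actual Watcher class:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.InputStreamReader;
    import java.io.OutputStream;
    import java.io.PrintWriter;

    class OwnedStreamCopy {
      // needCloseOutput is true only when the caller opened 'out' itself
      // (e.g. an HDFS status file) rather than borrowing stdout/stderr.
      static void copy(InputStream in, OutputStream out, boolean needCloseOutput)
          throws IOException {
        PrintWriter writer = null;
        try {
          BufferedReader reader = new BufferedReader(new InputStreamReader(in));
          writer = new PrintWriter(out);
          String line;
          while ((line = reader.readLine()) != null) {
            writer.println(line);
          }
          writer.flush();
        } finally {
          if (needCloseOutput && writer != null) {
            writer.close(); // flush() alone may be a no-op on some FileSystems
          }
        }
      }
    }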




svn commit: r1552449 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ metastore/src/java/org/apache/hadoop/hive/metastore/ metastore/src/java/org/apache/hadoop/hive/metastore/parser/ ql/

2013-12-19 Thread hashutosh
Author: hashutosh
Date: Thu Dec 19 22:49:01 2013
New Revision: 1552449

URL: http://svn.apache.org/r1552449
Log:
HIVE-6052 : metastore JDO filter pushdown for integers may produce unexpected results with non-normalized integer columns (Sergey Shelukhin via Ashutosh Chauhan)

Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/parser/ExpressionTree.java
hive/trunk/ql/src/test/queries/clientpositive/alter_partition_coltype.q
hive/trunk/ql/src/test/queries/clientpositive/annotate_stats_part.q
hive/trunk/ql/src/test/queries/clientpositive/dynamic_partition_skip_default.q
hive/trunk/ql/src/test/results/clientpositive/alter_partition_coltype.q.out
hive/trunk/ql/src/test/results/clientpositive/annotate_stats_part.q.out
hive/trunk/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1552449&r1=1552448&r2=1552449&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu Dec 19 22:49:01 2013
@@ -340,6 +340,9 @@ public class HiveConf extends Configurat
     METASTORE_EXECUTE_SET_UGI("hive.metastore.execute.setugi", false),
     METASTORE_PARTITION_NAME_WHITELIST_PATTERN(
         "hive.metastore.partition.name.whitelist.pattern", ""),
+    // Whether to enable integral JDO pushdown. For partition columns storing integers
+    // in non-canonical form, (e.g. '012'), it may not work, so it's off by default.
+    METASTORE_INTEGER_JDO_PUSHDOWN("hive.metastore.integral.jdo.pushdown", false),
     METASTORE_TRY_DIRECT_SQL("hive.metastore.try.direct.sql", true),
     METASTORE_TRY_DIRECT_SQL_DDL("hive.metastore.try.direct.sql.ddl", true),
     METASTORE_DISALLOW_INCOMPATIBLE_COL_TYPE_CHANGES(

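The non-canonical form the new comment warns about is easy to reproduce: a
varchar comparison and a numeric comparison disagree once integers carry
leading zeros, which is why the pushdown must cast to a number (as the
MetaStoreDirectSql change below does) or stay off. A small standalone
illustration, not Hive code:

    public class NonCanonicalInts {
      public static void main(String[] args) {
        String a = "012", b = "9";
        // As strings, "012" sorts before "9"; as numbers, 12 is greater than 9.
        System.out.println(a.compareTo(b) < 0);                    // true
        System.out.println(Long.parseLong(a) < Long.parseLong(b)); // false
      }
    }
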
Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java?rev=1552449&r1=1552448&r2=1552449&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java Thu Dec 19 22:49:01 2013
@@ -745,9 +745,6 @@ class MetaStoreDirectSql {
       return;
     }
 
-    // Force string-based handling in some cases to be compatible with JDO pushdown.
-    boolean forceStringEq = !isStringCol && node.canJdoUseStringsWithIntegral();
-
     if (joins.isEmpty()) {
       // There's a fixed number of partition cols that we might have filters on. To avoid
       // joining multiple times for one column (if there are several filters on it), we will
@@ -765,16 +762,24 @@
 
     // Build the filter and add parameters linearly; we are traversing leaf nodes LTR.
     String tableValue = "\"FILTER" + partColIndex + "\".\"PART_KEY_VAL\"";
-    if (!isStringCol && !forceStringEq) {
+    if (node.isReverseOrder) {
+      params.add(node.value);
+    }
+    if (!isStringCol) {
       // The underlying database field is varchar, we need to compare numbers.
+      // Note that this won't work with __HIVE_DEFAULT_PARTITION__. It will fail and fall
+      // back to JDO. That is by design; we could add an ugly workaround here but didn't.
       tableValue = "cast(" + tableValue + " as decimal(21,0))";
+
       // This is a workaround for DERBY-6358; as such, it is pretty horrible.
       tableValue = "(case when \"TBLS\".\"TBL_NAME\" = ? and \"DBS\".\"NAME\" = ? then "
           + tableValue + " else null end)";
       params.add(table.getTableName().toLowerCase());
       params.add(table.getDbName().toLowerCase());
     }
-    params.add(forceStringEq ? node.value.toString() : node.value);
+    if (!node.isReverseOrder) {
+      params.add(node.value);
+    }
 
     filterBuffer.append(node.isReverseOrder
         ? "(? " + node.operator.getSqlOp() + " " + tableValue + ")"

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1552449&r1=1552448&r2=1552449&view=diff
==============================================================================
--- 

svn commit: r1552451 [2/2] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ itests/qtest/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/metad

2013-12-19 Thread hashutosh
Added: hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out?rev=1552451&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/quotedid_partition.q.out Thu Dec 19 22:55:10 2013
@@ -0,0 +1,440 @@
+PREHOOK: query: create table src_p(`x+1` string, `yy` string) partitioned by (`!@#$%^*()_q` string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table src_p(`x+1` string, `yy` string) partitioned by (`!@#$%^*()_q` string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@src_p
+PREHOOK: query: insert overwrite table src_p partition(`!@#$%^*()_q`='a') select * from src
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@src_p@!@%23$%25%5E%2A()_q=a
+POSTHOOK: query: insert overwrite table src_p partition(`!@#$%^*()_q`='a') select * from src
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@src_p@!@%23$%25%5E%2A()_q=a
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).yy SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: show partitions src_p
+PREHOOK: type: SHOWPARTITIONS
+POSTHOOK: query: show partitions src_p
+POSTHOOK: type: SHOWPARTITIONS
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).yy SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+!@%23$%25%5E%2A()_q=a
+PREHOOK: query: explain select `x+1`, `yy`, `!@#$%^*()_q` 
+from src_p where `!@#$%^*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `yy`, `!@#$%^*()_q` having `!@#$%^*()_q` = 'a'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select `x+1`, `yy`, `!@#$%^*()_q` 
+from src_p where `!@#$%^*()_q` = 'a' and `x+1`='10'
+group by `x+1`, `yy`, `!@#$%^*()_q` having `!@#$%^*()_q` = 'a'
+POSTHOOK: type: QUERY
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).x+1 SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: src_p PARTITION(!@#$%^*()_q=a).yy SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ]
+ABSTRACT SYNTAX TREE:
+  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src_p))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL x+1)) (TOK_SELEXPR (TOK_TABLE_OR_COL yy)) (TOK_SELEXPR (TOK_TABLE_OR_COL !@#$%^*()_q))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL !@#$%^*()_q) 'a') (= (TOK_TABLE_OR_COL x+1) '10'))) (TOK_GROUPBY (TOK_TABLE_OR_COL x+1) (TOK_TABLE_OR_COL yy) (TOK_TABLE_OR_COL !@#$%^*()_q)) (TOK_HAVING (= (TOK_TABLE_OR_COL !@#$%^*()_q) 'a'))))
+
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+Map Reduce
+  Alias - Map Operator Tree:
+src_p 
+  TableScan
+alias: src_p
+Filter Operator
+  predicate:
+  expr: (x+1 = '10')
+  type: boolean
+  Select Operator
+expressions:
+  expr: x+1
+  type: string
+  expr: yy
+  type: string
+  expr: !@#$%^*()_q
+  type: string
+outputColumnNames: x+1, yy, !@#$%^*()_q
+Group By Operator
+  bucketGroup: false
+  keys:
+expr: x+1
+type: string
+expr: yy
+type: string
+expr: !@#$%^*()_q
+type: string
+  mode: hash
+  outputColumnNames: _col0, _col1, _col2
+  Reduce Output Operator
+key expressions:
+  expr: _col0
+  type: string
+  expr: _col1
+  type: string
+  expr: _col2
+  type: string
+sort order: +++
+Map-reduce partition columns:
+  expr: _col0
+  type: string
+  expr: _col1
+  type: string
+  expr: _col2
+  type: string
+tag: -1
+  Reduce Operator Tree:
+Group By Operator
+  bucketGroup: false
+  keys:
+expr: KEY._col0
+type: string
+expr: KEY._col1
+  

svn commit: r1552480 - in /hive/branches/tez/ql/src: java/org/apache/hadoop/hive/ql/ErrorMsg.java test/org/apache/hadoop/hive/ql/exec/TestOperators.java test/org/apache/hadoop/hive/ql/io/orc/TestInput

2013-12-19 Thread gunther
Author: gunther
Date: Fri Dec 20 02:38:04 2013
New Revision: 1552480

URL: http://svn.apache.org/r1552480
Log:
HIVE-6077: Fixing a couple of orc unit tests on tez (Gunther Hagleitner)

Modified:
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1552480&r1=1552479&r2=1552480&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Fri Dec 20 02:38:04 2013
@@ -366,9 +366,9 @@ public enum ErrorMsg {
   UNSUPPORTED_SUBQUERY_EXPRESSION(10249, "Unsupported SubQuery Expression"),
   INVALID_SUBQUERY_EXPRESSION(10250, "Invalid SubQuery expression"),
 
-  INVALID_HDFS_URI(10248, "{0} is not a hdfs uri", true),
-  INVALID_DIR(10249, "{0} is not a directory", true),
-  NO_VALID_LOCATIONS(10250, "Could not find any valid location to place the jars. " +
+  INVALID_HDFS_URI(10251, "{0} is not a hdfs uri", true),
+  INVALID_DIR(10252, "{0} is not a directory", true),
+  NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. " +
     "Please update hive.jar.directory or hive.user.install.directory with a valid location", false),
 
   SCRIPT_INIT_ERROR(2, "Unable to initialize custom script."),

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java?rev=1552480&r1=1552479&r2=1552480&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/exec/TestOperators.java Fri Dec 20 02:38:04 2013
@@ -29,6 +29,7 @@ import junit.framework.TestCase;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.io.IOContext;
 import org.apache.hadoop.hive.ql.parse.TypeCheckProcFactory;
 import org.apache.hadoop.hive.ql.plan.CollectDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
@@ -313,6 +314,7 @@ public class TestOperators extends TestC
   Configuration hconf = new JobConf(TestOperators.class);
   HiveConf.setVar(hconf, HiveConf.ConfVars.HADOOPMAPFILENAME,
           "hdfs:///testDir/testFile");
+      IOContext.get().setInputPath(new Path("hdfs:///testDir/testFile"));
 
   // initialize pathToAliases
       ArrayList<String> aliases = new ArrayList<String>();

Modified: hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java?rev=1552480&r1=1552479&r2=1552480&view=diff
==============================================================================
--- hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java (original)
+++ hive/branches/tez/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java Fri Dec 20 02:38:04 2013
@@ -19,6 +19,7 @@ package org.apache.hadoop.hive.ql.io.orc
 
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -562,7 +563,6 @@ public class TestInputOutputFormat {
 IntObjectInspector intInspector =
 (IntObjectInspector) fields.get(0).getFieldObjectInspector();
 assertEquals(0.0, reader.getProgress(), 0.1);
-assertEquals(3, reader.getPos());
 while (reader.next(key, value)) {
       assertEquals(++rowNum, intInspector.get(inspector.
           getStructFieldData(serde.deserialize(value), fields.get(0))));
@@ -697,7 +697,7 @@ public class TestInputOutputFormat {
     InputFormat<?,?> in = new OrcInputFormat();
 FileInputFormat.setInputPaths(conf, testFilePath.toString());
 InputSplit[] splits = in.getSplits(conf, 1);
-assertEquals(0, splits.length);
+assertTrue(1 == splits.length);
 assertEquals(null, serde.getSerDeStats());
   }
 




svn commit: r1552482 - /hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java

2013-12-19 Thread gunther
Author: gunther
Date: Fri Dec 20 02:56:24 2013
New Revision: 1552482

URL: http://svn.apache.org/r1552482
Log:
HIVE-6078: Choosing conditional task for merging files is not deterministic in tez (Vikram Dixit K via Gunther Hagleitner)

Modified:
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java?rev=1552482&r1=1552481&r2=1552482&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java Fri Dec 20 02:56:24 2013
@@ -1628,15 +1628,21 @@ public final class GenMapRedUtils {
       // There are separate configuration parameters to control whether to
       // merge for a map-only job
       // or for a map-reduce job
-      ReduceWork reduceWork = currTask.getWork() instanceof MapredWork
-          ? ((MapredWork) currTask.getWork()).getReduceWork() : null;
-      boolean mergeMapOnly =
-          hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) && reduceWork == null;
-      boolean mergeMapRed =
-          hconf.getBoolVar(ConfVars.HIVEMERGEMAPREDFILES) &&
-          reduceWork != null;
-      if (mergeMapOnly || mergeMapRed) {
-        return true;
+      if (currTask.getWork() instanceof TezWork) {
+        return hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) ||
+            hconf.getBoolVar(ConfVars.HIVEMERGEMAPREDFILES);
+      } else if (currTask.getWork() instanceof MapredWork) {
+        ReduceWork reduceWork = ((MapredWork) currTask.getWork()).getReduceWork();
+        boolean mergeMapOnly =
+            hconf.getBoolVar(ConfVars.HIVEMERGEMAPFILES) && reduceWork == null;
+        boolean mergeMapRed =
+            hconf.getBoolVar(ConfVars.HIVEMERGEMAPREDFILES) &&
+            reduceWork != null;
+        if (mergeMapOnly || mergeMapRed) {
+          return true;
+        }
+      } else {
+        return false;
       }
     }
   }

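The two ConfVars consulted above correspond to the real properties
hive.merge.mapfiles and hive.merge.mapredfiles; under the new Tez branch
either one enables merging, since the map-only versus map-reduce
distinction does not carry over to a Tez DAG. A short usage sketch of those
knobs (illustration only):

    import org.apache.hadoop.conf.Configuration;

    class MergeConfSketch {
      static Configuration configure() {
        Configuration conf = new Configuration();
        conf.setBoolean("hive.merge.mapfiles", true);     // merge map-only job output
        conf.setBoolean("hive.merge.mapredfiles", false); // merge map-reduce job output
        // Under the Tez logic above, merging is considered if either flag is true.
        return conf;
      }
    }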



svn commit: r1552483 - in /hive/branches/tez: itests/qtest/pom.xml packaging/src/main/assembly/bin.xml ql/pom.xml ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java ql/src/java/org/apache/had

2013-12-19 Thread gunther
Author: gunther
Date: Fri Dec 20 03:01:10 2013
New Revision: 1552483

URL: http://svn.apache.org/r1552483
Log:
HIVE-6079: Hadoop 1 tests fail in tez branch (Vikram Dixit K via Gunther Hagleitner)

Modified:
hive/branches/tez/itests/qtest/pom.xml
hive/branches/tez/packaging/src/main/assembly/bin.xml
hive/branches/tez/ql/pom.xml
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java

Modified: hive/branches/tez/itests/qtest/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/itests/qtest/pom.xml?rev=1552483&r1=1552482&r2=1552483&view=diff
==============================================================================
--- hive/branches/tez/itests/qtest/pom.xml (original)
+++ hive/branches/tez/itests/qtest/pom.xml Fri Dec 20 03:01:10 2013
@@ -330,6 +330,42 @@
           <version>${tez.version}</version>
           <type>test-jar</type>
         </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-api</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-runtime-library</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-mapreduce</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-dag</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-common</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.tez</groupId>
+          <artifactId>tez-runtime-internals</artifactId>
+          <version>${tez.version}</version>
+          <scope>test</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

Modified: hive/branches/tez/packaging/src/main/assembly/bin.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/packaging/src/main/assembly/bin.xml?rev=1552483&r1=1552482&r2=1552483&view=diff
==============================================================================
--- hive/branches/tez/packaging/src/main/assembly/bin.xml (original)
+++ hive/branches/tez/packaging/src/main/assembly/bin.xml Fri Dec 20 03:01:10 2013
@@ -40,7 +40,6 @@
       <useTransitiveFiltering>true</useTransitiveFiltering>
       <excludes>
         <exclude>org.apache.hive.hcatalog:*</exclude>
-        <exclude>org.apache.tez:*</exclude>
       </excludes>
     </dependencySet>
     <dependencySet>

Modified: hive/branches/tez/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/pom.xml?rev=1552483&r1=1552482&r2=1552483&view=diff
==============================================================================
--- hive/branches/tez/ql/pom.xml (original)
+++ hive/branches/tez/ql/pom.xml Fri Dec 20 03:01:10 2013
@@ -219,16 +219,19 @@
       <groupId>org.apache.tez</groupId>
       <artifactId>tez-api</artifactId>
       <version>${tez.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.tez</groupId>
       <artifactId>tez-runtime-library</artifactId>
       <version>${tez.version}</version>
+      <optional>true</optional>
     </dependency>
     <dependency>
       <groupId>org.apache.tez</groupId>
       <artifactId>tez-mapreduce</artifactId>
       <version>${tez.version}</version>
+      <optional>true</optional>
     </dependency>
   </dependencies>
 
@@ -259,21 +262,6 @@
       <version>${hadoop-23.version}</version>
       <optional>true</optional>
     </dependency>
-    <dependency>
-      <groupId>org.apache.tez</groupId>
-      <artifactId>tez-dag</artifactId>
-      <version>${tez.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.tez</groupId>
-      <artifactId>tez-common</artifactId>
-      <version>${tez.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.tez</groupId>
-      <artifactId>tez-runtime-internals</artifactId>
-      <version>${tez.version}</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java?rev=1552483&r1=1552482&r2=1552483&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/TezTask.java (original)
+++ 

svn commit: r1552485 - /hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java

2013-12-19 Thread gunther
Author: gunther
Date: Fri Dec 20 03:03:35 2013
New Revision: 1552485

URL: http://svn.apache.org/r1552485
Log:
HIVE-6081: Dag utils in tez has incorrect dependency on Hadoop20 shims (Vikram Dixit K via Gunther Hagleitner)

Modified:
hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java?rev=1552485&r1=1552484&r2=1552485&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/DagUtils.java Fri Dec 20 03:03:35 2013
@@ -53,7 +53,7 @@ import org.apache.hadoop.hive.ql.plan.Te
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
-import org.apache.hadoop.hive.shims.Hadoop20Shims.NullOutputCommitter;
+import org.apache.hadoop.hive.shims.HadoopShimsSecure.NullOutputCommitter;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.InputFormat;




svn commit: r1552487 [1/2] - in /hive/trunk: ant/src/org/apache/hadoop/hive/ant/ ql/src/gen/vectorization/ExpressionTemplates/ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ ql/src/tes

2013-12-19 Thread hashutosh
Author: hashutosh
Date: Fri Dec 20 03:31:26 2013
New Revision: 1552487

URL: http://svn.apache.org/r1552487
Log:
HIVE-6034 : vectorized % doesn't handle zeroes the same way as non-vectorized (Sergey Shelukhin via Eric Hanson)

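Background for the template move in the diff below: non-vectorized Hive
evaluates x % 0 as NULL, the same convention as division by zero, so the
Modulo expressions are regenerated from the Divide templates (which already
NULL out zero divisors via NullUtil) instead of the plain Arithmetic ones.
A standalone sketch of the intended semantics, illustration only and not
Hive code:

    public class ModuloZeroSketch {
      public static void main(String[] args) {
        long divisor = 0L;
        // Mirror Hive's SQL semantics: modulo by zero yields NULL, not an exception.
        Long result = (divisor == 0L) ? null : Long.valueOf(17L % divisor);
        System.out.println(result); // null
      }
    }
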
Modified:
hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ColumnDivideColumn.txt
hive/trunk/ql/src/gen/vectorization/ExpressionTemplates/ScalarDivideColumn.txt
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
hive/trunk/ql/src/test/queries/clientpositive/vectorization_12.q
hive/trunk/ql/src/test/queries/clientpositive/vectorization_14.q
hive/trunk/ql/src/test/results/clientpositive/vectorization_12.q.out
hive/trunk/ql/src/test/results/clientpositive/vectorization_14.q.out

Modified: hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java
URL: http://svn.apache.org/viewvc/hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java?rev=1552487&r1=1552486&r2=1552487&view=diff
==============================================================================
--- hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java (original)
+++ hive/trunk/ant/src/org/apache/hadoop/hive/ant/GenVectorCode.java Fri Dec 20 03:31:26 2013
@@ -38,62 +38,51 @@ public class GenVectorCode extends Task 
       {"ColumnArithmeticScalar", "Add", "long", "long", "+"},
       {"ColumnArithmeticScalar", "Subtract", "long", "long", "-"},
       {"ColumnArithmeticScalar", "Multiply", "long", "long", "*"},
-      {"ColumnArithmeticScalar", "Modulo", "long", "long", "%"},
 
       {"ColumnArithmeticScalar", "Add", "long", "double", "+"},
       {"ColumnArithmeticScalar", "Subtract", "long", "double", "-"},
       {"ColumnArithmeticScalar", "Multiply", "long", "double", "*"},
-      {"ColumnArithmeticScalar", "Modulo", "long", "double", "%"},
 
       {"ColumnArithmeticScalar", "Add", "double", "long", "+"},
       {"ColumnArithmeticScalar", "Subtract", "double", "long", "-"},
       {"ColumnArithmeticScalar", "Multiply", "double", "long", "*"},
-      {"ColumnArithmeticScalar", "Modulo", "double", "long", "%"},
 
       {"ColumnArithmeticScalar", "Add", "double", "double", "+"},
       {"ColumnArithmeticScalar", "Subtract", "double", "double", "-"},
       {"ColumnArithmeticScalar", "Multiply", "double", "double", "*"},
-      {"ColumnArithmeticScalar", "Modulo", "double", "double", "%"},
 
       {"ScalarArithmeticColumn", "Add", "long", "long", "+"},
       {"ScalarArithmeticColumn", "Subtract", "long", "long", "-"},
       {"ScalarArithmeticColumn", "Multiply", "long", "long", "*"},
-      {"ScalarArithmeticColumn", "Modulo", "long", "long", "%"},
 
       {"ScalarArithmeticColumn", "Add", "long", "double", "+"},
       {"ScalarArithmeticColumn", "Subtract", "long", "double", "-"},
       {"ScalarArithmeticColumn", "Multiply", "long", "double", "*"},
-      {"ScalarArithmeticColumn", "Modulo", "long", "double", "%"},
 
       {"ScalarArithmeticColumn", "Add", "double", "long", "+"},
       {"ScalarArithmeticColumn", "Subtract", "double", "long", "-"},
       {"ScalarArithmeticColumn", "Multiply", "double", "long", "*"},
-      {"ScalarArithmeticColumn", "Modulo", "double", "long", "%"},
 
       {"ScalarArithmeticColumn", "Add", "double", "double", "+"},
       {"ScalarArithmeticColumn", "Subtract", "double", "double", "-"},
       {"ScalarArithmeticColumn", "Multiply", "double", "double", "*"},
-      {"ScalarArithmeticColumn", "Modulo", "double", "double", "%"},
 
       {"ColumnArithmeticColumn", "Add", "long", "long", "+"},
       {"ColumnArithmeticColumn", "Subtract", "long", "long", "-"},
       {"ColumnArithmeticColumn", "Multiply", "long", "long", "*"},
-      {"ColumnArithmeticColumn", "Modulo", "long", "long", "%"},
 
       {"ColumnArithmeticColumn", "Add", "long", "double", "+"},
       {"ColumnArithmeticColumn", "Subtract", "long", "double", "-"},
       {"ColumnArithmeticColumn", "Multiply", "long", "double", "*"},
-      {"ColumnArithmeticColumn", "Modulo", "long", "double", "%"},
 
       {"ColumnArithmeticColumn", "Add", "double", "long", "+"},
       {"ColumnArithmeticColumn", "Subtract", "double", "long", "-"},
       {"ColumnArithmeticColumn", "Multiply", "double", "long", "*"},
-      {"ColumnArithmeticColumn", "Modulo", "double", "long", "%"},
 
       {"ColumnArithmeticColumn", "Add", "double", "double", "+"},
       {"ColumnArithmeticColumn", "Subtract", "double", "double", "-"},
       {"ColumnArithmeticColumn", "Multiply", "double", "double", "*"},
-      {"ColumnArithmeticColumn", "Modulo", "double", "double", "%"},
+
 
       {"ColumnDivideScalar", "Divide", "long", "double", "/"},
       {"ColumnDivideScalar", "Divide", "double", "long", "/"},
@@ -105,6 +94,19 @@ public class GenVectorCode extends Task 
       {"ColumnDivideColumn", "Divide", "double", "long", "/"},
       {"ColumnDivideColumn", "Divide", "double", "double", "/"},
 
+      {"ColumnDivideScalar", "Modulo", "long", "long", "%"},
+      {"ColumnDivideScalar", "Modulo", "long", "double", "%"},
+      {"ColumnDivideScalar", "Modulo", "double", "long", "%"},
+      {"ColumnDivideScalar", "Modulo", "double", "double", "%"},
+      {"ScalarDivideColumn", "Modulo", "long", "long", "%"},
+      {"ScalarDivideColumn", "Modulo", "long", "double", "%"},
+      {"ScalarDivideColumn", "Modulo", "double", "long", "%"},
+      {"ScalarDivideColumn", "Modulo", "double", "double", "%"},
+      {"ColumnDivideColumn", "Modulo", "long", "long", "%"},
+      {"ColumnDivideColumn",