svn commit: r1651367 - /hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java

2015-01-13 Thread gopalv
Author: gopalv
Date: Tue Jan 13 14:57:47 2015
New Revision: 1651367

URL: http://svn.apache.org/r1651367
Log:
HIVE-9310 : Flush hive CLI history to disk on exit (Gopal V, reviewed by 
Prasanth J)

Modified:
hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java

Modified: hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1651367&r1=1651366&r2=1651367&view=diff
==============================================================================
--- hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Tue Jan 
13 14:57:47 2015
@@ -40,6 +40,7 @@ import com.google.common.base.Splitter;
 import jline.console.ConsoleReader;
 import jline.console.completer.Completer;
 import jline.console.history.FileHistory;
+import jline.console.history.PersistentHistory;
 import jline.console.completer.StringsCompleter;
 import jline.console.completer.ArgumentCompleter;
 import jline.console.completer.ArgumentCompleter.ArgumentDelimiter;
@@ -721,10 +722,12 @@ public class CliDriver {
 String line;
 final String HISTORYFILE = ".hivehistory";
 String historyDirectory = System.getProperty("user.home");
+PersistentHistory history = null;
 try {
   if ((new File(historyDirectory)).exists()) {
 String historyFile = historyDirectory + File.separator + HISTORYFILE;
-reader.setHistory(new FileHistory(new File(historyFile)));
+history = new FileHistory(new File(historyFile));
+reader.setHistory(history);
   } else {
 System.err.println("WARNING: Directory for Hive history file: " + 
historyDirectory +
" does not exist.   History will not be available 
during this session.");
@@ -759,6 +762,10 @@ public class CliDriver {
 continue;
   }
 }
+
+if (history != null) {
+  history.flush();
+}
 return ret;
   }
 




svn commit: r1651419 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec: ExprNodeEvaluatorFactory.java SelectOperator.java

2015-01-13 Thread hashutosh
Author: hashutosh
Date: Tue Jan 13 17:31:55 2015
New Revision: 1651419

URL: http://svn.apache.org/r1651419
Log:
HIVE-7550 : Extend cached evaluation to multiple expressions (Navis via 
Ashutosh Chauhan)

Modified:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java?rev=1651419&r1=1651418&r2=1651419&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
 Tue Jan 13 17:31:55 2015
@@ -64,6 +64,20 @@ public final class ExprNodeEvaluatorFact
 "Cannot find ExprNodeEvaluator for the exprNodeDesc = " + desc);
   }
 
+  public static ExprNodeEvaluator[] toCachedEvals(ExprNodeEvaluator[] evals) {
+EvaluatorContext context = new EvaluatorContext();
+for (int i = 0; i < evals.length; i++) {
+  if (evals[i] instanceof ExprNodeGenericFuncEvaluator) {
+iterate(evals[i], context);
+if (context.hasReference) {
+  evals[i] = new ExprNodeEvaluatorHead(evals[i]);
+  context.hasReference = false;
+}
+  }
+}
+return evals;
+  }
+
   /**
* Should be called before eval is initialized
*/

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java?rev=1651419&r1=1651418&r2=1651419&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java 
Tue Jan 13 17:31:55 2015
@@ -28,7 +28,6 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.SelectDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
-import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 
 /**
  * Select operator implementation.
@@ -55,12 +54,12 @@ public class SelectOperator extends Oper
 for (int i = 0; i < colList.size(); i++) {
   assert (colList.get(i) != null);
   eval[i] = ExprNodeEvaluatorFactory.get(colList.get(i));
-  if (HiveConf.getBoolVar(hconf, 
HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) {
-eval[i] = ExprNodeEvaluatorFactory.toCachedEval(eval[i]);
-  }
+}
+if (HiveConf.getBoolVar(hconf, HiveConf.ConfVars.HIVEEXPREVALUATIONCACHE)) 
{
+  eval = ExprNodeEvaluatorFactory.toCachedEvals(eval);
 }
 output = new Object[eval.length];
-LOG.info("SELECT " + ((StructObjectInspector) 
inputObjInspectors[0]).getTypeName());
+LOG.info("SELECT " + inputObjInspectors[0].getTypeName());
 outputObjInspector = initEvaluatorsAndReturnStruct(eval, 
conf.getOutputColumnNames(),
 inputObjInspectors[0]);
 initializeChildren(hconf);




svn commit: r1651445 - in /hive/trunk/hcatalog: src/test/e2e/templeton/ src/test/e2e/templeton/deployers/ src/test/e2e/templeton/drivers/ src/test/e2e/templeton/tests/ webhcat/svr/src/main/java/org/ap

2015-01-13 Thread ekoifman
Author: ekoifman
Date: Tue Jan 13 19:15:27 2015
New Revision: 1651445

URL: http://svn.apache.org/r1651445
Log:
HIVE-9351 Running Hive Jobs with Tez cause templeton to never report percent 
complete

Modified:
hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
hive/trunk/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
hive/trunk/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/README.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/README.txt?rev=1651445&r1=1651444&r2=1651445&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/README.txt (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/README.txt Tue Jan 13 19:15:27 
2015
@@ -223,3 +223,14 @@ enough map slots (10?) (mapred.tasktrack
 Adding Tests
 
 ToDo: add some guidelines
+
+Running on Tez
+1. set up Tez as in http://tez.apache.org/install.html
+2. set hive.execution.engine=tez in hive-site.xml (actually is this needed?)
+3. add hive.execution.engine=tez to templeton.hive.properties in 
webhcat-site.xml
+4. add to mapred-env.sh/yarn-env.sh (as you defined these in step 1)
+export TEZ_VERSION=0.5.3
+export TEZ_JARS=/Users/ekoifman/dev/apache-tez-client-${TEZ_VERSION}
+export TEZ_CONF_DIR=${TEZ_JARS}/conf
+export 
HADOOP_CLASSPATH=${TEZ_CONF_DIR}:${TEZ_JARS}/*:${TEZ_JARS}/lib/*:${HADOOP_CLASSPATH}
+(w/o this you'll see something like "java.lang.NoClassDefFoundError: 
org/apache/tez/dag/api/SessionNotRunning")

Modified: 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh?rev=1651445&r1=1651444&r2=1651445&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
(original)
+++ 
hive/trunk/hcatalog/src/test/e2e/templeton/deployers/deploy_e2e_artifacts.sh 
Tue Jan 13 19:15:27 2015
@@ -48,5 +48,17 @@ ${HADOOP_HOME}/bin/hadoop fs -put ${PIG_
 
 ${HADOOP_HOME}/bin/hadoop fs -put 
/Users/ekoifman/dev/sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz 
/apps/templeton/sqoop-1.4.5.bin__hadoop-2.0.4-alpha.tar.gz
 ${HADOOP_HOME}/bin/hadoop fs -put 
/Users/ekoifman/dev/mysql-connector-java-5.1.30/mysql-connector-java-5.1.30-bin.jar
 /apps/templeton/jdbc/mysql-connector-java.jar
+
+#Tez set up (http://tez.apache.org/install.html)
+#if not using Tez - ignore this
+${HADOOP_HOME}/bin/hdfs dfs -put 
/Users/ekoifman/dev/apache-tez-${TEZ_VERSION}-src/tez-dist/target/tez-${TEZ_VERSION}.tar.gz
 /apps/tez-${TEZ_VERSION}.tar.gz
+${HADOOP_HOME}/bin/hdfs dfs -mkdir /tmp/tezin
+${HADOOP_HOME}/bin/hdfs dfs -mkdir /tmp/tezout
+${HADOOP_HOME}/bin/hdfs dfs -put /Users/ekoifman/dev/hive/build.sh /tmp/tezin
+#Above line is for Sanity Check: this is to run #6 in 
http://tez.apache.org/install.html
+#$HADOOP_HOME/bin/hadoop jar tez-examples-0.5.3.jar orderedwordcount 
/tmp/tezin /tmp/tezout
+
+
+
 #check what got deployed
-${HADOOP_HOME}/bin/hdfs dfs -ls -R /apps/templeton webhcate2e /user/templeton 
/user/hive/warehouse
+${HADOOP_HOME}/bin/hdfs dfs -ls -R /apps webhcate2e /user/templeton 
/user/hive/warehouse

Modified: hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh?rev=1651445&r1=1651444&r2=1651445&view=diff
==============================================================================
--- hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh (original)
+++ hive/trunk/hcatalog/src/test/e2e/templeton/deployers/env.sh Tue Jan 13 
19:15:27 2015
@@ -22,14 +22,29 @@
 
 # define necessary env vars here and source it in other files
 
-export HADOOP_VERSION=2.4.1-SNAPSHOT
-#export HIVE_VERSION=0.14.0-SNAPSHOT
-export PIG_VERSION=0.12.2-SNAPSHOT
+echo ${HADOOP_VERSION};
+
+if [ -z ${HADOOP_VERSION} ]; then
+  export HADOOP_VERSION=2.4.1-SNAPSHOT
+fi
+
+if [ -z ${HIVE_VERSION} ]; then
+  export HIVE_VERSION=0.14.0-SNAPSHOT
+fi
+
+if [ -z ${PIG_VERSION} ]; then
+  export PIG_VERSION=0.12.2-SNAPSHOT
+fi
 
 #Root of project source tree
-export PROJ_HOME=/Users/${USER}/dev/hive
+if [ -z ${PROJ_HOME} ]; then
+  export PROJ_HOME=/Users/${USER}/dev/hive
+fi
 export 
HIVE_HOME=${PROJ_HOME}/packaging/target/apache-hive-${HIVE_VERSION}-bin/apache-hive-${HIVE_VERSION}-bin
-export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_VERSION}
+
+if [ -z ${HADOOP_HOME} ]; then
+  export 
HADOOP_HOME=/Users/${USER}/dev/hwxhadoop/hadoop-dist/target/hadoop-${HADOOP_

svn commit: r1651547 - in /hive/branches/branch-0.14/ql/src: java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java test/queries/clientpositive/expr_cached.q test/results/clientpositive/exp

2015-01-13 Thread hashutosh
Author: hashutosh
Date: Wed Jan 14 01:27:42 2015
New Revision: 1651547

URL: http://svn.apache.org/r1651547
Log:
HIVE-9278 : Cached expression feature broken in one case (Navis via Ashutosh 
Chauhan)

Added:
hive/branches/branch-0.14/ql/src/test/queries/clientpositive/expr_cached.q

hive/branches/branch-0.14/ql/src/test/results/clientpositive/expr_cached.q.out
Modified:

hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java

Modified: 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java?rev=1651547&r1=1651546&r2=1651547&view=diff
==============================================================================
--- 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
 (original)
+++ 
hive/branches/branch-0.14/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluatorFactory.java
 Wed Jan 14 01:27:42 2015
@@ -100,12 +100,14 @@ public final class ExprNodeEvaluatorFact
 
   private static class EvaluatorContext {
 
-private final Map cached = new HashMap();
+private final Map cached = 
+new HashMap();
 
 private boolean hasReference;
 
 public ExprNodeEvaluator getEvaluated(ExprNodeEvaluator eval) {
-  String key = eval.getExpr().toString();
+  ExprNodeDesc.ExprNodeDescEqualityWrapper key = 
+  new ExprNodeDesc.ExprNodeDescEqualityWrapper(eval.expr); 
   ExprNodeEvaluator prev = cached.get(key);
   if (prev == null) {
 cached.put(key, eval);

Added: 
hive/branches/branch-0.14/ql/src/test/queries/clientpositive/expr_cached.q
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/ql/src/test/queries/clientpositive/expr_cached.q?rev=1651547&view=auto
==============================================================================
--- hive/branches/branch-0.14/ql/src/test/queries/clientpositive/expr_cached.q 
(added)
+++ hive/branches/branch-0.14/ql/src/test/queries/clientpositive/expr_cached.q 
Wed Jan 14 01:27:42 2015
@@ -0,0 +1,4 @@
+set hive.fetch.task.conversion=more;
+
+-- should return a value
+select * from src tablesample (1 rows) where length(key) <> reverse(key);
\ No newline at end of file

Added: 
hive/branches/branch-0.14/ql/src/test/results/clientpositive/expr_cached.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/ql/src/test/results/clientpositive/expr_cached.q.out?rev=1651547&view=auto
==============================================================================
--- 
hive/branches/branch-0.14/ql/src/test/results/clientpositive/expr_cached.q.out 
(added)
+++ 
hive/branches/branch-0.14/ql/src/test/results/clientpositive/expr_cached.q.out 
Wed Jan 14 01:27:42 2015
@@ -0,0 +1,11 @@
+PREHOOK: query: -- should return a value
+select * from src tablesample (1 rows) where length(key) <> reverse(key)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+ A masked pattern was here 
+POSTHOOK: query: -- should return a value
+select * from src tablesample (1 rows) where length(key) <> reverse(key)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+ A masked pattern was here 
+238val_238




svn commit: r1651558 - /hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java

2015-01-13 Thread ekoifman
Author: ekoifman
Date: Wed Jan 14 02:46:32 2015
New Revision: 1651558

URL: http://svn.apache.org/r1651558
Log:
HIVE-8914 HDFSCleanup thread holds reference to FileSystem (shanyu zhao via 
Eugene Koifman)

Modified:

hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java

Modified: 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java?rev=1651558&r1=1651557&r2=1651558&view=diff
==============================================================================
--- 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
 (original)
+++ 
hive/trunk/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/HDFSCleanup.java
 Wed Jan 14 02:46:32 2015
@@ -91,18 +91,25 @@ public class HDFSCleanup extends Thread
*
*/
   public void run() {
-FileSystem fs = null;
 while (!stop) {
   try {
 // Put each check in a separate try/catch, so if that particular
 // cycle fails, it'll try again on the next cycle.
+FileSystem fs=null;
 try {
-  if (fs == null) {
-fs = new Path(storage_root).getFileSystem(appConf);
-  }
+  fs = new Path(storage_root).getFileSystem(appConf);
   checkFiles(fs);
 } catch (Exception e) {
   LOG.error("Cleanup cycle failed: " + e.getMessage());
+} finally {
+  if(fs != null) {
+try {
+  fs.close();
+}
+catch (Exception e) {
+  LOG.error("Closing file system failed: " + e.getMessage());
+}
+  }
 }
 
 long sleepMillis = (long) (Math.random() * interval);




svn commit: r1651559 - in /hive/branches/llap: common/src/java/org/apache/hadoop/hive/conf/ llap-client/src/java/org/apache/hadoop/hive/llap/io/api/ llap-client/src/java/org/apache/hadoop/hive/llap/io

2015-01-13 Thread sershe
Author: sershe
Date: Wed Jan 14 02:47:08 2015
New Revision: 1651559

URL: http://svn.apache.org/r1651559
Log:
Finish reworking LRFU policy for low-level cache (not clear if it's a good pick 
due to concurrency); tests; some pipeline adjustments

Added:
hive/branches/llap/llap-server/src/test/org/apache/hadoop/hive/llap/cache/

hive/branches/llap/llap-server/src/test/org/apache/hadoop/hive/llap/cache/TestLowLevelLrfuCachePolicy.java
Removed:

hive/branches/llap/llap-server/src/test/org/apache/hadoop/hive/llap/old/TestLrfuCachePolicy.java
Modified:
hive/branches/llap/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/EncodedColumn.java

hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/cache/LowLevelCache.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LlapCacheableBuffer.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelBuddyCache.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCachePolicy.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelCachePolicyBase.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/cache/LowLevelLrfuCachePolicy.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/VectorReader.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapIoImpl.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/ColumnVectorProducer.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataProducer.java

hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/orc/LLAPRecordReaderImpl.java

hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java

hive/branches/llap/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java

Modified: 
hive/branches/llap/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1651559&r1=1651558&r2=1651559&view=diff
==============================================================================
--- 
hive/branches/llap/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ 
hive/branches/llap/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
Wed Jan 14 02:47:08 2015
@@ -1969,10 +1969,11 @@ public class HiveConf extends Configurat
 "Updates tez job execution progress in-place in the terminal."),
 
 LLAP_ENABLED("hive.llap.enabled", true, ""),
-LLAP_ORC_CACHE_MIN_ALLOC("hive.llap.cache.orc.minalloc", 128 * 1024, ""),
-LLAP_ORC_CACHE_MAX_ALLOC("hive.llap.cache.orc.minalloc", 16 * 1024 * 1024, 
""),
-LLAP_ORC_CACHE_ARENA_SIZE("hive.llap.cache.orc.minalloc", 128L * 1024 * 
1024, ""),
-LLAP_ORC_CACHE_MAX_SIZE("hive.llap.cache.orc.minalloc", 1024L * 1024 * 
1024, ""),
+LLAP_LOW_LEVEL_CACHE("hive.llap.use.lowlevel.cache", true, ""),
+LLAP_ORC_CACHE_MIN_ALLOC("hive.llap.cache.orc.alloc.min", 128 * 1024, ""),
+LLAP_ORC_CACHE_MAX_ALLOC("hive.llap.cache.orc.alloc.max", 16 * 1024 * 
1024, ""),
+LLAP_ORC_CACHE_ARENA_SIZE("hive.llap.cache.orc.arena.size", 128 * 1024 * 
1024, ""),
+LLAP_ORC_CACHE_MAX_SIZE("hive.llap.cache.orc.size", 1024L * 1024 * 1024, 
""),
 LLAP_REQUEST_THREAD_COUNT("hive.llap.request.thread.count", 16, ""),
 LLAP_USE_LRFU("hive.llap.use.lrfu", true, ""),
 LLAP_LRFU_LAMBDA("hive.llap.lrfu.lambda", 0.01f, "")

Modified: 
hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/EncodedColumn.java
URL: 
http://svn.apache.org/viewvc/hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/EncodedColumn.java?rev=1651559&r1=1651558&r2=1651559&view=diff
==============================================================================
--- 
hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/EncodedColumn.java
 (original)
+++ 
hive/branches/llap/llap-client/src/java/org/apache/hadoop/hive/llap/io/api/EncodedColumn.java
 Wed Jan 14 02:47:08 2015
@@ -18,10 +18,17 @@
 
 package org.apache.hadoop.hive.llap.io.api;
 
+import org.apache.hadoop.hive.llap.io.api.cache.LlapMemoryBuffer;
+
 public class EncodedColumn {
   // TODO: temporary class. Will be filled in when reading (ORC) is 
implemented. Need to balance
   //   generality, and ability to not copy data from underlying low-level 
cached buffers.
-  public static class ColumnBuffer {}
+  public static class ColumnBuffer {
+// TODO: given how ORC will allocate, it might make sense to share array 
between all
+//   returned encodedColumn-s, and store index and length in the array.
+public LlapMemoryBuffer[] cacheBuffers;
+public in

svn commit: r1651574 - /hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java

2015-01-13 Thread szehon
Author: szehon
Date: Wed Jan 14 06:28:57 2015
New Revision: 1651574

URL: http://svn.apache.org/r1651574
Log:
HIVE-9360 : TestSparkClient throws Timeoutexception (Szehon, reviewed by Brock)

Modified:

hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java

Modified: 
hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java?rev=1651574&r1=1651573&r2=1651574&view=diff
==============================================================================
--- 
hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
 (original)
+++ 
hive/trunk/spark-client/src/test/java/org/apache/hive/spark/client/TestSparkClient.java
 Wed Jan 14 06:28:57 2015
@@ -50,7 +50,7 @@ import com.google.common.io.ByteStreams;
 public class TestSparkClient {
 
   // Timeouts are bad... mmmkay.
-  private static final long TIMEOUT = 10;
+  private static final long TIMEOUT = 20;
 
   private Map createConf(boolean local) {
 Map conf = new HashMap();