[2/2] hive git commit: HIVE-20225: SerDe to support Teradata Binary Format (Lu Li via cws)

2018-08-29 Thread cws
HIVE-20225: SerDe to support Teradata Binary Format (Lu Li via cws)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b8d82844
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b8d82844
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b8d82844

Branch: refs/heads/master
Commit: b8d82844b9743d7a35dcc7fe6c702486fc4a9d72
Parents: cf5486d
Author: Carl Steinbach 
Authored: Wed Aug 29 13:30:34 2018 -0700
Committer: Carl Steinbach 
Committed: Wed Aug 29 13:50:56 2018 -0700

--
 .../ql/io/TeradataBinaryFileInputFormat.java|  66 ++
 .../ql/io/TeradataBinaryFileOutputFormat.java   | 112 
 .../hive/ql/io/TeradataBinaryRecordReader.java  | 280 +
 .../clientpositive/test_teradatabinaryfile.q| 123 
 .../test_teradatabinaryfile.q.out   | 537 +
 .../teradata/TeradataBinaryDataInputStream.java | 199 +++
 .../TeradataBinaryDataOutputStream.java | 270 +
 .../serde2/teradata/TeradataBinarySerde.java| 597 +++
 .../TestTeradataBinarySerdeForDate.java |  76 +++
 .../TestTeradataBinarySerdeForDecimal.java  | 106 
 .../TestTeradataBinarySerdeForTimeStamp.java| 111 
 .../TestTeradataBinarySerdeGeneral.java | 133 +
 12 files changed, 2610 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b8d82844/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
new file mode 100644
index 000..bed87c5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+import java.io.IOException;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+
+/**
+ * https://cwiki.apache.org/confluence/display/Hive/TeradataBinarySerde.
+ * FileInputFormat for Teradata binary files.
+ *
+ * In the Teradata Binary File, each record is laid out as follows:
+ * the first 2 bytes hold the length of the record body that follows;
+ * next comes the null bitmap, whose length depends on the number of fields;
+ * then each field of the record is serialized into bytes, with the serialization strategy determined by the field's type;
+ * finally, a single byte (0x0a) terminates the record.
+ *
+ * This InputFormat does not currently support splitting the file.
+ * Teradata binary files use little-endian byte order.
+ */
+public class TeradataBinaryFileInputFormat extends FileInputFormat<NullWritable, BytesWritable> {
+
+  @Override public RecordReader<NullWritable, BytesWritable> getRecordReader(InputSplit split, JobConf job,
+      Reporter reporter) throws IOException {
+    reporter.setStatus(split.toString());
+    return new TeradataBinaryRecordReader(job, (FileSplit) split);
+  }
+
+  /**
+   * The TeradataBinaryFileInputFormat is not splittable right now.
+   * Override the isSplitable function.
+   *
+   * @param fs the file system that the file is on
+   * @param filename the file name to check
+   * @return whether this file is splittable
+   */
+  @Override protected boolean isSplitable(FileSystem fs, Path filename) {
+    return false;
+  }
+}
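
For illustration, a minimal sketch of a reader for the record layout described in the Javadoc above, assuming the 2-byte little-endian length, a null bitmap sized from the field count, and the trailing 0x0a byte; class and method names here are hypothetical, not part of the patch:

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;

// Illustrative only: reads one record laid out as documented above.
// Assumes a 2-byte little-endian length, then the record body (null bitmap
// plus field bytes), then a single 0x0a terminator byte.
public class TeradataRecordSketch {

  /** Returns the record body (bitmap + fields), or null at a clean end of stream. */
  public static byte[] readRecord(DataInputStream in) throws IOException {
    int lo = in.read();
    if (lo < 0) {
      return null;                      // no more records
    }
    int hi = in.read();
    if (hi < 0) {
      throw new EOFException("truncated record length");
    }
    int length = (hi << 8) | lo;        // little endian: low byte arrives first
    byte[] body = new byte[length];
    in.readFully(body);
    if (in.read() != 0x0a) {
      throw new IOException("missing 0x0a record terminator");
    }
    return body;
  }

  /** One bit per field, rounded up to whole bytes. */
  public static int nullBitmapBytes(int numFields) {
    return (numFields + 7) / 8;
  }

  /** Whether field i (0-based) is flagged null; MSB-first bit order is an assumption here. */
  public static boolean isNull(byte[] body, int i) {
    return (body[i / 8] & (1 << (7 - (i % 8)))) != 0;
  }
}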

http://git-wip-us.apache.org/repos/asf/hive/blob/b8d82844/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java
--
diff --git 
a/ql/src/java/org/apac

[1/2] hive git commit: HIVE-20225: SerDe to support Teradata Binary Format (Lu Li via cws)

2018-08-29 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master cf5486dd3 -> b8d82844b


http://git-wip-us.apache.org/repos/asf/hive/blob/b8d82844/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
--
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java b/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
new file mode 100644
index 000..ccf5f44
--- /dev/null
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
@@ -0,0 +1,597 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.teradata;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
+import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.StructField;
+import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DateObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.DoubleObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.ShortObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.hive.common.type.Date;
+
+import javax.annotation.Nullable;
+import java.io.ByteArrayInputStream;
+import java.io.EOFException;
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static java.lang.String.format;
+
+/**
+ * https://cwiki.apache.org/confluence/display/Hive/TeradataBinarySerde.
+ * TeradataBinarySerde handles the serialization and deserialization of 
Teradata Binary Record
+ * passed from 
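
The class description is cut off above; for orientation, a skeleton of the AbstractSerDe contract that TeradataBinarySerde fills in (the method set as it existed in Hive at the time; the bodies below are placeholders, not the patch's logic):

import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Writable;

// Hypothetical skeleton only; TeradataBinarySerde's real logic lives in the patch.
public class TeradataBinarySerdeSketch extends AbstractSerDe {
  @Override
  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    // Parse column names/types from the table properties and build the
    // row's StructObjectInspector.
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return BytesWritable.class;   // each row is one Teradata binary record
  }

  @Override
  public Writable serialize(Object obj, ObjectInspector oi) throws SerDeException {
    // Write the null bitmap, then each field in Teradata's little-endian encoding.
    throw new UnsupportedOperationException("sketch only");
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    // Read the null bitmap, then decode each field according to its TypeInfo.
    throw new UnsupportedOperationException("sketch only");
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return null;                  // the row StructObjectInspector in practice
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null;                  // no stats tracked in this sketch
  }
}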

hive git commit: HIVE-17530: ClassCastException when converting uniontype (Anthony Hsu via Carl Steinbach)

2017-09-18 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master a51ae9c8f -> 527d13b08


HIVE-17530: ClassCastException when converting uniontype (Anthony Hsu via Carl 
Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/527d13b0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/527d13b0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/527d13b0

Branch: refs/heads/master
Commit: 527d13b080ed970b527c7cb2bfd98fd6fd17f5d5
Parents: a51ae9c
Author: Carl Steinbach 
Authored: Mon Sep 18 11:47:29 2017 -0700
Committer: Carl Steinbach 
Committed: Mon Sep 18 11:47:29 2017 -0700

--
 .../ql/exec/vector/VectorDeserializeRow.java|  4 +-
 .../orc_avro_partition_uniontype.q  |  9 +
 .../orc_avro_partition_uniontype.q.out  | 40 
 .../ObjectInspectorConverters.java  |  4 +-
 .../SettableUnionObjectInspector.java   |  6 +--
 .../StandardUnionObjectInspector.java   | 16 +---
 .../TestObjectInspectorConverters.java  | 30 ---
 7 files changed, 83 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/527d13b0/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
index 2ad06fc..3826182 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorDeserializeRow.java
@@ -1251,8 +1251,8 @@ public final class VectorDeserializeRow {
 
 final Object union = unionOI.create();
 final int tag = deserializeRead.currentInt;
-    unionOI.addField(union, new StandardUnion((byte) tag,
-        convertComplexFieldRowColumn(unionColumnVector.fields[tag], batchIndex, fields[tag])));
+    unionOI.setFieldAndTag(union, new StandardUnion((byte) tag,
+        convertComplexFieldRowColumn(unionColumnVector.fields[tag], batchIndex, fields[tag])), (byte) tag);
 deserializeRead.finishComplexVariableFieldsType();
 return union;
   }
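
The hunk swaps addField for setFieldAndTag so the union's branch tag is recorded together with its value. A hypothetical sketch of the invariant at stake — if the stored tag and value disagree, a reader casting through the tag's branch type hits the ClassCastException this patch fixes:

// Hypothetical illustration; the real type is StandardUnionObjectInspector.StandardUnion.
public final class UnionInvariantSketch {
  private byte tag;       // which branch of e.g. uniontype<boolean,string> is populated
  private Object value;   // must be an instance of that branch's type

  public void setFieldAndTag(Object value, byte tag) {
    this.value = value;   // setting both together keeps them consistent
    this.tag = tag;
  }

  public String readAsString() {
    if (tag != 1) {
      throw new IllegalStateException("tag does not select the string branch");
    }
    return (String) value;  // safe only because tag and value agree
  }
}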

http://git-wip-us.apache.org/repos/asf/hive/blob/527d13b0/ql/src/test/queries/clientpositive/orc_avro_partition_uniontype.q
--
diff --git a/ql/src/test/queries/clientpositive/orc_avro_partition_uniontype.q 
b/ql/src/test/queries/clientpositive/orc_avro_partition_uniontype.q
new file mode 100644
index 000..47ac526
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_avro_partition_uniontype.q
@@ -0,0 +1,9 @@
+SET hive.exec.schema.evolution = false;
+
+CREATE TABLE avro_orc_partitioned_uniontype (a uniontype<boolean,string>) PARTITIONED BY (b int) STORED AS ORC;
+
+INSERT INTO avro_orc_partitioned_uniontype PARTITION (b=1) SELECT create_union(1, true, value) FROM src LIMIT 5;
+
+ALTER TABLE avro_orc_partitioned_uniontype SET FILEFORMAT AVRO;
+
+SELECT * FROM avro_orc_partitioned_uniontype;

http://git-wip-us.apache.org/repos/asf/hive/blob/527d13b0/ql/src/test/results/clientpositive/orc_avro_partition_uniontype.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/orc_avro_partition_uniontype.q.out 
b/ql/src/test/results/clientpositive/orc_avro_partition_uniontype.q.out
new file mode 100644
index 000..da8d9cc
--- /dev/null
+++ b/ql/src/test/results/clientpositive/orc_avro_partition_uniontype.q.out
@@ -0,0 +1,40 @@
+PREHOOK: query: CREATE TABLE avro_orc_partitioned_uniontype (a uniontype<boolean,string>) PARTITIONED BY (b int) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@avro_orc_partitioned_uniontype
+POSTHOOK: query: CREATE TABLE avro_orc_partitioned_uniontype (a uniontype<boolean,string>) PARTITIONED BY (b int) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@avro_orc_partitioned_uniontype
+PREHOOK: query: INSERT INTO avro_orc_partitioned_uniontype PARTITION (b=1) 
SELECT create_union(1, true, value) FROM src LIMIT 5
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@avro_orc_partitioned_uniontype@b=1
+POSTHOOK: query: INSERT INTO avro_orc_partitioned_uniontype PARTITION (b=1) 
SELECT create_union(1, true, value) FROM src LIMIT 5
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@avro_orc_partitioned_uniontype@b=1
+POSTHOOK: Lineage: avro_orc_partitioned_uniontype PARTITION(b=1).a EXPRESSION 
[(src)src.FieldSchema(name:value, type:string, comment:default), ]

hive git commit: HIVE-17394: AvroSerde is regenerating TypeInfo objects for each nullable Avro field for every row (Anthony Hsu, reviewed by Carl Steinbach)

2017-09-12 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 55d8ee0b5 -> bc0a2f1a1


HIVE-17394: AvroSerde is regenerating TypeInfo objects for each nullable Avro 
field for every row (Anthony Hsu, reviewed by Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bc0a2f1a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bc0a2f1a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bc0a2f1a

Branch: refs/heads/master
Commit: bc0a2f1a19a86ecef3fbf9644379b62265e94c7b
Parents: 55d8ee0
Author: Carl Steinbach 
Authored: Tue Sep 12 14:44:43 2017 -0700
Committer: Carl Steinbach 
Committed: Tue Sep 12 14:46:58 2017 -0700

--
 .../hive/serde2/avro/AvroDeserializer.java  | 22 +---
 1 file changed, 10 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bc0a2f1a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
--
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
index ecfe15f..b7b3d12 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
@@ -201,8 +201,8 @@ class AvroDeserializer {
 // Klaxon! Klaxon! Klaxon!
 // Avro requires NULLable types to be defined as unions of some type T
 // and NULL.  This is annoying and we're going to hide it from the user.
-if(AvroSerdeUtils.isNullableType(recordSchema)) {
-  return deserializeNullableUnion(datum, fileSchema, recordSchema);
+if (AvroSerdeUtils.isNullableType(recordSchema)) {
+  return deserializeNullableUnion(datum, fileSchema, recordSchema, 
columnType);
 }
 
 switch(columnType.getCategory()) {
@@ -301,29 +301,28 @@ class AvroDeserializer {
   }
 
   /**
-   * Extract either a null or the correct type from a Nullable type.  This is
-   * horrible in that we rebuild the TypeInfo every time.
+   * Extract either a null or the correct type from a Nullable type.
*/
-  private Object deserializeNullableUnion(Object datum, Schema fileSchema, 
Schema recordSchema)
+  private Object deserializeNullableUnion(Object datum, Schema fileSchema, 
Schema recordSchema, TypeInfo columnType)
 throws AvroSerdeException {
 if (recordSchema.getTypes().size() == 2) {
   // A type like [NULL, T]
-  return deserializeSingleItemNullableUnion(datum, fileSchema, 
recordSchema);
+  return deserializeSingleItemNullableUnion(datum, fileSchema, 
recordSchema, columnType);
 } else {
   // Types like [NULL, T1, T2, ...]
   if (datum == null) {
 return null;
   } else {
 Schema newRecordSchema = 
AvroSerdeUtils.getOtherTypeFromNullableType(recordSchema);
-return worker(datum, fileSchema, newRecordSchema,
-SchemaToTypeInfo.generateTypeInfo(newRecordSchema, null));
+return worker(datum, fileSchema, newRecordSchema, columnType);
   }
 }
   }
 
   private Object deserializeSingleItemNullableUnion(Object datum,
 Schema fileSchema,
-Schema recordSchema)
+Schema recordSchema,
+TypeInfo columnType)
   throws AvroSerdeException {
 int tag = GenericData.get().resolveUnion(recordSchema, datum); // 
Determine index of value
 Schema schema = recordSchema.getTypes().get(tag);
@@ -361,8 +360,7 @@ class AvroDeserializer {
 currentFileSchema = fileSchema;
   }
 }
-return worker(datum, currentFileSchema, schema,
-  SchemaToTypeInfo.generateTypeInfo(schema, null));
+return worker(datum, currentFileSchema, schema, columnType);
   }
 
   private Object deserializeStruct(GenericData.Record datum, Schema 
fileSchema, StructTypeInfo columnType)
@@ -377,7 +375,7 @@ class AvroDeserializer {
 
   private Object deserializeUnion(Object datum, Schema fileSchema, Schema 
recordSchema,
   UnionTypeInfo columnType) throws 
AvroSerdeException {
-// Calculate tags individually since the schema can evolve and can have 
different tags. In worst case, both schemas are same 
+// Calculate tags individually since the schema can evolve and can have 
different tags. In worst case, both schemas are same
 // and we would end up doing calculations twice to get the same tag
 int fsTag = GenericData.get().resolveUnion(fileSchema, datum); // 
Determine index of value from fileSchema
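
The Javadoc edit above names the problem: the TypeInfo was rebuilt from the Avro Schema on every row. The patch threads the already-known columnType through instead; as a generic illustration of the cost being avoided, a memoization sketch (names hypothetical, not the patch's code):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Hypothetical illustration of the fix's theme: derive an expensive value
// (such as a TypeInfo per Avro Schema) once and reuse it, instead of
// recomputing it for every row deserialized.
public final class MemoSketch<K, V> {
  private final Map<K, V> cache = new ConcurrentHashMap<>();
  private final Function<K, V> expensiveDerivation;

  public MemoSketch(Function<K, V> expensiveDerivation) {
    this.expensiveDerivation = expensiveDerivation;
  }

  public V get(K key) {
    // computeIfAbsent runs the derivation at most once per distinct key
    return cache.computeIfAbsent(key, expensiveDerivation);
  }
}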
 

hive git commit: HIVE-16831: Add unit tests for NPE fixes in HIVE-12054 (Sunitha Beeram, reviewed by Carl Steinbach)

2017-06-13 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master f572ce6d2 -> d2e870438


HIVE-16831: Add unit tests for NPE fixes in HIVE-12054 (Sunitha Beeram, 
reviewed by Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d2e87043
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d2e87043
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d2e87043

Branch: refs/heads/master
Commit: d2e870438a99fe84369154d6f399f7864a36319a
Parents: f572ce6
Author: Carl Steinbach 
Authored: Tue Jun 13 17:12:13 2017 -0700
Committer: Carl Steinbach 
Committed: Tue Jun 13 17:12:13 2017 -0700

--
 .../queries/clientpositive/orc_empty_table.q| 11 
 .../clientpositive/orc_empty_table.q.out| 68 
 2 files changed, 79 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d2e87043/ql/src/test/queries/clientpositive/orc_empty_table.q
--
diff --git a/ql/src/test/queries/clientpositive/orc_empty_table.q 
b/ql/src/test/queries/clientpositive/orc_empty_table.q
new file mode 100644
index 000..05bba28
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/orc_empty_table.q
@@ -0,0 +1,11 @@
+CREATE TABLE test_orc_empty_table_with_struct (struct_field STRUCT) STORED AS ORC;
+SELECT count(*) FROM test_orc_empty_table_with_struct;
+
+CREATE TABLE test_orc_empty_table_with_map (map_field MAP) 
STORED AS ORC;
+SELECT count(*) FROM test_orc_empty_table_with_map;
+
+CREATE TABLE test_orc_empty_table_with_list (list_field ARRAY) STORED AS 
ORC;
+SELECT count(*) FROM test_orc_empty_table_with_list;
+
+CREATE TABLE test_orc_empty_table_with_union (union_field UNIONTYPE) STORED AS ORC;
+SELECT count(*) FROM test_orc_empty_table_with_union;

http://git-wip-us.apache.org/repos/asf/hive/blob/d2e87043/ql/src/test/results/clientpositive/orc_empty_table.q.out
--
diff --git a/ql/src/test/results/clientpositive/orc_empty_table.q.out 
b/ql/src/test/results/clientpositive/orc_empty_table.q.out
new file mode 100644
index 000..e95589e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/orc_empty_table.q.out
@@ -0,0 +1,68 @@
+PREHOOK: query: CREATE TABLE test_orc_empty_table_with_struct (struct_field 
STRUCT) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_orc_empty_table_with_struct
+POSTHOOK: query: CREATE TABLE test_orc_empty_table_with_struct (struct_field 
STRUCT) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_orc_empty_table_with_struct
+PREHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_struct
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_orc_empty_table_with_struct
+ A masked pattern was here 
+POSTHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_struct
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_orc_empty_table_with_struct
+ A masked pattern was here 
+0
+PREHOOK: query: CREATE TABLE test_orc_empty_table_with_map (map_field 
MAP) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_orc_empty_table_with_map
+POSTHOOK: query: CREATE TABLE test_orc_empty_table_with_map (map_field 
MAP) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_orc_empty_table_with_map
+PREHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_map
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_orc_empty_table_with_map
+ A masked pattern was here 
+POSTHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_map
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_orc_empty_table_with_map
+ A masked pattern was here 
+0
+PREHOOK: query: CREATE TABLE test_orc_empty_table_with_list (list_field 
ARRAY) STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@test_orc_empty_table_with_list
+POSTHOOK: query: CREATE TABLE test_orc_empty_table_with_list (list_field 
ARRAY) STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@test_orc_empty_table_with_list
+PREHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_list
+PREHOOK: type: QUERY
+PREHOOK: Input: default@test_orc_empty_table_with_list
+ A masked pattern was here 
+POSTHOOK: query: SELECT count(*) FROM test_orc_empty_table_with_list
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@test_orc_empty_table_with_list
+ A masked pattern was here 
+0
+PREHOOK: query: CREATE TABLE 

hive git commit: HIVE-16844: Fix Connection leak in ObjectStore when new Conf object is used (Sunitha Beeram, reviewed by Carl Steinbach)

2017-06-13 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 49ae8b694 -> f572ce6d2


HIVE-16844: Fix Connection leak in ObjectStore when new Conf object is used 
(Sunitha Beeram, reviewed by Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/f572ce6d
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/f572ce6d
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/f572ce6d

Branch: refs/heads/master
Commit: f572ce6d2c708ae0a08254d042ce5739c096ba18
Parents: 49ae8b6
Author: Carl Steinbach 
Authored: Tue Jun 13 17:08:46 2017 -0700
Committer: Carl Steinbach 
Committed: Tue Jun 13 17:08:46 2017 -0700

--
 .../src/java/org/apache/hadoop/hive/metastore/ObjectStore.java | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/f572ce6d/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index d02c5da..e13612e 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -290,6 +290,8 @@ public class ObjectStore implements RawStore, Configurable {
   if (propsChanged) {
 if (pmf != null){
   clearOutPmfClassLoaderCache(pmf);
+  // close the underlying connection pool to avoid leaks
+  pmf.close();
 }
 pmf = null;
 prop = null;
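
For context, a minimal sketch of the javax.jdo lifecycle behind the fix: the PersistenceManagerFactory owns the connection pool, so dropping the reference without close() strands the pooled connections (the properties below are placeholders):

import java.util.Properties;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;

// Sketch only: shows why the factory must be closed before the reference is dropped.
public final class PmfLifecycleSketch {
  public static void main(String[] args) {
    Properties props = new Properties();  // datastore settings would go here
    PersistenceManagerFactory pmf = JDOHelper.getPersistenceManagerFactory(props);
    PersistenceManager pm = pmf.getPersistenceManager();
    try {
      // ... queries and updates against the metastore schema ...
    } finally {
      pm.close();
      pmf.close();  // releases pooled connections; without this they leak
    }
  }
}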



hive git commit: HIVE-15229: 'like any' and 'like all' operators in hive (Simanchal Das via Carl Steinbach)

2017-05-03 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master ed6501ed3 -> 740779f66


HIVE-15229: 'like any' and 'like all' operators in hive (Simanchal Das via Carl 
Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/740779f6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/740779f6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/740779f6

Branch: refs/heads/master
Commit: 740779f66a4678324428ca0c240ae3ca44a00974
Parents: ed6501e
Author: Carl Steinbach 
Authored: Wed May 3 14:46:31 2017 -0700
Committer: Carl Steinbach 
Committed: Wed May 3 14:46:31 2017 -0700

--
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   2 +
 .../org/apache/hadoop/hive/ql/parse/HiveLexer.g |   1 +
 .../apache/hadoop/hive/ql/parse/HiveParser.g|   2 +
 .../hadoop/hive/ql/parse/IdentifiersParser.g|   6 +
 .../hive/ql/parse/TypeCheckProcFactory.java |   3 +
 .../hive/ql/udf/generic/GenericUDFLikeAll.java  | 133 +
 .../hive/ql/udf/generic/GenericUDFLikeAny.java  | 134 +
 .../ql/udf/generic/TestGenericUDFLikeAll.java   |  88 +
 .../ql/udf/generic/TestGenericUDFLikeAny.java   |  87 +
 .../queries/clientnegative/udf_likeall_wrong1.q |   2 +
 .../queries/clientnegative/udf_likeany_wrong1.q |   2 +
 .../test/queries/clientpositive/udf_likeall.q   |  57 ++
 .../test/queries/clientpositive/udf_likeany.q   |  57 ++
 .../clientnegative/udf_likeall_wrong1.q.out |   1 +
 .../clientnegative/udf_likeany_wrong1.q.out |   1 +
 .../results/clientpositive/show_functions.q.out |   2 +
 .../results/clientpositive/udf_likeall.q.out| 187 +++
 .../results/clientpositive/udf_likeany.q.out| 187 +++
 18 files changed, 952 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/740779f6/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 8dc5f2e..1b556ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -272,6 +272,8 @@ public final class FunctionRegistry {
 system.registerGenericUDF("initcap", GenericUDFInitCap.class);
 
 system.registerUDF("like", UDFLike.class, true);
+system.registerGenericUDF("likeany", GenericUDFLikeAny.class);
+system.registerGenericUDF("likeall", GenericUDFLikeAll.class);
 system.registerGenericUDF("rlike", GenericUDFRegExp.class);
 system.registerGenericUDF("regexp", GenericUDFRegExp.class);
 system.registerUDF("regexp_replace", UDFRegExpReplace.class, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/740779f6/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
index 0721b92..190b66b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
@@ -49,6 +49,7 @@ KW_AND : 'AND';
 KW_OR : 'OR';
 KW_NOT : 'NOT' | '!';
 KW_LIKE : 'LIKE';
+KW_ANY : 'ANY';
 
 KW_IF : 'IF';
 KW_EXISTS : 'EXISTS';

http://git-wip-us.apache.org/repos/asf/hive/blob/740779f6/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index d98a663..ca639d3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -397,6 +397,8 @@ TOK_OPERATOR;
 TOK_EXPRESSION;
 TOK_DETAIL;
 TOK_BLOCKING;
+TOK_LIKEANY;
+TOK_LIKEALL;
 }
 
 

http://git-wip-us.apache.org/repos/asf/hive/blob/740779f6/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 8598fae..645ced9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -567,6 +567,12 @@ precedenceSimilarExpressionAtom[CommonTree t]
 |
 KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND 
(max=precedenceBitwiseOrExpression)
 -> ^(TOK_FUNCTION Identifier["between"] KW_FALSE {$t} $min $max)
+|
+KW_LIKE KW_ANY 
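
The grammar hunk above is cut off, but the semantics being added are: x LIKE ANY (p1, p2, ...) is true if x matches at least one pattern, and x LIKE ALL requires a match on every pattern. A hypothetical Java sketch of those semantics via regex translation of the LIKE wildcards % and _ (not the GenericUDFLikeAny/GenericUDFLikeAll code):

import java.util.regex.Pattern;

// Illustrative only: translate each SQL LIKE pattern to a regex
// ('%' -> '.*', '_' -> '.') and combine the matches.
public final class LikeAnyAllSketch {
  static Pattern likeToRegex(String like) {
    StringBuilder rx = new StringBuilder();
    for (char c : like.toCharArray()) {
      if (c == '%') {
        rx.append(".*");
      } else if (c == '_') {
        rx.append('.');
      } else {
        rx.append(Pattern.quote(String.valueOf(c)));  // escape literal chars
      }
    }
    return Pattern.compile(rx.toString(), Pattern.DOTALL);
  }

  static boolean likeAny(String value, String... patterns) {
    for (String p : patterns) {
      if (likeToRegex(p).matcher(value).matches()) {
        return true;   // one match is enough
      }
    }
    return false;
  }

  static boolean likeAll(String value, String... patterns) {
    for (String p : patterns) {
      if (!likeToRegex(p).matcher(value).matches()) {
        return false;  // every pattern must match
      }
    }
    return true;
  }

  public static void main(String[] args) {
    System.out.println(likeAny("apache hive", "%hive", "pig%"));    // true
    System.out.println(likeAll("apache hive", "%hive", "apache%")); // true
  }
}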

hive git commit: HIVE-16393: Fix visibility of CodahaleReporter interface (Sunitha Beeram via Carl Steinbach)

2017-04-09 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 7629dbd5b -> c3aba153c


HIVE-16393: Fix visibility of CodahaleReporter interface (Sunitha Beeram via 
Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c3aba153
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c3aba153
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c3aba153

Branch: refs/heads/master
Commit: c3aba153cb92d71d41554a0714a6694980f9b363
Parents: 7629dbd
Author: Carl Steinbach 
Authored: Sat Apr 8 23:42:01 2017 -0700
Committer: Carl Steinbach 
Committed: Sat Apr 8 23:42:01 2017 -0700

--
 .../hadoop/hive/common/metrics/metrics2/CodahaleReporter.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c3aba153/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java
 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java
index ba4d8e4..9424f28 100644
--- 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java
+++ 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleReporter.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.common.metrics.metrics2;
 import com.codahale.metrics.Reporter;
 import java.io.Closeable;
 
-interface CodahaleReporter extends Closeable, Reporter {
+public interface CodahaleReporter extends Closeable, Reporter {
 
   /**
* Start the reporter.



hive git commit: HIVE-16206: Make Codahale metrics reporters pluggable (Sunitha Beeram via Carl Steinbach)

2017-04-03 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 6a82a898e -> bf98700a7


HIVE-16206: Make Codahale metrics reporters pluggable (Sunitha Beeram via Carl 
Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/bf98700a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/bf98700a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/bf98700a

Branch: refs/heads/master
Commit: bf98700a760ea1cca30f236e7dce10272fffdd48
Parents: 6a82a89
Author: Carl Steinbach 
Authored: Mon Apr 3 10:09:53 2017 -0700
Committer: Carl Steinbach 
Committed: Mon Apr 3 10:09:53 2017 -0700

--
 .../metrics/metrics2/CodahaleMetrics.java   | 192 +--
 .../metrics/metrics2/CodahaleReporter.java  |  29 +++
 .../metrics2/ConsoleMetricsReporter.java|  55 ++
 .../metrics/metrics2/JmxMetricsReporter.java|  56 ++
 .../metrics2/JsonFileMetricsReporter.java   | 136 +
 .../metrics/metrics2/Metrics2Reporter.java  |  62 ++
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  22 ++-
 .../metrics/metrics2/TestCodahaleMetrics.java   |   7 +-
 .../metrics2/TestCodahaleReportersConf.java | 145 ++
 9 files changed, 589 insertions(+), 115 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/bf98700a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
--
diff --git 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
index e8abf6c..2d6c1b4 100644
--- 
a/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
+++ 
b/common/src/java/org/apache/hadoop/hive/common/metrics/metrics2/CodahaleMetrics.java
@@ -44,6 +44,8 @@ import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
 import com.google.common.collect.Lists;
 
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
@@ -190,22 +192,8 @@ public class CodahaleMetrics implements 
org.apache.hadoop.hive.common.metrics.co
 registerAll("threads", new ThreadStatesGaugeSet());
 registerAll("classLoading", new ClassLoadingGaugeSet());
 
-//Metrics reporter
-    Set<MetricsReporting> finalReporterList = new HashSet<MetricsReporting>();
-    List<String> metricsReporterNames = Lists.newArrayList(
-        Splitter.on(",").trimResults().omitEmptyStrings().split(conf.getVar(HiveConf.ConfVars.HIVE_METRICS_REPORTER)));
-
-if(metricsReporterNames != null) {
-  for (String metricsReportingName : metricsReporterNames) {
-try {
-  MetricsReporting reporter = 
MetricsReporting.valueOf(metricsReportingName.trim().toUpperCase());
-  finalReporterList.add(reporter);
-} catch (IllegalArgumentException e) {
-  LOGGER.warn("Metrics reporter skipped due to invalid configured 
reporter: " + metricsReportingName);
-}
-  }
-}
-initReporting(finalReporterList);
+//initialize reporters
+initReporting();
   }
 
 
@@ -385,107 +373,99 @@ public class CodahaleMetrics implements 
org.apache.hadoop.hive.common.metrics.co
   }
 
   /**
-   * Should be only called once to initialize the reporters
+   * Initializes reporters from HIVE_CODAHALE_METRICS_REPORTER_CLASSES or 
HIVE_METRICS_REPORTER if the former is not defined.
+   * Note: if both confs are defined, only  
HIVE_CODAHALE_METRICS_REPORTER_CLASSES will be used.
*/
-  private void initReporting(Set reportingSet) {
-for (MetricsReporting reporting : reportingSet) {
-  switch(reporting) {
-case CONSOLE:
-  final ConsoleReporter consoleReporter = 
ConsoleReporter.forRegistry(metricRegistry)
-.convertRatesTo(TimeUnit.SECONDS)
-.convertDurationsTo(TimeUnit.MILLISECONDS)
-.build();
-  consoleReporter.start(1, TimeUnit.SECONDS);
-  reporters.add(consoleReporter);
-  break;
-case JMX:
-  final JmxReporter jmxReporter = 
JmxReporter.forRegistry(metricRegistry)
-.convertRatesTo(TimeUnit.SECONDS)
-.convertDurationsTo(TimeUnit.MILLISECONDS)
-.build();
-  jmxReporter.start();
-  reporters.add(jmxReporter);
-  break;
-case JSON_FILE:
-  final JsonFileReporter jsonFileReporter = new JsonFileReporter();
-  jsonFileReporter.start();
-  reporters.add(jsonFileReporter);
-  break;
-case HADOOP2:
-  String applicationName = 
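
The deleted switch above hard-coded one case per reporter; making reporters pluggable means naming classes in configuration and constructing them reflectively. A sketch of that pattern, assuming a (MetricRegistry) constructor — the actual Hive reporters take Hive-specific arguments:

import com.codahale.metrics.MetricRegistry;
import java.io.Closeable;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.List;

// Hypothetical sketch of reflective reporter loading: each configured class
// name is resolved and constructed, replacing a fixed switch over enum values.
public final class PluggableReporterSketch {
  public static List<Closeable> loadReporters(MetricRegistry registry, String csv)
      throws ReflectiveOperationException {
    List<Closeable> reporters = new ArrayList<>();
    for (String name : csv.split(",")) {
      Class<? extends Closeable> cls =
          Class.forName(name.trim()).asSubclass(Closeable.class);
      // Assumed constructor shape; Hive's reporters take (MetricRegistry, HiveConf).
      Constructor<? extends Closeable> ctor = cls.getConstructor(MetricRegistry.class);
      reporters.add(ctor.newInstance(registry));
    }
    return reporters;
  }
}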

hive git commit: HIVE-13046: DependencyResolver should not lowercase the dependency URI's authority (Anthony Hsu via Carl Steinbach)

2016-10-17 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 8029e11b3 -> 8d3925f61


HIVE-13046: DependencyResolver should not lowercase the dependency URI's 
authority (Anthony Hsu via Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8d3925f6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8d3925f6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8d3925f6

Branch: refs/heads/master
Commit: 8d3925f6138fce0e0b7c7832f386e89dfd5b0575
Parents: 8029e11
Author: Carl Steinbach 
Authored: Mon Oct 17 10:18:11 2016 -0700
Committer: Carl Steinbach 
Committed: Mon Oct 17 10:18:11 2016 -0700

--
 ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8d3925f6/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java
index 15d0fa1..d080c47 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java
@@ -81,7 +81,7 @@ public class DependencyResolver {
 if (authority == null) {
   throw new URISyntaxException(authority, "Invalid url: Expected 
'org:module:version', found null");
 }
-String[] authorityTokens = authority.toLowerCase().split(":");
+String[] authorityTokens = authority.split(":");
 
 if (authorityTokens.length != 3) {
   throw new URISyntaxException(authority, "Invalid url: Expected 
'org:module:version', found " + authority);



hive git commit: HIVE-14159 : sorting of tuple array using multiple field[s] (Simanchal Das via Carl Steinbach)

2016-09-07 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 2de450a1c -> 6e76ee3ae


HIVE-14159 : sorting of tuple array using multiple field[s] (Simanchal Das via 
Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6e76ee3a
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6e76ee3a
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6e76ee3a

Branch: refs/heads/master
Commit: 6e76ee3aef2210b2a1efa20d92ac997800cfcb75
Parents: 2de450a
Author: Carl Steinbach 
Authored: Wed Sep 7 11:28:35 2016 -0700
Committer: Carl Steinbach 
Committed: Wed Sep 7 11:28:35 2016 -0700

--
 .../test/resources/testconfiguration.properties |   1 +
 .../resources/testconfiguration.properties.orig |   8 +-
 .../hadoop/hive/ql/exec/FunctionRegistry.java   |   1 +
 .../udf/generic/GenericUDFSortArrayByField.java | 202 ++
 .../generic/TestGenericUDFSortArrayByField.java | 228 +++
 .../clientnegative/udf_sort_array_by_wrong1.q   |   2 +
 .../clientnegative/udf_sort_array_by_wrong2.q   |   2 +
 .../clientnegative/udf_sort_array_by_wrong3.q   |  16 +
 .../queries/clientpositive/udf_sort_array_by.q  | 136 +++
 .../beelinepositive/show_functions.q.out|   1 +
 .../udf_sort_array_by_wrong1.q.out  |   1 +
 .../udf_sort_array_by_wrong2.q.out  |   1 +
 .../udf_sort_array_by_wrong3.q.out  |  37 ++
 .../results/clientpositive/show_functions.q.out |   1 +
 .../clientpositive/udf_sort_array_by.q.out  | 401 +++
 15 files changed, 1036 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6e76ee3a/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index a920ca9..96a03f6 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -686,6 +686,7 @@ beeline.positive.exclude=add_part_exist.q,\
   udf_printf.q,\
   udf_sentences.q,\
   udf_sort_array.q,\
+  udf_sort_array_by.q,\
   udf_split.q,\
   udf_struct.q,\
   udf_substr.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/6e76ee3a/itests/src/test/resources/testconfiguration.properties.orig
--
diff --git a/itests/src/test/resources/testconfiguration.properties.orig 
b/itests/src/test/resources/testconfiguration.properties.orig
index d5ee9ed..a920ca9 100644
--- a/itests/src/test/resources/testconfiguration.properties.orig
+++ b/itests/src/test/resources/testconfiguration.properties.orig
@@ -33,7 +33,6 @@ minimr.query.files=auto_sortmerge_join_16.q,\
   load_fs2.q,\
   load_hdfs_file_with_space_in_the_name.q,\
   non_native_window_udf.q, \
-  orc_merge_diff_fs.q,\
   parallel_orderby.q,\
   quotedid_smb.q,\
   reduce_deduplicate.q,\
@@ -79,6 +78,11 @@ minitez.query.files.shared=delete_orig_table.q,\
 # NOTE: Add tests to minitez only if it is very
 # specific to tez and cannot be added to minillap.
 minitez.query.files=explainuser_3.q,\
+  explainanalyze_1.q,\
+  explainanalyze_2.q,\
+  explainanalyze_3.q,\
+  explainanalyze_4.q,\
+  explainanalyze_5.q,\
   hybridgrace_hashjoin_1.q,\
   hybridgrace_hashjoin_2.q,\
   partition_column_names_with_leading_and_trailing_spaces.q,\
@@ -222,6 +226,7 @@ minillap.shared.query.files=acid_globallimit.q,\
   orc_merge7.q,\
   orc_merge8.q,\
   orc_merge9.q,\
+  orc_merge_diff_fs.q,\
   orc_merge_incompat1.q,\
   orc_merge_incompat2.q,\
   orc_merge_incompat3.q,\
@@ -457,7 +462,6 @@ minillap.query.files=acid_bucket_pruning.q,\
   orc_llap_counters.q,\
   orc_llap_counters1.q,\
   orc_llap_nonvector.q,\
-  orc_merge_diff_fs.q,\
   orc_ppd_basic.q,\
   schema_evol_orc_acid_part.q,\
   schema_evol_orc_acid_part_update.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/6e76ee3a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 1333c77..60646ba 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -431,6 +431,7 @@ public final class FunctionRegistry {
 system.registerGenericUDF("elt", GenericUDFElt.class);
 system.registerGenericUDF("concat_ws", GenericUDFConcatWS.class);
 system.registerGenericUDF("sort_array", GenericUDFSortArray.class);
+system.registerGenericUDF("sort_array_by", 
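
The registration line above is cut off; the UDF it registers, sort_array_by, orders an array of structs by one or more named struct fields. A hypothetical Java illustration of that multi-field ordering with chained comparators (the record type below is made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

// Illustrative only: sorting tuples by multiple fields, the behavior
// sort_array_by provides for arrays of structs in HiveQL.
public final class SortArrayBySketch {
  static final class Employee {
    final String dept;
    final int salary;
    Employee(String dept, int salary) {
      this.dept = dept;
      this.salary = salary;
    }
    @Override public String toString() {
      return dept + ":" + salary;
    }
  }

  public static void main(String[] args) {
    List<Employee> staff = new ArrayList<>(Arrays.asList(
        new Employee("eng", 90), new Employee("ops", 70), new Employee("eng", 80)));
    // Primary key: dept ascending; tie-break: salary descending.
    staff.sort(Comparator.comparing((Employee e) -> e.dept)
        .thenComparing(Comparator.comparingInt((Employee e) -> e.salary).reversed()));
    System.out.println(staff);  // [eng:90, eng:80, ops:70]
  }
}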

hive git commit: HIVE-13363: Add hive.metastore.token.signature property to HiveConf (Anthony Hsu, reviewed by Carl Steinbach)

2016-05-11 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 868413a37 -> 6187e2a6b


HIVE-13363: Add hive.metastore.token.signature property to HiveConf (Anthony 
Hsu, reviewed by Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6187e2a6
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6187e2a6
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6187e2a6

Branch: refs/heads/master
Commit: 6187e2a6b2848c67070764c76cdbd62b13ff71f2
Parents: 868413a
Author: Carl Steinbach 
Authored: Wed May 11 14:18:13 2016 -0700
Committer: Carl Steinbach 
Committed: Wed May 11 14:24:44 2016 -0700

--
 common/src/java/org/apache/hadoop/hive/conf/HiveConf.java | 3 +++
 .../src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java   | 2 +-
 .../org/apache/hive/hcatalog/templeton/SecureProxySupport.java| 2 +-
 .../org/apache/hadoop/hive/thrift/TestHadoopAuthBridge23.java | 2 +-
 .../org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java | 2 +-
 .../apache/hive/service/cli/session/HiveSessionImplwithUGI.java   | 2 +-
 6 files changed, 8 insertions(+), 5 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/6187e2a6/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index f509e8b..f2273c0 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -177,6 +177,7 @@ public class HiveConf extends Configuration {
   HiveConf.ConfVars.METASTORE_KERBEROS_KEYTAB_FILE,
   HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL,
   HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL,
+  HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE,
   HiveConf.ConfVars.METASTORE_CACHE_PINOBJTYPES,
   HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE,
   HiveConf.ConfVars.METASTORE_VALIDATE_TABLES,
@@ -630,6 +631,8 @@ public class HiveConf extends Configuration {
 
METASTORE_USE_THRIFT_COMPACT_PROTOCOL("hive.metastore.thrift.compact.protocol.enabled",
 false,
 "If true, the metastore Thrift interface will use TCompactProtocol. 
When false (default) TBinaryProtocol will be used.\n" +
 "Setting it to true will break compatibility with older clients 
running TBinaryProtocol."),
+METASTORE_TOKEN_SIGNATURE("hive.metastore.token.signature", "",
+"The delegation token service name to match when selecting a token 
from the current user's tokens."),
 
METASTORE_CLUSTER_DELEGATION_TOKEN_STORE_CLS("hive.cluster.delegation.token.store.class",
 "org.apache.hadoop.hive.thrift.MemoryTokenStore",
 "The delegation token store implementation. Set to 
org.apache.hadoop.hive.thrift.ZooKeeperTokenStore for load-balanced cluster."),

http://git-wip-us.apache.org/repos/asf/hive/blob/6187e2a6/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
--
diff --git 
a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java 
b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index 3ee30ed..f3bfcfa 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -627,7 +627,7 @@ public class HCatUtil {
 }
 
 if (conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
-  hiveConf.set("hive.metastore.token.signature",
+  hiveConf.setVar(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE,
 conf.get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE));
 }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/6187e2a6/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
--
diff --git 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
index 13f3c9b..e71bc04 100644
--- 
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
+++ 
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
@@ -128,7 +128,7 @@ public class SecureProxySupport {
   public void addArgs(List args) {
 if (isEnabled) {
   args.add("-D");
-  args.add("hive.metastore.token.signature=" + getHcatServiceStr());
+  args.add(HiveConf.ConfVars.METASTORE_TOKEN_SIGNATURE + "=" + 
getHcatServiceStr());
   

hive git commit: HIVE-13115: MetaStore Direct SQL getPartitions call fail when the columns schemas for a partition are null (Ratandeep Ratti reviewed by Carl Steinbach)

2016-03-27 Thread cws
Repository: hive
Updated Branches:
  refs/heads/branch-1.2 2f8175bd4 -> 510ef503b


HIVE-13115: MetaStore Direct SQL getPartitions call fail when the columns 
schemas for a partition are null (Ratandeep Ratti reviewed by Carl Steinbach)

(cherry picked from commit 69cfd357eb482c426161aa3c4a00d574ee29416e)
(cherry picked from commit 3a39aba340e1cac60fe27d920e40d816f48ef38f)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/510ef503
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/510ef503
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/510ef503

Branch: refs/heads/branch-1.2
Commit: 510ef503ba7467c0784c1e0c03129d7ed2986fe5
Parents: 2f8175b
Author: Carl Steinbach 
Authored: Sun Mar 27 15:41:38 2016 -0700
Committer: Carl Steinbach 
Committed: Sun Mar 27 16:05:50 2016 -0700

--
 .../hive/metastore/TestHiveMetaStore.java   | 20 +++-
 .../hive/metastore/MetaStoreDirectSql.java  | 32 +++-
 2 files changed, 37 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/510ef503/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 6efb334..6f9e9c7 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -273,6 +273,24 @@ public abstract class TestHiveMetaStore extends TestCase {
   }
   assertTrue("Partitions are not same", part.equals(part_get));
 
+      // check null cols schemas for a partition
+      List<String> vals6 = makeVals("2016-02-22 00:00:00", "16");
+      Partition part6 = makePartitionObject(dbName, tblName, vals6, tbl, "/part5");
+      part6.getSd().setCols(null);
+      LOG.info("Creating partition with null field schema");
+      client.add_partition(part6);
+      LOG.info("Listing all partitions for table " + dbName + "." + tblName);
+      final List<Partition> partitions = client.listPartitions(dbName, tblName, (short) -1);
+      boolean foundPart = false;
+      for (Partition p : partitions) {
+        if (p.getValues().equals(vals6)) {
+          assertNull(p.getSd().getCols());
+          LOG.info("Found partition " + p + " having null field schema");
+          foundPart = true;
+        }
+      }
+      assertTrue(foundPart);
+
   String partName = "ds=" + FileUtils.escapePathName("2008-07-01 
14:13:12") + "/hr=14";
   String part2Name = "ds=" + FileUtils.escapePathName("2008-07-01 
14:13:12") + "/hr=15";
   String part3Name = "ds=" + FileUtils.escapePathName("2008-07-02 
14:13:12") + "/hr=15";
@@ -306,7 +324,7 @@ public abstract class TestHiveMetaStore extends TestCase {
   partialVals.clear();
   partialVals.add("");
   partialNames = client.listPartitionNames(dbName, tblName, partialVals, 
(short) -1);
-  assertTrue("Should have returned 4 partition names", partialNames.size() 
== 4);
+  assertTrue("Should have returned 5 partition names", partialNames.size() 
== 5);
   assertTrue("Not all part names returned", 
partialNames.containsAll(partNames));
 
   // Test partition listing with a partial spec - hr is specified but ds 
is not

http://git-wip-us.apache.org/repos/asf/hive/blob/510ef503/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index 071a4c4..eb0fec2 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -531,14 +531,14 @@ class MetaStoreDirectSql {
   Long sdId = extractSqlLong(fields[1]);
   Long colId = extractSqlLong(fields[2]);
   Long serdeId = extractSqlLong(fields[3]);
-  // A partition must have either everything set, or nothing set if it's a 
view.
-  if (sdId == null || colId == null || serdeId == null) {
+  // A partition must have at least sdId and serdeId set, or nothing set 
if it's a view.
+  if (sdId == null || serdeId == null) {
 if (isView == null) {
   isView = isViewTable(dbName, tblName);
 }
 if ((sdId != null || colId != null || serdeId != null) || !isView) {
-  throw new MetaException("Unexpected null for one of the IDs, SD " + 

hive git commit: HIVE-13115: MetaStore Direct SQL getPartitions call fail when the columns schemas for a partition are null (Ratandeep Ratti reviewed by Carl Steinbach)

2016-03-27 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 968620932 -> 69cfd357e


HIVE-13115: MetaStore Direct SQL getPartitions call fail when the columns 
schemas for a partition are null (Ratandeep Ratti reviewed by Carl Steinbach)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/69cfd357
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/69cfd357
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/69cfd357

Branch: refs/heads/master
Commit: 69cfd357eb482c426161aa3c4a00d574ee29416e
Parents: 9686209
Author: Carl Steinbach 
Authored: Sun Mar 27 15:41:38 2016 -0700
Committer: Carl Steinbach 
Committed: Sun Mar 27 15:41:38 2016 -0700

--
 .../hive/metastore/TestHiveMetaStore.java   | 20 +++-
 .../hive/metastore/MetaStoreDirectSql.java  | 32 +++-
 2 files changed, 37 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/69cfd357/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 5da4165..83fb15c 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -280,6 +280,24 @@ public abstract class TestHiveMetaStore extends TestCase {
   }
   assertTrue("Partitions are not same", part.equals(part_get));
 
+      // check null cols schemas for a partition
+      List<String> vals6 = makeVals("2016-02-22 00:00:00", "16");
+      Partition part6 = makePartitionObject(dbName, tblName, vals6, tbl, "/part5");
+      part6.getSd().setCols(null);
+      LOG.info("Creating partition with null field schema");
+      client.add_partition(part6);
+      LOG.info("Listing all partitions for table " + dbName + "." + tblName);
+      final List<Partition> partitions = client.listPartitions(dbName, tblName, (short) -1);
+      boolean foundPart = false;
+      for (Partition p : partitions) {
+        if (p.getValues().equals(vals6)) {
+          assertNull(p.getSd().getCols());
+          LOG.info("Found partition " + p + " having null field schema");
+          foundPart = true;
+        }
+      }
+      assertTrue(foundPart);
+
   String partName = "ds=" + FileUtils.escapePathName("2008-07-01 
14:13:12") + "/hr=14";
   String part2Name = "ds=" + FileUtils.escapePathName("2008-07-01 
14:13:12") + "/hr=15";
   String part3Name = "ds=" + FileUtils.escapePathName("2008-07-02 
14:13:12") + "/hr=15";
@@ -313,7 +331,7 @@ public abstract class TestHiveMetaStore extends TestCase {
   partialVals.clear();
   partialVals.add("");
   partialNames = client.listPartitionNames(dbName, tblName, partialVals, 
(short) -1);
-  assertTrue("Should have returned 4 partition names", partialNames.size() 
== 4);
+  assertTrue("Should have returned 5 partition names", partialNames.size() 
== 5);
   assertTrue("Not all part names returned", 
partialNames.containsAll(partNames));
 
   // Test partition listing with a partial spec - hr is specified but ds 
is not

http://git-wip-us.apache.org/repos/asf/hive/blob/69cfd357/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index d51f58d..06e9f78 100644
--- 
a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ 
b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -557,14 +557,14 @@ class MetaStoreDirectSql {
   Long sdId = extractSqlLong(fields[1]);
   Long colId = extractSqlLong(fields[2]);
   Long serdeId = extractSqlLong(fields[3]);
-  // A partition must have either everything set, or nothing set if it's a 
view.
-  if (sdId == null || colId == null || serdeId == null) {
+  // A partition must have at least sdId and serdeId set, or nothing set 
if it's a view.
+  if (sdId == null || serdeId == null) {
 if (isView == null) {
   isView = isViewTable(dbName, tblName);
 }
 if ((sdId != null || colId != null || serdeId != null) || !isView) {
-  throw new MetaException("Unexpected null for one of the IDs, SD " + 
sdId + ", column "
-  + colId + ", serde " + serdeId + " for a " + (isView ? "" : 
"non-") + " view");
+  throw new 

hive git commit: HIVE-10187: Avro backed tables don't handle cyclical or recursive records (Mark Wagner via cws)

2016-02-12 Thread cws
Repository: hive
Updated Branches:
  refs/heads/master 28cf89266 -> 3bf2f679c


HIVE-10187: Avro backed tables don't handle cyclical or recursive records (Mark 
Wagner via cws)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/3bf2f679
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/3bf2f679
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/3bf2f679

Branch: refs/heads/master
Commit: 3bf2f679cc7b1da7d0c09041c9751c6bcd7e0d7e
Parents: 28cf892
Author: Carl Steinbach <cstei...@linkedin.com>
Authored: Fri Feb 12 12:55:55 2016 -0800
Committer: Carl Steinbach <cstei...@linkedin.com>
Committed: Fri Feb 12 12:55:55 2016 -0800

--
 pom.xml |  6 +++
 .../hive/serde2/avro/SchemaToTypeInfo.java  |  4 +-
 .../hive/serde2/avro/TestAvroSerializer.java| 44 
 .../hive/serde2/avro/TestSchemaToTypeInfo.java  | 54 
 4 files changed, 63 insertions(+), 45 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/3bf2f679/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 15e3522..af2e3d1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -258,6 +258,12 @@
         <groupId>com.google.code.tempus-fugit</groupId>
         <artifactId>tempus-fugit</artifactId>
         <version>${tempus-fugit.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.hamcrest</groupId>
+            <artifactId>hamcrest-core</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>com.googlecode.javaewah</groupId>

http://git-wip-us.apache.org/repos/asf/hive/blob/3bf2f679/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
--
diff --git 
a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java 
b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
index e60168c..35d83bd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/SchemaToTypeInfo.java
@@ -217,7 +217,9 @@ class SchemaToTypeInfo {
     if (seenSchemas == null) {
       seenSchemas = Collections.newSetFromMap(new IdentityHashMap<Schema, Boolean>());
     } else if (seenSchemas.contains(schema)) {
-      return primitiveTypeToTypeInfo.get(Schema.Type.NULL);
+      throw new AvroSerdeException(
+          "Recursive schemas are not supported. Recursive schema was " + schema.getFullName());
     }
     seenSchemas.add(schema);
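[For illustration, a hypothetical self-referencing schema of the kind this change now rejects; the JSON below is an assumption made for the example, not taken from the commit:]

    import org.apache.avro.Schema;

    public class RecursiveAvroSchemaExample {
      public static void main(String[] args) {
        // "Node" refers back to itself through its "next" field, forming a cycle.
        String json = "{\"type\":\"record\",\"name\":\"Node\",\"fields\":["
            + "{\"name\":\"value\",\"type\":\"int\"},"
            + "{\"name\":\"next\",\"type\":[\"null\",\"Node\"]}]}";
        Schema schema = new Schema.Parser().parse(json);
        // Hive's Avro-to-TypeInfo conversion used to map this cycle to a NULL
        // type; after this change it fails fast with AvroSerdeException.
        System.out.println(schema.getFullName());
      }
    }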
 

http://git-wip-us.apache.org/repos/asf/hive/blob/3bf2f679/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
--
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
index 0b94dc5..45be2dd 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroSerializer.java
@@ -491,48 +491,4 @@ public class TestAvroSerializer {
 
     assertArrayEquals(fixed.bytes(), ((GenericData.Fixed) r.get("fixed1")).bytes());
   }
-
-  @Test
-  public void canSerializeCyclesInSchema() throws SerDeException, IOException {
-    // Create parent-child avro-record and avro-schema
-    AvroCycleParent parent = new AvroCycleParent();
-    AvroCycleChild child = new AvroCycleChild();
-    parent.setChild(child);
-    Schema parentS = ReflectData.AllowNull.get().getSchema(AvroCycleParent.class);
-    GenericData.Record parentRec = new GenericData.Record(parentS);
-    Schema childS = ReflectData.AllowNull.get().getSchema(AvroCycleChild.class);
-    GenericData.Record childRec = new GenericData.Record(childS);
-    parentRec.put("child", childRec);
-
-    // Initialize Avro SerDe
-    AvroSerializer as = new AvroSerializer();
-    AvroDeserializer ad = new AvroDeserializer();
-    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(parentS);
-    ObjectInspector oi = aoig.getObjectInspector();
-    List<String> columnNames = aoig.getColumnNames();
-    List<TypeInfo> columnTypes = aoig.getColumnTypes();
-
-    // Check serialization and deserialization
-    AvroGenericRecordWritable agrw = Utils.serializeAndDeserializeRecord(parentRec);
-    Object obj = ad.deserialize(columnNames, columnTypes, agrw, parentS);
-
-    Writable result = as.serialize(obj, oi, columnNames, columnTypes, parentS);
-    assertTrue(result instanceof AvroGenericRecordWritable);
-    GenericRecord r2 = ((AvroGenericRecordWritable) result).getRecord();
-    assertEquals(parentS, r2.getSchema());
-  }
-
-  private static class AvroCycleParent {
-    AvroCycleChild child;
-    public AvroCycleChild ge

svn commit: r1670246 - in /hive/trunk: ./ conf/ itests/ packaging/src/main/assembly/ ql/ ql/src/java/org/apache/hadoop/hive/ql/session/ ql/src/test/org/apache/hadoop/hive/ql/session/ ql/src/test/queri

2015-03-30 Thread cws
Author: cws
Date: Tue Mar 31 02:48:56 2015
New Revision: 1670246

URL: http://svn.apache.org/r1670246
Log:
HIVE-9664. Hive 'add jar' command should be able to download and add jars from 
a repository (Anant Nag via cws)

Added:
hive/trunk/conf/ivysettings.xml

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/DependencyResolver.java

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/session/TestAddResource.java
hive/trunk/ql/src/test/queries/clientnegative/ivyDownload.q
hive/trunk/ql/src/test/queries/clientpositive/ivyDownload.q
hive/trunk/ql/src/test/results/clientnegative/ivyDownload.q.out
hive/trunk/ql/src/test/results/clientpositive/ivyDownload.q.out
Modified:
hive/trunk/itests/pom.xml
hive/trunk/packaging/src/main/assembly/bin.xml
hive/trunk/pom.xml
hive/trunk/ql/pom.xml
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java

Added: hive/trunk/conf/ivysettings.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/conf/ivysettings.xml?rev=1670246&view=auto
==
--- hive/trunk/conf/ivysettings.xml (added)
+++ hive/trunk/conf/ivysettings.xml Tue Mar 31 02:48:56 2015
@@ -0,0 +1,37 @@
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+   -->
+
+<!--This file is used by grapes to download dependencies from a maven repository.
+    This is just a template and can be edited to add more repositories.
+-->
+
+<ivysettings>
+  <!--name of the defaultResolver should always be 'downloadGrapes'. -->
+  <settings defaultResolver="downloadGrapes"/>
+  <resolvers>
+    <!-- more resolvers can be added here -->
+    <chain name="downloadGrapes">
+      <!-- This resolver uses ibiblio to find artifacts, compatible with maven2 repository -->
+      <ibiblio name="central" m2compatible="true"/>
+      <!-- File resolver to add jars from the local system. -->
+      <filesystem name="test" checkmodified="true">
+        <artifact pattern="/tmp/[module]-[revision](-[classifier]).jar" />
+      </filesystem>
+    </chain>
+  </resolvers>
+</ivysettings>
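[As a usage note: with the resolver settings above in place, this change lets a session pull a jar from a repository through an ivy:// URI. A minimal sketch over JDBC, assuming a local HiveServer2 and illustrative artifact coordinates that are not part of this commit:]

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class AddJarFromRepoExample {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn =
                 DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
          // Resolves the artifact through the resolvers configured in
          // conf/ivysettings.xml and adds the downloaded jar(s) to the session.
          stmt.execute("ADD JAR ivy://org.apache.commons:commons-lang3:3.4");
        }
      }
    }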

Modified: hive/trunk/itests/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/itests/pom.xml?rev=1670246&r1=1670245&r2=1670246&view=diff
==
--- hive/trunk/itests/pom.xml (original)
+++ hive/trunk/itests/pom.xml Tue Mar 31 02:48:56 2015
@@ -93,6 +93,9 @@
               mkdir -p $DOWNLOAD_DIR
               download "http://d3jw87u4immizc.cloudfront.net/spark-tarball/spark-${spark.version}-bin-hadoop2-without-hive.tgz" "spark"
               cp -f $HIVE_ROOT/data/conf/spark/log4j.properties $BASE_DIR/spark/conf/
+              sed '/package /d' ${basedir}/${hive.path.to.root}/contrib/src/java/org/apache/hadoop/hive/contrib/udf/example/UDFExampleAdd.java > /tmp/UDFExampleAdd.java
+              javac -cp ${settings.localRepository}/org/apache/hive/hive-exec/${project.version}/hive-exec-${project.version}.jar /tmp/UDFExampleAdd.java -d /tmp
+              jar -cf /tmp/udfexampleadd-1.0.jar -C /tmp UDFExampleAdd.class
             </echo>
           </target>
         </configuration>

Modified: hive/trunk/packaging/src/main/assembly/bin.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/packaging/src/main/assembly/bin.xml?rev=1670246&r1=1670245&r2=1670246&view=diff
==
--- hive/trunk/packaging/src/main/assembly/bin.xml (original)
+++ hive/trunk/packaging/src/main/assembly/bin.xml Tue Mar 31 02:48:56 2015
@@ -146,6 +146,7 @@
       <directory>${project.parent.basedir}/conf</directory>
       <includes>
         <include>*.template</include>
+        <include>ivysettings.xml</include>
       </includes>
       <outputDirectory>conf</outputDirectory>
     </fileSet>

Modified: hive/trunk/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1670246&r1=1670245&r2=1670246&view=diff
==
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Tue Mar 31 02:48:56 2015
@@ -126,6 +126,7 @@
     <!-- httpcomponents are not always in version sync -->
     <httpcomponents.client.version>4.2.5</httpcomponents.client.version>

svn commit: r1641009 - /hive/trunk/pom.xml

2014-11-21 Thread cws
Author: cws
Date: Fri Nov 21 22:13:55 2014
New Revision: 1641009

URL: http://svn.apache.org/r1641009
Log:
HIVE-8933. Check release builds for SNAPSHOT dependencies

Modified:
hive/trunk/pom.xml

Modified: hive/trunk/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1641009&r1=1641008&r2=1641009&view=diff
==
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Fri Nov 21 22:13:55 2014
@@ -776,6 +776,28 @@
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-no-snapshots</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <requireReleaseDeps>
+                  <message>Release builds are not allowed to have SNAPSHOT dependencies</message>
+                  <searchTransitive>true</searchTransitive>
+                  <onlyWhenRelease>true</onlyWhenRelease>
+                </requireReleaseDeps>
+              </rules>
+              <fail>true</fail>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <excludes>




svn commit: r1641011 - /hive/branches/branch-0.14/pom.xml

2014-11-21 Thread cws
Author: cws
Date: Fri Nov 21 22:30:42 2014
New Revision: 1641011

URL: http://svn.apache.org/r1641011
Log:
HIVE-8933. Check release builds for SNAPSHOT dependencies

Modified:
hive/branches/branch-0.14/pom.xml

Modified: hive/branches/branch-0.14/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.14/pom.xml?rev=1641011&r1=1641010&r2=1641011&view=diff
==
--- hive/branches/branch-0.14/pom.xml (original)
+++ hive/branches/branch-0.14/pom.xml Fri Nov 21 22:30:42 2014
@@ -772,6 +772,28 @@
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce-no-snapshots</id>
+            <goals>
+              <goal>enforce</goal>
+            </goals>
+            <configuration>
+              <rules>
+                <requireReleaseDeps>
+                  <message>Release builds are not allowed to have SNAPSHOT dependencies</message>
+                  <searchTransitive>true</searchTransitive>
+                  <onlyWhenRelease>true</onlyWhenRelease>
+                </requireReleaseDeps>
+              </rules>
+              <fail>true</fail>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
         <configuration>
           <excludes>




svn commit: r1614534 - in /hive/trunk: ./ hcatalog/core/ hcatalog/core/src/test/java/org/apache/hive/hcatalog/fileformats/ hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/ ql/src/test/o

2014-07-29 Thread cws
Author: cws
Date: Wed Jul 30 01:22:46 2014
New Revision: 1614534

URL: http://svn.apache.org/r1614534
Log:
HIVE-7286: Parameterize HCatMapReduceTest for testing against all Hive storage 
formats (David Chen via cws)

Added:
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/StorageFormats.java
Removed:
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/fileformats/
Modified:
hive/trunk/.gitignore
hive/trunk/hcatalog/core/pom.xml

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatDynamicPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatExternalDynamicPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatExternalNonPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatExternalPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMutableDynamicPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMutableNonPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatMutablePartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatNonPartitioned.java

hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitioned.java

Modified: hive/trunk/.gitignore
URL: 
http://svn.apache.org/viewvc/hive/trunk/.gitignore?rev=1614534&r1=1614533&r2=1614534&view=diff
==
--- hive/trunk/.gitignore (original)
+++ hive/trunk/.gitignore Wed Jul 30 01:22:46 2014
@@ -13,6 +13,7 @@ common/src/gen
 *.iml
 *.ipr
 *.iws
+*.swp
 derby.log
 datanucleus.log
 .arc

Modified: hive/trunk/hcatalog/core/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/pom.xml?rev=1614534&r1=1614533&r2=1614534&view=diff
==
--- hive/trunk/hcatalog/core/pom.xml (original)
+++ hive/trunk/hcatalog/core/pom.xml Wed Jul 30 01:22:46 2014
@@ -60,6 +60,13 @@
       <artifactId>hive-exec</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
     <!-- inter-project -->
     <dependency>
       <groupId>com.google.guava</groupId>

Modified: 
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java?rev=1614534&r1=1614533&r2=1614534&view=diff
==
--- 
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
 (original)
+++ 
hive/trunk/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
 Wed Jul 30 01:22:46 2014
@@ -19,13 +19,15 @@
 
 package org.apache.hive.hcatalog.mapreduce;
 
+import com.google.common.collect.ImmutableSet;
+
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
-import junit.framework.Assert;
+import java.util.Set;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -40,10 +42,10 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
-import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
+import org.apache.hadoop.hive.ql.io.StorageFormats;
+import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
@@ -53,15 +55,23 @@ import org.apache.hadoop.mapreduce.JobSt
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+
 import org.apache.hive.hcatalog.common.HCatConstants;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.DefaultHCatRecord;
 import org.apache.hive.hcatalog.data.HCatRecord;
 import
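[The diff above is truncated by the archive. For context, a hedged sketch of the JUnit pattern HCatMapReduceTest adopts here: each storage format contributes one parameter set, so every test in the class runs once per format. The class name and hard-coded format list below are illustrative assumptions, not code from this commit:]

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Assert;
    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;

    @RunWith(Parameterized.class)
    public class ExampleStorageFormatTest {
      private final String storageFormat;

      public ExampleStorageFormatTest(String storageFormat) {
        this.storageFormat = storageFormat;
      }

      @Parameterized.Parameters
      public static Collection<Object[]> formats() {
        // A real run would derive this list from the StorageFormats helper.
        return Arrays.asList(new Object[][] {{"rcfile"}, {"orc"}, {"parquet"}, {"avro"}});
      }

      @Test
      public void runsOncePerFormat() {
        // A real test would create a table STORED AS <storageFormat> and run
        // an HCatalog MapReduce job against it.
        Assert.assertNotNull(storageFormat);
      }
    }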

svn commit: r1613483 - in /hive/trunk: ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/main/resources/META-INF/services/ ql/src/test/org/apache/hadoop/hive/ql/io/ ql/src/test/queries/clientpositive/

2014-07-25 Thread cws
Author: cws
Date: Fri Jul 25 18:01:55 2014
New Revision: 1613483

URL: http://svn.apache.org/r1613483
Log:
HIVE-6806: Support CREATE TABLE ... STORED AS AVRO (Ashish Kumar Singh via cws)

Added:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AvroStorageFormatDescriptor.java

hive/trunk/ql/src/test/queries/clientpositive/avro_compression_enabled_native.q
hive/trunk/ql/src/test/queries/clientpositive/avro_decimal_native.q
hive/trunk/ql/src/test/queries/clientpositive/avro_joins_native.q
hive/trunk/ql/src/test/queries/clientpositive/avro_native.q
hive/trunk/ql/src/test/queries/clientpositive/avro_partitioned_native.q
hive/trunk/ql/src/test/queries/clientpositive/avro_schema_evolution_native.q

hive/trunk/ql/src/test/results/clientpositive/avro_compression_enabled_native.q.out
hive/trunk/ql/src/test/results/clientpositive/avro_decimal_native.q.out
hive/trunk/ql/src/test/results/clientpositive/avro_joins_native.q.out
hive/trunk/ql/src/test/results/clientpositive/avro_native.q.out
hive/trunk/ql/src/test/results/clientpositive/avro_partitioned_native.q.out

hive/trunk/ql/src/test/results/clientpositive/avro_schema_evolution_native.q.out

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/TypeInfoToSchema.java

hive/trunk/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestTypeInfoToSchema.java
hive/trunk/serde/src/test/resources/
hive/trunk/serde/src/test/resources/avro-nested-struct.avsc
hive/trunk/serde/src/test/resources/avro-struct.avsc
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java

hive/trunk/ql/src/main/resources/META-INF/services/org.apache.hadoop.hive.ql.io.StorageFormatDescriptor

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestStorageFormatDescriptor.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java

hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java

Added: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AvroStorageFormatDescriptor.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AvroStorageFormatDescriptor.java?rev=1613483&view=auto
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AvroStorageFormatDescriptor.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/AvroStorageFormatDescriptor.java Fri Jul 25 18:01:55 2014
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.io;
+
+
+import com.google.common.collect.ImmutableSet;
+import org.apache.hadoop.hive.ql.io.AbstractStorageFormatDescriptor;
+import org.apache.hadoop.hive.ql.io.IOConstants;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat;
+import org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat;
+import org.apache.hadoop.hive.serde2.avro.AvroSerDe;
+
+import java.util.Set;
+
+public class AvroStorageFormatDescriptor extends AbstractStorageFormatDescriptor {
+  @Override
+  public Set<String> getNames() {
+    return ImmutableSet.of(IOConstants.AVRO, IOConstants.AVROFILE);
+  }
+  @Override
+  public String getInputFormat() {
+    return AvroContainerInputFormat.class.getName();
+  }
+  @Override
+  public String getOutputFormat() {
+    return AvroContainerOutputFormat.class.getName();
+  }
+  @Override
+  public String getSerde() {
+    return AvroSerDe.class.getName();
+  }
+}
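[At the SQL surface this descriptor is what makes the new shorthand work; a small sketch over JDBC, with an assumed connection URL and table name:]

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class StoredAsAvroExample {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn =
                 DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
          // AVRO (or AVROFILE) now expands to AvroContainerInputFormat,
          // AvroContainerOutputFormat and AvroSerDe automatically.
          stmt.execute("CREATE TABLE episodes (title STRING, air_date STRING) STORED AS AVRO");
        }
      }
    }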

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java?rev=1613483&r1=1613482&r2=1613483&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/IOConstants.java Fri Jul 25 18:01:55 2014
@@ -33,6 +33,8 @@ public final class IOConstants {
   public static final String ORCFILE = "ORCFILE";
   public static

svn commit: r1604731 - /hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/

2014-06-23 Thread cws
Author: cws
Date: Mon Jun 23 10:17:29 2014
New Revision: 1604731

URL: http://svn.apache.org/r1604731
Log:
HIVE-7094: Separate out static/dynamic partitioning code in 
FileRecordWriterContainer (David Chen via cws)

Added:

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/StaticPartitionFileRecordWriterContainer.java
Modified:

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java

hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java

Added: 
hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java?rev=1604731&view=auto
==
--- hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java (added)
+++ hive/trunk/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java Mon Jun 23 10:17:29 2014
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hive.hcatalog.mapreduce;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
+import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RecordWriter;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.OutputCommitter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.hcatalog.common.ErrorType;
+import org.apache.hive.hcatalog.common.HCatException;
+import org.apache.hive.hcatalog.common.HCatUtil;
+import org.apache.hive.hcatalog.data.HCatRecord;
+
+/**
+ * Record writer container for tables using dynamic partitioning. See
+ * {@link FileOutputFormatContainer} for more information
+ */
+class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContainer {
+  private final List<Integer> dynamicPartCols;
+  private int maxDynamicPartitions;
+
+  private final Map<String, RecordWriter<? super WritableComparable<?>, ? super Writable>> baseDynamicWriters;
+  private final Map<String, SerDe> baseDynamicSerDe;
+  private final Map<String, org.apache.hadoop.mapred.OutputCommitter> baseDynamicCommitters;
+  private final Map<String, org.apache.hadoop.mapred.TaskAttemptContext> dynamicContexts;
+  private final Map<String, ObjectInspector> dynamicObjectInspectors;
+  private Map<String, OutputJobInfo> dynamicOutputJobInfo;
+
+  /**
+   * @param baseWriter RecordWriter to contain
+   * @param context current TaskAttemptContext
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public DynamicPartitionFileRecordWriterContainer(
+      RecordWriter<? super WritableComparable<?>, ? super Writable> baseWriter,
+      TaskAttemptContext context) throws IOException, InterruptedException {
+    super(baseWriter, context);
+    maxDynamicPartitions = jobInfo.getMaxDynamicPartitions();
+    dynamicPartCols = jobInfo.getPosOfDynPartCols();
+    if (dynamicPartCols == null) {
+      throw new HCatException("It seems that setSchema() is not called on "
+          + "HCatOutputFormat. Please make sure that method is called
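[The file is cut off here in the archive. As context, a simplified, assumed sketch of the pattern this class factors out: one base writer per distinct partition value, created lazily and capped by maxDynamicPartitions. The generic writer type and factory callback are stand-ins, not HCatalog API:]

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    public class DynamicWriterCache<W> {
      private final Map<String, W> writers = new HashMap<>();
      private final int maxDynamicPartitions;

      public DynamicWriterCache(int maxDynamicPartitions) {
        this.maxDynamicPartitions = maxDynamicPartitions;
      }

      public W writerFor(String partitionValue, Function<String, W> factory) {
        W w = writers.get(partitionValue);
        if (w == null) {
          if (writers.size() >= maxDynamicPartitions) {
            throw new IllegalStateException(
                "Exceeded configured maximum of dynamic partitions: " + maxDynamicPartitions);
          }
          w = factory.apply(partitionValue);
          writers.put(partitionValue, w);
        }
        return w;
      }
    }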

svn commit: r1596681 - /hive/trunk/pom.xml

2014-05-21 Thread cws
Author: cws
Date: Wed May 21 21:02:15 2014
New Revision: 1596681

URL: http://svn.apache.org/r1596681
Log:
HIVE-7104: Unit tests are disabled (David Chen via cws)

Modified:
hive/trunk/pom.xml

Modified: hive/trunk/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/pom.xml?rev=1596681&r1=1596680&r2=1596681&view=diff
==
--- hive/trunk/pom.xml (original)
+++ hive/trunk/pom.xml Wed May 21 21:02:15 2014
@@ -738,9 +738,6 @@
             <exclude>**/TestHiveServer2Concurrency.java</exclude>
             <exclude>**/TestHiveMetaStore.java</exclude>
           </excludes>
-          <includes>
-            <include>**/${testPackage}/*.java</include>
-          </includes>
           <redirectTestOutputToFile>true</redirectTestOutputToFile>
           <reuseForks>false</reuseForks>
           <failIfNoTests>false</failIfNoTests>




svn commit: r1596693 - /hive/trunk/ql/pom.xml

2014-05-21 Thread cws
Author: cws
Date: Wed May 21 21:17:52 2014
New Revision: 1596693

URL: http://svn.apache.org/r1596693
Log:
HIVE-7066: hive-exec jar is missing avro core (David Chen via cws)

Modified:
hive/trunk/ql/pom.xml

Modified: hive/trunk/ql/pom.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/pom.xml?rev=1596693&r1=1596692&r2=1596693&view=diff
==
--- hive/trunk/ql/pom.xml (original)
+++ hive/trunk/ql/pom.xml Wed May 21 21:17:52 2014
@@ -498,6 +498,7 @@
               <include>org.apache.thrift:libthrift</include>
               <include>commons-lang:commons-lang</include>
               <include>org.json:json</include>
+              <include>org.apache.avro:avro</include>
               <include>org.apache.avro:avro-mapred</include>
               <include>org.apache.hive.shims:hive-shims-0.20</include>
               <include>org.apache.hive.shims:hive-shims-0.20S</include>




svn commit: r1554993 - in /hive/trunk/metastore/scripts/upgrade: mysql/ oracle/ postgres/

2014-01-02 Thread cws
Author: cws
Date: Fri Jan  3 01:50:29 2014
New Revision: 1554993

URL: http://svn.apache.org/r1554993
Log:
HIVE-5911: Recent change to schema upgrade scripts breaks file naming 
conventions (Sergey Shelukhin via cws)

Added:
hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql
hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql
hive/trunk/metastore/scripts/upgrade/postgres/015-HIVE-5700.postgres.sql
Modified:
hive/trunk/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql

hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql

hive/trunk/metastore/scripts/upgrade/oracle/upgrade-0.12.0-to-0.13.0.oracle.sql
hive/trunk/metastore/scripts/upgrade/postgres/011-HIVE-3649.postgres.sql
hive/trunk/metastore/scripts/upgrade/postgres/014-HIVE-3764.postgres.sql

hive/trunk/metastore/scripts/upgrade/postgres/upgrade-0.12.0-to-0.13.0.postgres.sql

Modified: hive/trunk/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql?rev=1554993&r1=1554992&r2=1554993&view=diff
==
--- hive/trunk/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql 
(original)
+++ hive/trunk/metastore/scripts/upgrade/mysql/014-HIVE-3764.mysql.sql Fri Jan  
3 01:50:29 2014
@@ -1,3 +1,5 @@
+SELECT ' HIVE-3764 Support metastore version consistency check ' AS ' ';
+
 -- Table structure for VERSION
 CREATE TABLE IF NOT EXISTS `VERSION` (
   `VER_ID` BIGINT NOT NULL,

Added: hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql?rev=1554993&view=auto
==
--- hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql (added)
+++ hive/trunk/metastore/scripts/upgrade/mysql/015-HIVE-5700.mysql.sql Fri Jan  3 01:50:29 2014
@@ -0,0 +1,11 @@
+SELECT ' HIVE-5700 enforce single date format for partition column storage ' AS ' ';
+
+-- Normalize the date partition column values as best we can. No schema changes.
+
+
+UPDATE PARTITION_KEY_VALS
+  INNER JOIN PARTITIONS ON PARTITION_KEY_VALS.PART_ID = PARTITIONS.PART_ID
+  INNER JOIN PARTITION_KEYS ON PARTITION_KEYS.TBL_ID = PARTITIONS.TBL_ID
+    AND PARTITION_KEYS.INTEGER_IDX = PARTITION_KEY_VALS.INTEGER_IDX
+    AND PARTITION_KEYS.PKEY_TYPE = 'date'
+SET PART_KEY_VAL = IFNULL(DATE_FORMAT(cast(PART_KEY_VAL as date),'%Y-%m-%d'), PART_KEY_VAL);
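[A rough Java analogue of what the UPDATE above does, assumed for illustration only: parseable values collapse to a canonical yyyy-MM-dd form, and unparseable values come back unchanged, mirroring the IFNULL fallback:]

    import java.text.ParseException;
    import java.text.SimpleDateFormat;

    public class PartitionDateNormalizer {
      public static String normalize(String partKeyVal) {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd"); // lenient by default
        try {
          return fmt.format(fmt.parse(partKeyVal));
        } catch (ParseException e) {
          return partKeyVal; // like IFNULL(..., PART_KEY_VAL)
        }
      }

      public static void main(String[] args) {
        System.out.println(normalize("2013-1-3"));   // -> 2013-01-03
        System.out.println(normalize("not-a-date")); // -> not-a-date
      }
    }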

Modified: 
hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql?rev=1554993&r1=1554992&r2=1554993&view=diff
==
--- 
hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql 
(original)
+++ 
hive/trunk/metastore/scripts/upgrade/mysql/upgrade-0.12.0-to-0.13.0.mysql.sql 
Fri Jan  3 01:50:29 2014
@@ -1,11 +1,6 @@
 SELECT 'Upgrading MetaStore schema from 0.12.0 to 0.13.0' AS ' ';
 
-UPDATE PARTITION_KEY_VALS
-  INNER JOIN PARTITIONS ON PARTITION_KEY_VALS.PART_ID = PARTITIONS.PART_ID
-  INNER JOIN PARTITION_KEYS ON PARTITION_KEYS.TBL_ID = PARTITIONS.TBL_ID
-AND PARTITION_KEYS.INTEGER_IDX = PARTITION_KEY_VALS.INTEGER_IDX
-AND PARTITION_KEYS.PKEY_TYPE = 'date'
-SET PART_KEY_VAL = IFNULL(DATE_FORMAT(cast(PART_KEY_VAL as date),'%Y-%m-%d'), 
PART_KEY_VAL);
+SOURCE 015-HIVE-5700.mysql.sql;
 
 UPDATE VERSION SET SCHEMA_VERSION='0.13.0', VERSION_COMMENT='Hive release 
version 0.13.0' where VER_ID=1;
 SELECT 'Finished upgrading MetaStore schema from 0.12.0 to 0.13.0' AS ' ';

Added: hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql?rev=1554993&view=auto
==
--- hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql (added)
+++ hive/trunk/metastore/scripts/upgrade/oracle/015-HIVE-5700.oracle.sql Fri Jan  3 01:50:29 2014
@@ -0,0 +1,26 @@
+-- Normalize the date partition column values as best we can. No schema changes.
+
+CREATE FUNCTION hive13_to_date(date_str IN VARCHAR2)
+RETURN DATE
+IS dt DATE;
+BEGIN
+  dt := TO_DATE(date_str, 'YYYY-MM-DD');
+  RETURN dt;
+EXCEPTION
+  WHEN others THEN RETURN null;
+END;
+/
+
+MERGE INTO PARTITION_KEY_VALS
+USING (
+  SELECT SRC.PART_ID as IPART_ID, SRC.INTEGER_IDX as IINTEGER_IDX,
+         NVL(TO_CHAR(hive13_to_date(PART_KEY_VAL),'YYYY-MM-DD'), PART_KEY_VAL) as NORM
+  FROM PARTITION_KEY_VALS SRC
+    INNER JOIN PARTITIONS ON SRC.PART_ID = PARTITIONS.PART_ID
+    INNER JOIN PARTITION_KEYS ON PARTITION_KEYS.TBL_ID = PARTITIONS.TBL_ID

svn commit: r1481161 - /hive/trunk/build.xml

2013-05-10 Thread cws
Author: cws
Date: Fri May 10 19:47:10 2013
New Revision: 1481161

URL: http://svn.apache.org/r1481161
Log:
HIVE-4530. Enforce minimum ant version required in build script (Arup Malakar via cws)

Modified:
hive/trunk/build.xml

Modified: hive/trunk/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1481161&r1=1481160&r2=1481161&view=diff
==
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Fri May 10 19:47:10 2013
@@ -59,6 +59,14 @@
   <property name="rat.build.dir" location="${build.dir.hive}/rat"/>
   <property name="md5sum.format" value="{0}  {1}"/>
 
+  <!-- Check minimum ant version required -->
+  <fail message="Please use ant version 1.8.0 or greater for building hive.">
+    <condition>
+      <not>
+        <antversion atleast="1.8.0"/>
+      </not>
+    </condition>
+  </fail>
 
   <condition property="is-offline" value="true" else="false">
     <isset property="offline"/>




svn commit: r1479677 - /hive/trunk/build.properties

2013-05-06 Thread cws
Author: cws
Date: Mon May  6 18:59:43 2013
New Revision: 1479677

URL: http://svn.apache.org/r1479677
Log:
HIVE-4497. beeline module tests don't get run by default (Thejas Nair via cws)

Modified:
hive/trunk/build.properties

Modified: hive/trunk/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1479677&r1=1479676&r2=1479677&view=diff
==
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Mon May  6 18:59:43 2013
@@ -75,7 +75,7 @@ common.jar=${hadoop.root}/lib/commons-ht
 # module names needed for build process
 
iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
 
iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,testutils,hcatalog
-iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,odbc,serde,service,hcatalog
+iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,beeline,metastore,odbc,serde,service,hcatalog
 iterate.hive.thrift=ql,service,metastore,serde
 iterate.hive.protobuf=ql
 iterate.hive.cpp=odbc




svn commit: r1479685 - /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java

2013-05-06 Thread cws
Author: cws
Date: Mon May  6 19:24:34 2013
New Revision: 1479685

URL: http://svn.apache.org/r1479685
Log:
HIVE-3957. Add pseudo-BNF grammar for RCFile to Javadoc (Mark Grover via cws)

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java?rev=1479685&r1=1479684&r2=1479685&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/RCFile.java Mon May  6 
19:24:34 2013
@@ -140,7 +140,200 @@ import org.apache.hadoop.util.Reflection
  * </ul>
  * </li>
  * </ul>
+ * <p>
+ * <pre>
+ * {@code
+ * The following is a pseudo-BNF grammar for RCFile. Comments are prefixed
+ * with dashes:
  *
+ * rcfile ::=
+ *   file-header
+ *   rcfile-rowgroup+
+ *
+ * file-header ::=
+ *   file-version-header
+ *   file-key-class-name  (only exists if version is seq6)
+ *   file-value-class-name(only exists if version is seq6)
+ *   file-is-compressed
+ *   file-is-block-compressed (only exists if version is seq6)
+ *   [file-compression-codec-class]
+ *   file-header-metadata
+ *   file-sync-field
+ *
+ * -- The normative RCFile implementation included with Hive is actually
+ * -- based on a modified version of Hadoop's SequenceFile code. Some
+ * -- things which should have been modified were not, including the code
+ * -- that writes out the file version header. Consequently, RCFile and
+ * -- SequenceFile originally shared the same version header.  A newer
+ * -- release has created a unique version string.
+ *
+ * file-version-header ::= Byte[4] {'S', 'E', 'Q', 6}
+ * |   Byte[4] {'R', 'C', 'F', 1}
+ *
+ * -- The name of the Java class responsible for reading the key buffer
+ * -- component of the rowgroup.
+ *
+ * file-key-class-name ::=
+ *   Text {org.apache.hadoop.hive.ql.io.RCFile$KeyBuffer}
+ *
+ * -- The name of the Java class responsible for reading the value buffer
+ * -- component of the rowgroup.
+ *
+ * file-value-class-name ::=
+ *   Text {org.apache.hadoop.hive.ql.io.RCFile$ValueBuffer}
+ *
+ * -- Boolean variable indicating whether or not the file uses compression
+ * -- for the key and column buffer sections.
+ *
+ * file-is-compressed ::= Byte[1]
+ *
+ * -- A boolean field indicating whether or not the file is block compressed.
+ * -- This field is *always* false. According to comments in the original
+ * -- RCFile implementation this field was retained for backwards
+ * -- compatability with the SequenceFile format.
+ *
+ * file-is-block-compressed ::= Byte[1] {false}
+ *
+ * -- The Java class name of the compression codec iff file-is-compressed
+ * -- is true. The named class must implement
+ * -- org.apache.hadoop.io.compress.CompressionCodec.
+ * -- The expected value is org.apache.hadoop.io.compress.GzipCodec.
+ *
+ * file-compression-codec-class ::= Text
+ *
+ * -- A collection of key-value pairs defining metadata values for the
+ * -- file. The Map is serialized using standard JDK serialization, i.e.
+ * -- an Int corresponding to the number of key-value pairs, followed by
+ * -- Text key and value pairs. The following metadata properties are
+ * -- mandatory for all RCFiles:
+ * --
+ * -- hive.io.rcfile.column.number: the number of columns in the RCFile
+ *
+ * file-header-metadata ::= Map<Text, Text>
+ *
+ * -- A 16 byte marker that is generated by the writer. This marker appears
+ * -- at regular intervals at the beginning of rowgroup-headers, and is
+ * -- intended to enable readers to skip over corrupted rowgroups.
+ *
+ * file-sync-hash ::= Byte[16]
+ *
+ * -- Each row group is split into three sections: a header, a set of
+ * -- key buffers, and a set of column buffers. The header section includes
+ * -- an optional sync hash, information about the size of the row group, and
+ * -- the total number of rows in the row group. Each key buffer
+ * -- consists of run-length encoding data which is used to decode
+ * -- the length and offsets of individual fields in the corresponding column
+ * -- buffer.
+ *
+ * rcfile-rowgroup ::=
+ *   rowgroup-header
+ *   rowgroup-key-data
+ *   rowgroup-column-buffers
+ *
+ * rowgroup-header ::=
+ *   [rowgroup-sync-marker, rowgroup-sync-hash]
+ *   rowgroup-record-length
+ *   rowgroup-key-length
+ *   rowgroup-compressed-key-length
+ *
+ * -- rowgroup-key-data is compressed if the column data is compressed.
+ * rowgroup-key-data ::=
+ *   rowgroup-num-rows
+ *   rowgroup-key-buffers
+ *
+ * -- An integer (always -1) signaling the beginning of a sync-hash
+ * -- field.
+ *
+ * rowgroup-sync-marker ::= Int
+ *
+ * -- A 16 byte sync field. This must match the file-sync-hash value read
+ * -- in the file header.
+ *
+ * rowgroup-sync-hash ::= Byte[16
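[The grammar is cut off here in the archive. To make the header layout concrete, a small self-contained reader for just the version bytes the grammar describes; the argument handling is an assumption for the example:]

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Arrays;

    public class RCFileVersionSniff {
      public static void main(String[] args) throws IOException {
        try (DataInputStream in = new DataInputStream(new FileInputStream(args[0]))) {
          byte[] magic = new byte[4];
          in.readFully(magic);
          if (Arrays.equals(magic, new byte[] {'S', 'E', 'Q', 6})) {
            System.out.println("legacy RCFile sharing the SequenceFile header (seq6)");
          } else if (Arrays.equals(magic, new byte[] {'R', 'C', 'F', 1})) {
            System.out.println("RCFile with its own version header");
          } else {
            System.out.println("not an RCFile");
          }
        }
      }
    }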

svn commit: r1464257 - in /hive/site/publish: credits.html credits.pdf index.html irc.html issue_tracking.html linkmap.html linkmap.pdf mailing_lists.html releases.html version_control.html

2013-04-03 Thread cws
Author: cws
Date: Thu Apr  4 02:39:42 2013
New Revision: 1464257

URL: http://svn.apache.org/r1464257
Log:
Publish changes made in last several commits (cws)

Modified:
hive/site/publish/credits.html
hive/site/publish/credits.pdf
hive/site/publish/index.html
hive/site/publish/irc.html
hive/site/publish/issue_tracking.html
hive/site/publish/linkmap.html
hive/site/publish/linkmap.pdf
hive/site/publish/mailing_lists.html
hive/site/publish/releases.html
hive/site/publish/version_control.html

Modified: hive/site/publish/credits.html
URL: 
http://svn.apache.org/viewvc/hive/site/publish/credits.html?rev=1464257&r1=1464256&r2=1464257&view=diff
==
--- hive/site/publish/credits.html (original)
+++ hive/site/publish/credits.html Thu Apr  4 02:39:42 2013
@@ -80,6 +80,12 @@
 <li>
 <a class="unselected" href="http://cwiki.apache.org/confluence/display/Hive/Home">Wiki</a>
 </li>
+<li>
+<a class="unselected" href="hcatalog/index.html">HCatalog</a>
+</li>
+<li>
+<a class="unselected" href="https://cwiki.apache.org/confluence/display/HCATALOG">HCatalog Wiki</a>
+</li>
 </ul>
 <!--+
 |end Tabs
@@ -121,6 +127,9 @@ document.write("Last Published: " + docu
 <div class="menuitem">
 <a href="releases.html#News">News</a>
 </div>
+<div class="menuitem">
+<a href="hcatalog/releases.html">Old HCatalog Releases</a>
+</div>
 <div class="menupage">
 <div class="menupagetitle">Credits</div>
 </div>
@@ -166,6 +175,9 @@ document.write("Last Published: " + docu
 <div class="menuitem">
 <a href="http://hive.apache.org/docs/r0.3.0/">Release 0.3.0</a>
 </div>
+<div class="menuitem">
+<a href="http://hive.apache.org/docs/hcat_r0.5.0/">HCatalog Release 0.5.0</a>
+</div>
 </div>
 <div onclick="SwitchMenu('menu_1.3', 'skin/')" id="menu_1.3Title" class="menutitle">Developers</div>
 <div id="menu_1.3" class="menuitemgroup">
@@ -217,6 +229,9 @@
 <a href="#Hive+Committers">Hive Committers</a>
 </li>
 <li>
+<a href="#HCatalog+Committers">HCatalog Committers</a>
+</li>
+<li>
 <a href="#Contributors">Contributors</a>
 </li>
 </ul>
@@ -378,6 +393,15 @@ document.write("Last Published: " + docu
   
 <tr>
 
+<td colspan="1" rowspan="1">gangtimliu</td><!-- username -->
+<td colspan="1" rowspan="1">Gang Tim Liu</td><!-- name -->
+<td colspan="1" rowspan="1">Facebook</td><!-- organization -->
+  
+</tr>
+
+  
+<tr>
+
 <td colspan="1" rowspan="1">kevinwilfong</td><!-- username -->
 <td colspan="1" rowspan="1">Kevin Wilfong</td><!-- name -->
 <td colspan="1" rowspan="1">Facebook</td><!-- organization -->
@@ -404,9 +428,88 @@ document.write("Last Published: " + docu
 
 </table>
 </div>
+
+  
+<a name="N101EB"></a><a name="HCatalog+Committers"></a>
+<h2 class="h3">HCatalog Committers</h2>
+<div class="section">
+<table class="ForrestTable" cellspacing="1" cellpadding="4">
+  
+<tr>
+
+<th colspan="1" rowspan="1">username</th>
+<th colspan="1" rowspan="1">name</th>
+<th colspan="1" rowspan="1">organization</th>
+  
+</tr>
+  
+<tr>
+
+<td colspan="1" rowspan="1">gates</td>
+<td colspan="1" rowspan="1">Alan Gates</td>
+<td colspan="1" rowspan="1">Hortonworks</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">khorgath</td>
+<td colspan="1" rowspan="1">Sushanth Sowmyan</td>
+<td colspan="1" rowspan="1">Hortonworks</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">toffer</td>
+<td colspan="1" rowspan="1">Francis Christopher Liu</td>
+<td colspan="1" rowspan="1">Yahoo!</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">daijy</td>
+<td colspan="1" rowspan="1">Daniel Dai</td>
+<td colspan="1" rowspan="1">Hortonworks</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">avandana</td>
+<td colspan="1" rowspan="1">Vandana Ayyalasomayajula</td>
+<td colspan="1" rowspan="1">Yahoo!</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">travis</td>
+<td colspan="1" rowspan="1">Travis Crawford</td>
+<td colspan="1" rowspan="1">Twitter</td>
+  
+</tr>
+
+  
+<tr>
+
+<td colspan="1" rowspan="1">mithun</td>
+<td colspan="1" rowspan="1">Mithun Radhakrishnan</td>
+<td colspan="1" rowspan="1">Yahoo!</td>
+  
+</tr>
+
+</table>
+</div>
 
   
-<a name="N101D3"></a><a name="Contributors"></a>
+<a name="N10285"></a><a name="Contributors"></a>
 <h2 class="h3">Contributors</h2>
 <div class="section">
 <p>A list of Hive contributors and their contributions is available from

Modified: hive/site/publish/credits.pdf
URL: 
http://svn.apache.org/viewvc/hive/site/publish/credits.pdf?rev=1464257&r1=1464256&r2=1464257&view=diff
==
--- hive/site/publish/credits.pdf (original)
+++ hive/site/publish/credits.pdf Thu Apr  4 02:39:42 2013
@@ -5,10 +5,10 @@
 /Producer (FOP 0.20.5) 
 endobj
 5 0 obj
- /Length 596 /Filter [ /ASCII85Decode /FlateDecode ]
+ /Length 626 /Filter [ /ASCII85Decode /FlateDecode ]
  
 stream
-[binary ASCII85-encoded PDF stream elided]

svn commit: r1459014 - in /hive/trunk: ivy/libraries.properties metastore/ivy.xml

2013-03-20 Thread cws
Author: cws
Date: Wed Mar 20 19:36:58 2013
New Revision: 1459014

URL: http://svn.apache.org/r1459014
Log:
HIVE-4187. QL build-grammar target fails after HIVE-4148 (Gunther Hagleitner 
via cws)

Modified:
hive/trunk/ivy/libraries.properties
hive/trunk/metastore/ivy.xml

Modified: hive/trunk/ivy/libraries.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/ivy/libraries.properties?rev=1459014&r1=1459013&r2=1459014&view=diff
==
--- hive/trunk/ivy/libraries.properties (original)
+++ hive/trunk/ivy/libraries.properties Wed Mar 20 19:36:58 2013
@@ -65,6 +65,8 @@ sqlline.version=1_0_2
 sqlline.branch=1.0.2
 slf4j-api.version=1.6.1
 slf4j-log4j12.version=1.6.1
+ST4.version=4.0.4
+stringtemplate.version=3.2.1
 tempus-fugit.version=1.1
 snappy.version=0.2
 velocity.version=1.5

Modified: hive/trunk/metastore/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/ivy.xml?rev=1459014&r1=1459013&r2=1459014&view=diff
==
--- hive/trunk/metastore/ivy.xml (original)
+++ hive/trunk/metastore/ivy.xml Wed Mar 20 19:36:58 2013
@@ -26,10 +26,12 @@
     <include file="${ivy.conf.dir}/common-configurations.xml"/>
   </configurations>
   <dependencies>
+    <dependency org="org.antlr" name="antlr" rev="${antlr.version}" transitive="false"/>
+    <dependency org="org.antlr" name="antlr-runtime" rev="${antlr-runtime.version}" transitive="false"/>
+    <dependency org="org.antlr" name="ST4" rev="${ST4.version}" transitive="false"/><!-- manually added (antlr dep), bad POM -->
+    <dependency org="org.antlr" name="stringtemplate" rev="${stringtemplate.version}" transitive="false"/><!-- manually added (antlr dep), bad POM -->
     <dependency org="org.apache.hive" name="hive-serde" rev="${version}"
                 conf="compile->default" />
-    <dependency org="org.antlr" name="antlr" rev="${antlr.version}"/>
-    <dependency org="org.antlr" name="antlr-runtime" rev="${antlr-runtime.version}"/>
     <dependency org="commons-dbcp" name="commons-dbcp" rev="${commons-dbcp.version}">
       <exclude module="commons-pool" />
       <exclude org="org.apache.geronimo.specs" module="geronimo-jta_1.1_spec"/>




svn commit: r1457171 - in /hive/trunk: ./ common/src/gen/ eclipse-templates/ hcatalog/ hcatalog/src/ hcatalog/src/java/ hcatalog/src/java/org/ hcatalog/src/java/org/apache/ hcatalog/src/java/org/apach

2013-03-15 Thread cws
Author: cws
Date: Sat Mar 16 00:11:43 2013
New Revision: 1457171

URL: http://svn.apache.org/r1457171
Log:
HIVE-4145. Create hcatalog stub directory and add it to the build (Carl 
Steinbach via cws)

Added:
hive/trunk/hcatalog/
hive/trunk/hcatalog/build.xml
hive/trunk/hcatalog/ivy.xml
hive/trunk/hcatalog/src/
hive/trunk/hcatalog/src/java/
hive/trunk/hcatalog/src/java/org/
hive/trunk/hcatalog/src/java/org/apache/
hive/trunk/hcatalog/src/java/org/apache/hive/
hive/trunk/hcatalog/src/java/org/apache/hive/hcatalog/
hive/trunk/hcatalog/src/java/org/apache/hive/hcatalog/package-info.java
hive/trunk/hcatalog/src/test/
hive/trunk/hcatalog/src/test/.gitignore
Removed:
hive/trunk/common/src/gen/
Modified:
hive/trunk/.gitignore
hive/trunk/build-common.xml
hive/trunk/build.properties
hive/trunk/build.xml
hive/trunk/eclipse-templates/.classpath

Modified: hive/trunk/.gitignore
URL: 
http://svn.apache.org/viewvc/hive/trunk/.gitignore?rev=1457171&r1=1457170&r2=1457171&view=diff
==
--- hive/trunk/.gitignore (original)
+++ hive/trunk/.gitignore Sat Mar 16 00:11:43 2013
@@ -8,3 +8,4 @@ build-eclipse
 *.launch
 *~
 metastore_db
+common/src/gen

Modified: hive/trunk/build-common.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1457171&r1=1457170&r2=1457171&view=diff
==
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Sat Mar 16 00:11:43 2013
@@ -86,6 +86,7 @@
   <pathelement location="${build.dir.hive}/hwi/test/classes"/>
   <pathelement location="${build.dir.hive}/jdbc/test/classes"/>
   <pathelement location="${build.dir.hive}/metastore/test/classes"/>
+  <pathelement location="${build.dir.hive}/hcatalog/test/classes"/>
   <pathelement location="${build.dir.hive}/ql/test/classes"/>
   <pathelement location="${build.dir.hive}/serde/test/classes"/>
   <pathelement location="${build.dir.hive}/service/test/classes"/>
@@ -197,6 +198,7 @@
     <pathelement location="${build.dir.hive}/common/classes"/>
     <pathelement location="${build.dir.hive}/serde/classes"/>
     <pathelement location="${build.dir.hive}/metastore/classes"/>
+    <pathelement location="${build.dir.hive}/hcatalog/classes"/>
     <pathelement location="${build.dir.hive}/ql/classes"/>
     <pathelement location="${build.dir.hive}/beeline/classes"/>
     <pathelement location="${build.dir.hive}/cli/classes"/>

Modified: hive/trunk/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1457171&r1=1457170&r2=1457171&view=diff
==
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Sat Mar 16 00:11:43 2013
@@ -72,10 +72,10 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ex
 common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
 
 # module names needed for build process
-iterate.hive.all=ant,shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
-iterate.hive.modules=shims,common,serde,metastore,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
-iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,odbc,serde,service
-iterate.hive.thrift=ql,service,metastore,serde
+iterate.hive.all=ant,shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
+iterate.hive.modules=shims,common,serde,metastore,hcatalog,ql,contrib,service,cli,jdbc,beeline,hwi,hbase-handler,pdk,builtins,testutils
+iterate.hive.tests=ql,contrib,hbase-handler,hwi,jdbc,metastore,hcatalog,odbc,serde,service
+iterate.hive.thrift=ql,service,metastore,hcatalog,serde
 iterate.hive.protobuf=ql
 iterate.hive.cpp=odbc
 

Modified: hive/trunk/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1457171&r1=1457170&r2=1457171&view=diff
==
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Sat Mar 16 00:11:43 2013
@@ -136,6 +136,7 @@
     <pathelement location="${build.dir.hive}/common/classes"/>
     <pathelement location="${build.dir.hive}/serde/classes"/>
     <pathelement location="${build.dir.hive}/metastore/classes"/>
+    <pathelement location="${build.dir.hive}/hcatalog/classes"/>
     <pathelement location="${build.dir.hive}/ql/classes"/>
     <pathelement location="${build.dir.hive}/cli/classes"/>
     <pathelement location="${build.dir.hive}/beeline/classes"/>
@@ -473,6 +474,7 @@
   <fileset file="${build.dir.hive}/common/hive-common-${version}.jar"/>
   <fileset file="${build.dir.hive}/ql/hive-exec-${version}.jar"/>
   <fileset file="${build.dir.hive}/metastore/hive-metastore-${version}.jar"/>
+  <fileset file="${build.dir.hive}/hcatalog/hive-hcatalog-${version}.jar"/>
   <fileset file="${build.dir.hive}/hwi/hive-hwi-${version}.war

svn commit: r1443771 - in /hive/site: author/src/documentation/content/xdocs/ publish/

2013-02-07 Thread cws
Author: cws
Date: Thu Feb  7 23:08:18 2013
New Revision: 1443771

URL: http://svn.apache.org/r1443771
Log:
Update credits page (cws)

Modified:
hive/site/author/src/documentation/content/xdocs/credits.xml
hive/site/publish/credits.html
hive/site/publish/credits.pdf
hive/site/publish/index.html
hive/site/publish/index.pdf
hive/site/publish/irc.html
hive/site/publish/irc.pdf
hive/site/publish/issue_tracking.html
hive/site/publish/issue_tracking.pdf
hive/site/publish/linkmap.html
hive/site/publish/linkmap.pdf
hive/site/publish/mailing_lists.html
hive/site/publish/mailing_lists.pdf
hive/site/publish/releases.html
hive/site/publish/releases.pdf
hive/site/publish/version_control.html
hive/site/publish/version_control.pdf

Modified: hive/site/author/src/documentation/content/xdocs/credits.xml
URL: 
http://svn.apache.org/viewvc/hive/site/author/src/documentation/content/xdocs/credits.xml?rev=1443771&r1=1443770&r2=1443771&view=diff
==
--- hive/site/author/src/documentation/content/xdocs/credits.xml (original)
+++ hive/site/author/src/documentation/content/xdocs/credits.xml Thu Feb  7 
23:08:18 2013
@@ -5,166 +5,144 @@
 
 document 
 
-header
-  titleHive credits/title 
-/header 
-
-body
-
-section
-titleCommitters/title
-
-pHive's active committers are:/p
-
-table
-
-  tr
-thusername/th
-thname/th
-thorganization/th
-throles/th
-thtimezone/th
-  /tr
-
-  tr
-tdathusoo/td
-tdAshish Thusoo/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-
-  tr
-tdzshao/td
-tdZheng Shao/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-
-  tr
-tddhruba/td
-tdDhruba Borthakur/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdjohan/td
-tda href=http://blog.oskarsson.nu;Johan Oskarsson/a/td
-tdLast.fm/td
-td/td
-td0/td
-  /tr
-  
-  tr
-tdnamit/td
-tdNamit Jain/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdprasadc/td
-tdPrasad Chakka/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdjsensarma/td
-tdJoydeep Sen Sarma/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdrmurthy/td
-tdRaghotam Murthy/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdhe yongqiang/td
-tdHe Yongqiang/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdappodictic/td
-tdEdward Capriolo/td
-td/td
-td/td
-td-5/td
-  /tr
-
-  tr
-tdnzhang/td
-tdNing Zhang/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdjvs/td
-tdJohn Sichi/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdpauly/td
-tdPaul Yang/td
-tdFacebook/td
-td/td
-td-8/td
-  /tr
-  
-  tr
-tdamareshwari/td
-tdAmareshwari Sriramadasu/td
-tdInMobi/td
-td/td
-td+5.5/td
-  /tr
-
-  tr
-tdcws/td
-tdCarl Steinbach/td
-tdCloudera/td
-td/td
-td-8/td
-  /tr
-  
- tr
-tdhashutosh/td
-tda href=http://people.apache.org/~hashutosh/;Ashutosh 
Chauhan/a/td
-tdHortonworks/td
-td/td
-td-8/td
- /tr
-  
-/table
-/section
-
-section
-titleContributors/title
-
-<p>A list of Hive contributors and their contributions is available from
-<a href="https://issues.apache.org/jira/secure/ConfigureReport.jspa?versionId=-1&amp;selectedProjectId=12310843&amp;reportKey=com.sourcelabs.jira.plugin.report.contributions%3Acontributionreport&amp;Next=Next">Jira</a>
-</p>
+  header
+titleHive credits/title 
+  /header 
+
+  body
+
+section
+
+titleHive PMC/title
+table
+  tr
+thusername/th
+thname/th
+thorganization/th
+throles/th
+  /tr
+
+  tr
+tdathusoo/td !-- username --
+tdAshish Thusoo/td !-- name --
+tdQubole/td !-- organization --
+td/td !-- roles --
+  /tr
+
+  tr
+tdcws/td !-- username --
+tdCarl Steinbach/td !-- name --
+tdCitus Data/td !-- organization --
+tdVP/td !-- roles --
+  /tr
+
+  tr
+tdecapriolo/td !-- username --
+tdEdward Capriolo/td !-- name --
+td/td !-- organization --
+td/td !-- roles --
+  /tr
+
+  tr
+tdhashutosh/td !-- username --
+tdAshutosh Chauhan/td !-- name --
+tdHortonWorks/td !-- organization --
+td/td !-- roles --
+  /tr
+
+  tr
+tdheyongqiang/td !-- username --
+tdYongqiang He/td !-- name --
+tdFacebook/td !-- organization --
+td/td !-- roles --
+  /tr
+
+  tr
+tdjssarma/td !-- username --
+tdJoydeep Sensarma/td !-- name --
+tdQubole/td !-- organization --
+td/td !-- roles --
+  /tr
+
+  tr
+tdnamit/td !-- username --
+tdNamit Jain/td !-- name --
+tdFacebook/td !-- organization --
+td/td !-- roles

svn commit: r1443773 - in /hive/site: author/src/documentation/ publish/

2013-02-07 Thread cws
Author: cws
Date: Thu Feb  7 23:12:39 2013
New Revision: 1443773

URL: http://svn.apache.org/r1443773
Log:
Change copyright date to 2013 (cws)

Modified:
hive/site/author/src/documentation/skinconf.xml
hive/site/publish/credits.html
hive/site/publish/credits.pdf
hive/site/publish/index.html
hive/site/publish/index.pdf
hive/site/publish/irc.html
hive/site/publish/irc.pdf
hive/site/publish/issue_tracking.html
hive/site/publish/issue_tracking.pdf
hive/site/publish/linkmap.html
hive/site/publish/linkmap.pdf
hive/site/publish/mailing_lists.html
hive/site/publish/mailing_lists.pdf
hive/site/publish/releases.html
hive/site/publish/releases.pdf
hive/site/publish/version_control.html
hive/site/publish/version_control.pdf

Modified: hive/site/author/src/documentation/skinconf.xml
URL: 
http://svn.apache.org/viewvc/hive/site/author/src/documentation/skinconf.xml?rev=1443773&r1=1443772&r2=1443773&view=diff
==
--- hive/site/author/src/documentation/skinconf.xml (original)
+++ hive/site/author/src/documentation/skinconf.xml Thu Feb  7 23:12:39 2013
@@ -84,7 +84,7 @@ which will be used to configure the chos
   <favicon-url>images/favicon.ico</favicon-url>
 
   <!-- The following are used to construct a copyright statement -->
-  <year>2010</year>
+  <year>2013</year>
   <vendor>The Apache Software Foundation.</vendor>
   <copyright-link>http://www.apache.org/licenses/</copyright-link>
 

Modified: hive/site/publish/credits.html
URL: 
http://svn.apache.org/viewvc/hive/site/publish/credits.html?rev=1443773&r1=1443772&r2=1443773&view=diff
==
--- hive/site/publish/credits.html (original)
+++ hive/site/publish/credits.html Thu Feb  7 23:12:39 2013
@@ -433,7 +433,7 @@ document.write("Last Published: " + docu
 </div>
 <div class="copyright">
 Copyright &copy;
- 2010 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
+ 2013 <a href="http://www.apache.org/licenses/">The Apache Software Foundation.</a>
 <br>
   Apache Hadoop, Hadoop, HDFS, Avro, Cassandra, Chukwa, HBase, 
   Hive, Mahout, Pig, Zookeeper are trademarks of the Apache 

Modified: hive/site/publish/credits.pdf
URL: 
http://svn.apache.org/viewvc/hive/site/publish/credits.pdf?rev=1443773&r1=1443772&r2=1443773&view=diff
==
--- hive/site/publish/credits.pdf (original)
+++ hive/site/publish/credits.pdf Thu Feb  7 23:12:39 2013

svn commit: r1414590 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/lockmgr/ test/org/apache/hadoop/hive/ql/lockmgr/

2012-11-28 Thread cws
Author: cws
Date: Wed Nov 28 08:53:53 2012
New Revision: 1414590

URL: http://svn.apache.org/viewvc?rev=1414590&view=rev
Log:
HIVE-3531 [jira] Simple lock manager for dedicated hive server
(Navis Ryu via Carl Steinbach)

Summary:
DPAL-1906 Implement simple lock manager for hive server

In many cases, we use the Hive server as a sole proxy for executing all 
queries. For that, the current default lock manager based on ZooKeeper seemed a 
little heavy; a simple in-memory lock manager could be enough.

Test Plan: TestDedicatedLockManager

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D5871

Added:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
   (with props)
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestEmbeddedLockManager.java
   (with props)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/HiveLockObject.java

Added: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java?rev=1414590&view=auto
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
 (added)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lockmgr/EmbeddedLockManager.java
 Wed Nov 28 08:53:53 2012
@@ -0,0 +1,454 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.lockmgr;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Stack;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.lockmgr.HiveLockObject.HiveLockObjectData;
+import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Partition;
+import org.apache.hadoop.hive.ql.metadata.Table;
+
+/**
+ * Shared lock manager for a dedicated Hive server; all locks are managed in 
memory.
+ */
+public class EmbeddedLockManager implements HiveLockManager {
+
+  private static final Log LOG = LogFactory.getLog("EmbeddedHiveLockManager");
+
+  private final Node root = new Node();
+
+  private HiveLockManagerCtx ctx;
+
+  private int sleepTime = 1000;
+  private int numRetriesForLock = 0;
+  private int numRetriesForUnLock = 0;
+
+  public EmbeddedLockManager() {
+  }
+
+  public void setContext(HiveLockManagerCtx ctx) throws LockException {
+this.ctx = ctx;
+refresh();
+  }
+
+  public HiveLock lock(HiveLockObject key, HiveLockMode mode, boolean 
keepAlive)
+  throws LockException {
+return lock(key, mode, numRetriesForLock, sleepTime);
+  }
+
+  public List<HiveLock> lock(List<HiveLockObj> objs, boolean keepAlive) throws 
LockException {
+return lock(objs, numRetriesForLock, sleepTime);
+  }
+
+  public void unlock(HiveLock hiveLock) throws LockException {
+unlock(hiveLock, numRetriesForUnLock, sleepTime);
+  }
+
+  public void releaseLocks(List<HiveLock> hiveLocks) {
+releaseLocks(hiveLocks, numRetriesForUnLock, sleepTime);
+  }
+
+  public List<HiveLock> getLocks(boolean verifyTablePartitions, boolean 
fetchData)
+  throws LockException {
+return getLocks(verifyTablePartitions, fetchData, ctx.getConf());
+  }
+
+  public List<HiveLock> getLocks(HiveLockObject key, boolean 
verifyTablePartitions,
+  boolean fetchData) throws LockException {
+return getLocks(key, verifyTablePartitions, fetchData, ctx.getConf());
+  }
+
+  public void prepareRetry() {
+  }
+
+  public void refresh() {
+HiveConf conf = ctx.getConf();
+sleepTime = 
conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_SLEEP_BETWEEN_RETRIES) * 1000;
+numRetriesForLock = conf.getIntVar(HiveConf.ConfVars.HIVE_LOCK_NUMRETRIES
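
The hunk is cut off inside refresh(), but the shape of the class is visible 
above: a table of in-memory locks plus retry and sleep parameters read from 
HiveConf. A minimal, self-contained sketch of that retry-locking idea, with 
illustrative names that are not the committed API:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

// Illustrative only: a name-keyed in-memory lock table with bounded retries,
// echoing the numRetriesForLock/sleepTime fields in the diff above.
public class InMemoryLockTable {
  private final Map<String, ReentrantLock> locks = new ConcurrentHashMap<>();

  // Attempt the lock up to numRetries + 1 times, waiting up to sleepMillis
  // per attempt; returns false if the lock could not be acquired.
  public boolean lock(String name, int numRetries, long sleepMillis)
      throws InterruptedException {
    ReentrantLock l = locks.computeIfAbsent(name, k -> new ReentrantLock());
    for (int attempt = 0; attempt <= numRetries; attempt++) {
      if (l.tryLock(sleepMillis, TimeUnit.MILLISECONDS)) {
        return true;
      }
    }
    return false;
  }

  public void unlock(String name) {
    ReentrantLock l = locks.get(name);
    if (l != null && l.isHeldByCurrentThread()) {
      l.unlock();
    }
  }
}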

svn commit: r1410559 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java test/queries/clientpositive/inse

2012-11-16 Thread cws
Author: cws
Date: Fri Nov 16 20:29:00 2012
New Revision: 1410559

URL: http://svn.apache.org/viewvc?rev=1410559&view=rev
Log:
HIVE-3676 [jira] INSERT INTO regression caused by HIVE-3465
(Navis Ryu via Carl Steinbach)

Summary: DPAL-1931 INSERT INTO regression caused by HIVE-3465

Test Plan: EMPTY

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D6741

Modified:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/test/queries/clientpositive/insert1.q
hive/trunk/ql/src/test/results/clientpositive/insert1.q.out

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1410559&r1=1410558&r2=1410559&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
 Fri Nov 16 20:29:00 2012
@@ -57,6 +57,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
@@ -322,28 +323,30 @@ public abstract class BaseSemanticAnalyz
   }
 
   /**
-   * Get the name from a table node.
-   * @param tableNameNode the table node
-   * @return if the DB name is given, db.tab is returned. Otherwise, tab.
-   */
-  public static String getUnescapedName(ASTNode tableNameNode) {
-return getUnescapedName(tableNameNode, false);
+   * Get dequoted name from a table/column node.
+   * @param tableOrColumnNode the table or column node
+   * @return for a table node, db.tab or tab; for a column node, the column name.
+   */
+  public static String getUnescapedName(ASTNode tableOrColumnNode) {
+return getUnescapedName(tableOrColumnNode, null);
   }
 
-  public static String getUnescapedName(ASTNode tableNameNode, boolean 
prependDefaultDB) {
-if (tableNameNode.getToken().getType() == HiveParser.TOK_TABNAME) {
-  if (tableNameNode.getChildCount() == 2) {
-String dbName = 
unescapeIdentifier(tableNameNode.getChild(0).getText());
-String tableName = 
unescapeIdentifier(tableNameNode.getChild(1).getText());
+  public static String getUnescapedName(ASTNode tableOrColumnNode, String 
currentDatabase) {
+if (tableOrColumnNode.getToken().getType() == HiveParser.TOK_TABNAME) {
+  // table node
+  if (tableOrColumnNode.getChildCount() == 2) {
+String dbName = 
unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
+String tableName = 
unescapeIdentifier(tableOrColumnNode.getChild(1).getText());
 return dbName + "." + tableName;
   }
-  String tableName = 
unescapeIdentifier(tableNameNode.getChild(0).getText());
-  if (prependDefaultDB) {
-return MetaStoreUtils.DEFAULT_DATABASE_NAME + "." + tableName;
+  String tableName = 
unescapeIdentifier(tableOrColumnNode.getChild(0).getText());
+  if (currentDatabase != null) {
+return currentDatabase + "." + tableName;
   }
   return tableName;
 }
-return unescapeIdentifier(tableNameNode.getText());
+// column node
+return unescapeIdentifier(tableOrColumnNode.getText());
   }
 
   /**

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1410559&r1=1410558&r2=1410559&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
Fri Nov 16 20:29:00 2012
@@ -704,7 +704,8 @@ public class SemanticAnalyzer extends Ba
 break;
 
   case HiveParser.TOK_INSERT_INTO:
-String tab_name = 
getUnescapedName((ASTNode)ast.getChild(0).getChild(0), true);
+String currentDatabase = db.getCurrentDatabase();
+String tab_name = 
getUnescapedName((ASTNode)ast.getChild(0).getChild(0), currentDatabase);
 qbp.addInsertIntoTable(tab_name);
 
   case HiveParser.TOK_DESTINATION:
@@ -946,7 +947,7 @@ public class SemanticAnalyzer extends Ba
 }
 
 // Disallow INSERT INTO on bucketized tables
-if(qb.getParseInfo().isInsertIntoTable(tab.getDbName(), tab_name) &&
+if(qb.getParseInfo
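
The diff is truncated, but the substance of the fix is visible: instead of a 
boolean that always prepends the default database, getUnescapedName now 
receives the session's current database. A hedged sketch of the resulting 
resolution rule (the helper below is illustrative, not Hive's API):

public class InsertIntoResolutionSketch {
  // Illustrative: qualify an unqualified table name against the session's
  // current database; the regression effectively hardcoded "default" here.
  static String qualify(String tabName, String currentDatabase) {
    if (tabName.contains(".")) {
      return tabName;  // already db-qualified
    }
    return (currentDatabase != null ? currentDatabase : "default") + "." + tabName;
  }

  public static void main(String[] args) {
    System.out.println(qualify("t1", "sales"));     // sales.t1 (the bug produced default.t1)
    System.out.println(qualify("db2.t1", "sales")); // db2.t1
  }
}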

svn commit: r1407312 - /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java

2012-11-08 Thread cws
Author: cws
Date: Thu Nov  8 23:12:15 2012
New Revision: 1407312

URL: http://svn.apache.org/viewvc?rev=1407312&view=rev
Log:
HIVE-3651. bucketmapjoin?.q tests fail with hadoop 0.23 (Prasad Mujumdar via 
cws)

Modified:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java?rev=1407312&r1=1407311&r2=1407312&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/BucketMapJoinContext.java 
Thu Nov  8 23:12:15 2012
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.plan;
 
 import java.io.Serializable;
+import java.net.URI;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -199,7 +200,7 @@ public class BucketMapJoinContext implem
   private String prependPartSpec(String inputPath, String fileName) {
 Map<String, String> mapping = inputToPartSpecMapping == null ?
 inputToPartSpecMapping = revert(bigTablePartSpecToFileMapping) : 
inputToPartSpecMapping;
-String partSpec = mapping.get(inputPath);
+String partSpec = mapping.get(URI.create(inputPath).getPath());
 return partSpec == null || partSpec.isEmpty() ? fileName :
   "(" + FileUtils.escapePathName(partSpec) + ")" + fileName;
   }
@@ -210,7 +211,7 @@ public class BucketMapJoinContext implem
 for (Map.Entry<String, List<String>> entry : mapping.entrySet()) {
   String partSpec = entry.getKey();
   for (String file : entry.getValue()) {
-converted.put(file, partSpec);
+converted.put(URI.create(file).getPath(), partSpec);
   }
 }
 return converted;
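
Both hunks apply the same normalization: map keys are passed through 
java.net.URI.getPath() so that a scheme-qualified path and a bare path resolve 
to the same entry. For example (the host and warehouse layout are illustrative):

import java.net.URI;

public class UriPathDemo {
  public static void main(String[] args) {
    String qualified = "hdfs://namenode:8020/warehouse/t/ds=1/bucket_0";
    String bare = "/warehouse/t/ds=1/bucket_0";
    // getPath() drops the scheme and authority, so both forms now match:
    System.out.println(URI.create(qualified).getPath()); // /warehouse/t/ds=1/bucket_0
    System.out.println(URI.create(bare).getPath());      // /warehouse/t/ds=1/bucket_0
  }
}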




svn commit: r1406325 - in /hive/trunk/ql/src/test/results/compiler/plan: groupby1.q.xml groupby2.q.xml groupby3.q.xml groupby4.q.xml groupby5.q.xml groupby6.q.xml

2012-11-06 Thread cws
Author: cws
Date: Tue Nov  6 21:02:54 2012
New Revision: 1406325

URL: http://svn.apache.org/viewvc?rev=1406325&view=rev
Log:
HIVE-3674. Test case TestParse broken after recent checkin (Sambavi 
Muthukrishnan via cws)

Modified:
hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby5.q.xml
hive/trunk/ql/src/test/results/compiler/plan/groupby6.q.xml

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml?rev=1406325&r1=1406324&r2=1406325&view=diff
==
--- hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby1.q.xml Tue Nov  6 
21:02:54 2012
@@ -674,6 +674,9 @@
   <void method="add"> 
<string>CNTR_NAME_GBY_2_FATAL_ERROR</string> 
   </void> 
+  <void method="add"> 
+   <string>CNTR_NAME_GBY_2_COUNT_HASH_OUT</string> 
+  </void> 
  </object> 
 </void> 
 <void property="operatorId"> 
@@ -1504,6 +1507,9 @@
 <void method="add"> 
  <string>CNTR_NAME_GBY_4_FATAL_ERROR</string> 
 </void> 
+<void method="add"> 
+ <string>CNTR_NAME_GBY_4_COUNT_HASH_OUT</string> 
+</void> 
</object> 
   </void> 
   <void property="operatorId"> 

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml?rev=1406325&r1=1406324&r2=1406325&view=diff
==
--- hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby2.q.xml Tue Nov  6 
21:02:54 2012
@@ -776,6 +776,9 @@
   <void method="add"> 
<string>CNTR_NAME_GBY_2_FATAL_ERROR</string> 
   </void> 
+  <void method="add"> 
+   <string>CNTR_NAME_GBY_2_COUNT_HASH_OUT</string> 
+  </void> 
  </object> 
 </void> 
 <void property="operatorId"> 
@@ -1773,6 +1776,9 @@
 <void method="add"> 
  <string>CNTR_NAME_GBY_4_FATAL_ERROR</string> 
 </void> 
+<void method="add"> 
+ <string>CNTR_NAME_GBY_4_COUNT_HASH_OUT</string> 
+</void> 
</object> 
   </void> 
   <void property="operatorId"> 

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml?rev=1406325&r1=1406324&r2=1406325&view=diff
==
--- hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby3.q.xml Tue Nov  6 
21:02:54 2012
@@ -971,6 +971,9 @@
   <void method="add"> 
<string>CNTR_NAME_GBY_2_FATAL_ERROR</string> 
   </void> 
+  <void method="add"> 
+   <string>CNTR_NAME_GBY_2_COUNT_HASH_OUT</string> 
+  </void> 
  </object> 
 </void> 
 <void property="operatorId"> 
@@ -2098,6 +2101,9 @@
 <void method="add"> 
  <string>CNTR_NAME_GBY_4_FATAL_ERROR</string> 
 </void> 
+<void method="add"> 
+ <string>CNTR_NAME_GBY_4_COUNT_HASH_OUT</string> 
+</void> 
</object> 
   </void> 
   <void property="operatorId"> 

Modified: hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml?rev=1406325&r1=1406324&r2=1406325&view=diff
==
--- hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml (original)
+++ hive/trunk/ql/src/test/results/compiler/plan/groupby4.q.xml Tue Nov  6 
21:02:54 2012
@@ -482,6 +482,9 @@
   <void method="add"> 
<string>CNTR_NAME_GBY_2_FATAL_ERROR</string> 
   </void> 
+  <void method="add"> 
+   <string>CNTR_NAME_GBY_2_COUNT_HASH_OUT</string> 
+  </void> 
  </object> 
 </void> 
 <void property="operatorId"> 
@@ -1225,6 +1228,9 @@
 <void method="add"> 
  <string>CNTR_NAME_GBY_4_FATAL_ERROR</string> 
 </void> 
+<void method="add"> 
+ <string>CNTR_NAME_GBY_4_COUNT_HASH_OUT</string> 
+</void> 
</object> 
   </void> 
   <void property="operatorId"> 

Modified

svn commit: r1406338 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/plan/ test/queries/clientnegative/ test/queries/

2012-11-06 Thread cws
Author: cws
Date: Tue Nov  6 21:11:54 2012
New Revision: 1406338

URL: http://svn.apache.org/viewvc?rev=1406338&view=rev
Log:
HIVE-1977. DESCRIBE TABLE syntax doesn't support specifying a database 
qualified table name (Zhenxiao Luo via cws)

Added:
hive/trunk/ql/src/test/queries/clientnegative/desc_failure3.q
hive/trunk/ql/src/test/queries/clientpositive/describe_syntax.q
hive/trunk/ql/src/test/results/clientnegative/desc_failure3.q.out
hive/trunk/ql/src/test/results/clientpositive/describe_syntax.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1406338&r1=1406337&r2=1406338&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Nov  
6 21:11:54 2012
@@ -2779,9 +2779,8 @@ public class DDLTask extends Task<DDLWor
*   Throws this exception if an unexpected error occurs.
*/
   private int describeTable(Hive db, DescTableDesc descTbl) throws 
HiveException {
-String colPath = descTbl.getTableName();
-String tableName = colPath.substring(0,
-colPath.indexOf('.') == -1 ? colPath.length() : colPath.indexOf('.'));
+String colPath = descTbl.getColumnPath();
+String tableName = descTbl.getTableName();
 
 // describe the table - populate the output stream
 Table tbl = db.getTable(tableName, false);

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1406338&r1=1406337&r2=1406338&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
Tue Nov  6 21:11:54 2012
@@ -1374,20 +1374,234 @@ public class DDLSemanticAnalyzer extends
   }
 
   /**
-   * Get the fully qualified name in the ast. e.g. the ast of the form ^(DOT
-   * ^(DOT a b) c) will generate a name of the form a.b.c
-   *
-   * @param ast
-   *  The AST from which the qualified name has to be extracted
-   * @return String
+   * Utility class to resolve QualifiedName
*/
-  private String getFullyQualifiedName(ASTNode ast) {
-if (ast.getChildCount() == 0) {
-  return ast.getText();
-}
+  static class QualifiedNameUtil {
+
+// delimiter to check DOT delimited qualified names
+static String delimiter = "\\.";
 
-return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
+/**
+ * Get the fully qualified name in the ast. e.g. the ast of the form ^(DOT
+ * ^(DOT a b) c) will generate a name of the form a.b.c
+ *
+ * @param ast
+ *  The AST from which the qualified name has to be extracted
+ * @return String
+ */
+static public String getFullyQualifiedName(ASTNode ast) {
+  if (ast.getChildCount() == 0) {
+return ast.getText();
+  } else if (ast.getChildCount() == 2) {
+return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
 + getFullyQualifiedName((ASTNode) ast.getChild(1));
+  } else if (ast.getChildCount() == 3) {
+return getFullyQualifiedName((ASTNode) ast.getChild(0)) + "."
++ getFullyQualifiedName((ASTNode) ast.getChild(1)) + "."
++ getFullyQualifiedName((ASTNode) ast.getChild(2));
+  } else {
+return null;
+  }
+}
+
+// assume the first component of DOT delimited name is tableName
+// get the attemptTableName
+static public String getAttemptTableName(Hive db, String qualifiedName, 
boolean isColumn) {
+  // check whether the name starts with table
+  // DESCRIBE table
+  // DESCRIBE table.column
+  // DESCRIBE table column
+  String tableName = qualifiedName.substring(0,
+qualifiedName.indexOf('.') == -1 ?
+qualifiedName.length() : qualifiedName.indexOf('.'));
+  try {
+Table tab = db.getTable(tableName);
+if (tab != null) {
+  if (isColumn) {
+// if attempt to get columnPath
+// return the whole qualifiedName(table.column or table)
+return qualifiedName;
+  } else {
+// if attempt to get tableName
+// return table
+return tableName;
+  }
+}
+  } catch (HiveException e
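
The hunk is cut off mid-method, but the flattening recursion in 
getFullyQualifiedName is complete above: leaves return their text and DOT 
nodes join their children's names with dots. A toy sketch over a stand-in node 
type (not Hive's ASTNode):

public class QualifiedNameSketch {
  // Toy stand-in for the parser AST: a node carries text and children.
  static final class Node {
    final String text;
    final Node[] children;
    Node(String text, Node... children) { this.text = text; this.children = children; }
  }

  // Leaves return their text; interior (DOT) nodes join child names with ".".
  static String fullyQualifiedName(Node n) {
    if (n.children.length == 0) {
      return n.text;
    }
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < n.children.length; i++) {
      if (i > 0) sb.append('.');
      sb.append(fullyQualifiedName(n.children[i]));
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // ^(DOT ^(DOT db tab) col) flattens to "db.tab.col":
    Node ast = new Node(".", new Node(".", new Node("db"), new Node("tab")), new Node("col"));
    System.out.println(fullyQualifiedName(ast));
  }
}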

svn commit: r1406465 [3/15] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/a

2012-11-06 Thread cws
Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h?rev=1406465&r1=1406464&r2=1406465&view=diff
==
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h (original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.h Wed Nov  
7 04:55:00 2012
@@ -70,6 +70,12 @@ class ThriftHiveMetastoreIf : virtual pu
   virtual void get_index_by_name(Index& _return, const std::string& db_name, 
const std::string& tbl_name, const std::string& index_name) = 0;
   virtual void get_indexes(std::vector<Index> & _return, const std::string& 
db_name, const std::string& tbl_name, const int16_t max_indexes) = 0;
   virtual void get_index_names(std::vector<std::string> & _return, const 
std::string& db_name, const std::string& tbl_name, const int16_t max_indexes) = 
0;
+  virtual bool update_table_column_statistics(const ColumnStatistics& 
stats_obj) = 0;
+  virtual bool update_partition_column_statistics(const ColumnStatistics& 
stats_obj) = 0;
+  virtual void get_table_column_statistics(ColumnStatistics& _return, const 
std::string& db_name, const std::string& tbl_name, const std::string& col_name) 
= 0;
+  virtual void get_partition_column_statistics(ColumnStatistics& _return, 
const std::string& db_name, const std::string& tbl_name, const std::string& 
part_name, const std::string& col_name) = 0;
+  virtual bool delete_partition_column_statistics(const std::string& db_name, 
const std::string& tbl_name, const std::string& part_name, const std::string& 
col_name) = 0;
+  virtual bool delete_table_column_statistics(const std::string& db_name, 
const std::string& tbl_name, const std::string& col_name) = 0;
   virtual bool create_role(const Role& role) = 0;
   virtual bool drop_role(const std::string& role_name) = 0;
   virtual void get_role_names(std::vector<std::string> & _return) = 0;
@@ -261,6 +267,28 @@ class ThriftHiveMetastoreNull : virtual 
   void get_index_names(std::vector<std::string> & /* _return */, const 
std::string& /* db_name */, const std::string& /* tbl_name */, const int16_t /* 
max_indexes */) {
 return;
   }
+  bool update_table_column_statistics(const ColumnStatistics& /* stats_obj */) 
{
+bool _return = false;
+return _return;
+  }
+  bool update_partition_column_statistics(const ColumnStatistics& /* stats_obj 
*/) {
+bool _return = false;
+return _return;
+  }
+  void get_table_column_statistics(ColumnStatistics& /* _return */, const 
std::string& /* db_name */, const std::string& /* tbl_name */, const 
std::string& /* col_name */) {
+return;
+  }
+  void get_partition_column_statistics(ColumnStatistics& /* _return */, const 
std::string& /* db_name */, const std::string& /* tbl_name */, const 
std::string& /* part_name */, const std::string& /* col_name */) {
+return;
+  }
+  bool delete_partition_column_statistics(const std::string& /* db_name */, 
const std::string& /* tbl_name */, const std::string& /* part_name */, const 
std::string& /* col_name */) {
+bool _return = false;
+return _return;
+  }
+  bool delete_table_column_statistics(const std::string& /* db_name */, const 
std::string& /* tbl_name */, const std::string& /* col_name */) {
+bool _return = false;
+return _return;
+  }
   bool create_role(const Role& /* role */) {
 bool _return = false;
 return _return;
@@ -8167,38 +8195,38 @@ class ThriftHiveMetastore_get_index_name
 
 };
 
-typedef struct _ThriftHiveMetastore_create_role_args__isset {
-  _ThriftHiveMetastore_create_role_args__isset() : role(false) {}
-  bool role;
-} _ThriftHiveMetastore_create_role_args__isset;
+typedef struct _ThriftHiveMetastore_update_table_column_statistics_args__isset 
{
+  _ThriftHiveMetastore_update_table_column_statistics_args__isset() : 
stats_obj(false) {}
+  bool stats_obj;
+} _ThriftHiveMetastore_update_table_column_statistics_args__isset;
 
-class ThriftHiveMetastore_create_role_args {
+class ThriftHiveMetastore_update_table_column_statistics_args {
  public:
 
-  ThriftHiveMetastore_create_role_args() {
+  ThriftHiveMetastore_update_table_column_statistics_args() {
   }
 
-  virtual ~ThriftHiveMetastore_create_role_args() throw() {}
+  virtual ~ThriftHiveMetastore_update_table_column_statistics_args() throw() {}
 
-  Role role;
+  ColumnStatistics stats_obj;
 
-  _ThriftHiveMetastore_create_role_args__isset __isset;
+  _ThriftHiveMetastore_update_table_column_statistics_args__isset __isset;
 
-  void __set_role(const Role& val) {
-role = val;
+  void __set_stats_obj(const ColumnStatistics& val) {
+stats_obj = val;
   }
 
-  bool operator == (const ThriftHiveMetastore_create_role_args & rhs) const
+  bool operator == (const 
ThriftHiveMetastore_update_table_column_statistics_args & rhs) const
   {
-if (!(role == rhs.role))
+if (!(stats_obj == rhs.stats_obj))
   return false;
 return true;
   }
-  bool operator != 

svn commit: r1406465 [9/15] - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ data/files/ metastore/if/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/org/a

2012-11-06 Thread cws
Modified: 
hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/hive_metastore_types.php
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/hive_metastore_types.php?rev=1406465&r1=1406464&r2=1406465&view=diff
==
--- 
hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/hive_metastore_types.php
 (original)
+++ 
hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/hive_metastore_types.php
 Wed Nov  7 04:55:00 2012
@@ -3424,6 +3424,1197 @@ class Index {
 
 }
 
+class BooleanColumnStatsData {
+  static $_TSPEC;
+
+  public $numTrues = null;
+  public $numFalses = null;
+  public $numNulls = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'numTrues',
+  'type' => TType::I64,
+  ),
+2 => array(
+  'var' => 'numFalses',
+  'type' => TType::I64,
+  ),
+3 => array(
+  'var' => 'numNulls',
+  'type' => TType::I64,
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['numTrues'])) {
+$this->numTrues = $vals['numTrues'];
+  }
+  if (isset($vals['numFalses'])) {
+$this->numFalses = $vals['numFalses'];
+  }
+  if (isset($vals['numNulls'])) {
+$this->numNulls = $vals['numNulls'];
+  }
+}
+  }
+
+  public function getName() {
+return 'BooleanColumnStatsData';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  }
+  switch ($fid)
+  {
+case 1:
+  if ($ftype == TType::I64) {
+$xfer += $input->readI64($this->numTrues);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 2:
+  if ($ftype == TType::I64) {
+$xfer += $input->readI64($this->numFalses);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+case 3:
+  if ($ftype == TType::I64) {
+$xfer += $input->readI64($this->numNulls);
+  } else {
+$xfer += $input->skip($ftype);
+  }
+  break;
+default:
+  $xfer += $input->skip($ftype);
+  break;
+  }
+  $xfer += $input->readFieldEnd();
+}
+$xfer += $input->readStructEnd();
+return $xfer;
+  }
+
+  public function write($output) {
+$xfer = 0;
+$xfer += $output->writeStructBegin('BooleanColumnStatsData');
+if ($this->numTrues !== null) {
+  $xfer += $output->writeFieldBegin('numTrues', TType::I64, 1);
+  $xfer += $output->writeI64($this->numTrues);
+  $xfer += $output->writeFieldEnd();
+}
+if ($this->numFalses !== null) {
+  $xfer += $output->writeFieldBegin('numFalses', TType::I64, 2);
+  $xfer += $output->writeI64($this->numFalses);
+  $xfer += $output->writeFieldEnd();
+}
+if ($this->numNulls !== null) {
+  $xfer += $output->writeFieldBegin('numNulls', TType::I64, 3);
+  $xfer += $output->writeI64($this->numNulls);
+  $xfer += $output->writeFieldEnd();
+}
+$xfer += $output->writeFieldStop();
+$xfer += $output->writeStructEnd();
+return $xfer;
+  }
+
+}
+
+class DoubleColumnStatsData {
+  static $_TSPEC;
+
+  public $lowValue = null;
+  public $highValue = null;
+  public $numNulls = null;
+  public $numDVs = null;
+
+  public function __construct($vals=null) {
+if (!isset(self::$_TSPEC)) {
+  self::$_TSPEC = array(
+1 => array(
+  'var' => 'lowValue',
+  'type' => TType::DOUBLE,
+  ),
+2 => array(
+  'var' => 'highValue',
+  'type' => TType::DOUBLE,
+  ),
+3 => array(
+  'var' => 'numNulls',
+  'type' => TType::I64,
+  ),
+4 => array(
+  'var' => 'numDVs',
+  'type' => TType::I64,
+  ),
+);
+}
+if (is_array($vals)) {
+  if (isset($vals['lowValue'])) {
+$this->lowValue = $vals['lowValue'];
+  }
+  if (isset($vals['highValue'])) {
+$this->highValue = $vals['highValue'];
+  }
+  if (isset($vals['numNulls'])) {
+$this->numNulls = $vals['numNulls'];
+  }
+  if (isset($vals['numDVs'])) {
+$this->numDVs = $vals['numDVs'];
+  }
+}
+  }
+
+  public function getName() {
+return 'DoubleColumnStatsData';
+  }
+
+  public function read($input)
+  {
+$xfer = 0;
+$fname = null;
+$ftype = 0;
+$fid = 0;
+$xfer += $input->readStructBegin($fname);
+while (true)
+{
+  $xfer += $input->readFieldBegin($fname, $ftype, $fid);
+  if ($ftype == TType::STOP) {
+break;
+  

svn commit: r1400210 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java conf/hive-default.xml.template service/src/java/org/apache/hadoop/hive/service/HiveServer.java

2012-10-19 Thread cws
Author: cws
Date: Fri Oct 19 18:26:50 2012
New Revision: 1400210

URL: http://svn.apache.org/viewvc?rev=1400210&view=rev
Log:
HIVE-3590. TCP KeepAlive and connection timeout for the HiveServer (Esteban 
Gutierrez via cws)

Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/conf/hive-default.xml.template
hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1400210&r1=1400209&r2=1400210&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Fri 
Oct 19 18:26:50 2012
@@ -645,6 +645,12 @@ public class HiveConf extends Configurat
  * This will be removed once the rest of the DML changes are committed.
  */
 
HIVE_INTERNAL_DDL_LIST_BUCKETING_ENABLE("hive.internal.ddl.list.bucketing.enable",
 false),
+
+// Allow the TCP keepalive socket option for HiveServer, or a maximum 
timeout for the socket.
+
+SERVER_READ_SOCKET_TIMEOUT("hive.server.read.socket.timeout", 10),
+SERVER_TCP_KEEP_ALIVE("hive.server.tcp.keepalive", true),
+
 ;
 
 public final String varname;

Modified: hive/trunk/conf/hive-default.xml.template
URL: 
http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1400210&r1=1400209&r2=1400210&view=diff
==
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Fri Oct 19 18:26:50 2012
@@ -1491,5 +1491,19 @@
<description>The number of milliseconds between HMSHandler retry 
attempts</description>
 </property>
 
+
+<property>
+   <name>hive.server.read.socket.timeout</name>
+   <value>10</value>
+   <description>Timeout for the HiveServer to close the connection if no 
response from the client in N seconds, defaults to 10 seconds.</description>
+</property>
+
+<property>
+   <name>hive.server.tcp.keepalive</name>
+   <value>true</value>
+   <description>Whether to enable TCP keepalive for the Hive server. Keepalive 
will prevent accumulation of half-open connections.</description>
+</property>
+
+
 </configuration>
 

Modified: 
hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=1400210&r1=1400209&r2=1400210&view=diff
==
--- hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java 
(original)
+++ hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java 
Fri Oct 19 18:26:50 2012
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.metastore.TServerSocketKeepAlive;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.plan.api.QueryPlan;
@@ -651,6 +652,7 @@ public class HiveServer extends ThriftHi
 
   cli.parse(args);
 
+
   // NOTE: It is critical to do this prior to initializing log4j, otherwise
   // any log specific settings via hiveconf will be ignored
   Properties hiveconf = cli.addHiveconfToSystemProperties();
@@ -665,7 +667,11 @@ public class HiveServer extends ThriftHi
 
   HiveConf conf = new HiveConf(HiveServerHandler.class);
   ServerUtils.cleanUpScratchDir(conf);
-  TServerTransport serverTransport = new TServerSocket(cli.port);
+
+
+  boolean tcpKeepAlive = 
conf.getBoolVar(HiveConf.ConfVars.SERVER_TCP_KEEP_ALIVE);
+
+  TServerTransport serverTransport = tcpKeepAlive ? new 
TServerSocketKeepAlive(cli.port) : new TServerSocket(cli.port, 1000 * 
conf.getIntVar(HiveConf.ConfVars.SERVER_READ_SOCKET_TIMEOUT));
 
   // set all properties specified on the command line
   for (Map.Entry<Object, Object> item : hiveconf.entrySet()) {
@@ -688,6 +694,9 @@ public class HiveServer extends ThriftHi
 + " with " + cli.minWorkerThreads + " min worker threads and "
 + cli.maxWorkerThreads + " max worker threads";
   HiveServerHandler.LOG.info(msg);
+
+  HiveServerHandler.LOG.info("TCP keepalive = " + tcpKeepAlive);
+
   if (cli.isVerbose()) {
 System.err.println(msg);
   }
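
TServerSocketKeepAlive comes from the metastore package; judging by its name 
it enables SO_KEEPALIVE on accepted sockets, which is an assumption rather 
than something shown in this diff. At the plain java.net level, the two new 
settings map onto these socket options:

import java.net.ServerSocket;
import java.net.Socket;

// Sketch of the socket-level effect of the two new settings (assumptions:
// the keepalive wrapper sets SO_KEEPALIVE; the timeout variant bounds reads).
public class SocketOptionsSketch {
  public static void main(String[] args) throws Exception {
    try (ServerSocket server = new ServerSocket(10000)) {
      Socket client = server.accept();
      client.setKeepAlive(true);       // hive.server.tcp.keepalive=true
      client.setSoTimeout(10 * 1000);  // hive.server.read.socket.timeout=10 (seconds)
    }
  }
}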




svn commit: r1399929 - in /hive/trunk: common/src/java/conf/hive-log4j.properties ql/src/java/conf/hive-exec-log4j.properties

2012-10-18 Thread cws
Author: cws
Date: Fri Oct 19 00:02:10 2012
New Revision: 1399929

URL: http://svn.apache.org/viewvc?rev=1399929&view=rev
Log:
HIVE-3523. Hive info logging is broken (Carl Steinbach via cws)

Modified:
hive/trunk/common/src/java/conf/hive-log4j.properties
hive/trunk/ql/src/java/conf/hive-exec-log4j.properties

Modified: hive/trunk/common/src/java/conf/hive-log4j.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/conf/hive-log4j.properties?rev=1399929&r1=1399928&r2=1399929&view=diff
==
--- hive/trunk/common/src/java/conf/hive-log4j.properties (original)
+++ hive/trunk/common/src/java/conf/hive-log4j.properties Fri Oct 19 00:02:10 
2012
@@ -15,8 +15,8 @@
 # limitations under the License.
 
 # Define some default values that can be overridden by system properties
-hive.log.threshold=WARN
-hive.root.logger=${hive.log.threshold},DRFA
+hive.log.threshold=ALL
+hive.root.logger=WARN,DRFA
 hive.log.dir=/tmp/${user.name}
 hive.log.file=hive.log
 

Modified: hive/trunk/ql/src/java/conf/hive-exec-log4j.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/conf/hive-exec-log4j.properties?rev=1399929&r1=1399928&r2=1399929&view=diff
==
--- hive/trunk/ql/src/java/conf/hive-exec-log4j.properties (original)
+++ hive/trunk/ql/src/java/conf/hive-exec-log4j.properties Fri Oct 19 00:02:10 
2012
@@ -15,8 +15,8 @@
 # limitations under the License.
 
 # Define some default values that can be overridden by system properties
-hive.log.threshold=INFO
-hive.root.logger=${hive.log.threshold},FA
+hive.log.threshold=ALL
+hive.root.logger=INFO,FA
 hive.log.dir=/tmp/${user.name}
 hive.log.file=${hive.query.id}.log
 




svn commit: r1392202 - in /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql: QueryPlan.java exec/HadoopJobExecHelper.java plan/ReducerTimeStatsPerJob.java

2012-09-30 Thread cws
Author: cws
Date: Mon Oct  1 04:28:06 2012
New Revision: 1392202

URL: http://svn.apache.org/viewvc?rev=1392202&view=rev
Log:
add instrumentation to capture if there is skew in reducers (Arun Dobriya via 
cws)

Added:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReducerTimeStatsPerJob.java
   (with props)
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java?rev=1392202&r1=1392201&r2=1392202&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java Mon Oct  1 
04:28:06 2012
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.hooks.R
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.ReducerTimeStatsPerJob;
 import org.apache.hadoop.hive.ql.plan.api.AdjacencyType;
 import org.apache.hadoop.hive.ql.plan.api.NodeType;
 import org.apache.hadoop.hive.ql.plan.api.TaskType;
@@ -67,6 +68,7 @@ public class QueryPlan implements Serial
 
   private ArrayList<Task<? extends Serializable>> rootTasks;
   private FetchTask fetchTask;
+  private final List<ReducerTimeStatsPerJob> reducerTimeStatsPerJobList;
 
   private HashSet<ReadEntity> inputs;
   /**
@@ -94,12 +96,14 @@ public class QueryPlan implements Serial
   private transient Long queryStartTime;
 
   public QueryPlan() {
+this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
   }
 
   public QueryPlan(String queryString, BaseSemanticAnalyzer sem, Long 
startTime) {
 this.queryString = queryString;
 
 rootTasks = new ArrayList<Task<? extends Serializable>>();
+this.reducerTimeStatsPerJobList = new ArrayList<ReducerTimeStatsPerJob>();
 rootTasks.addAll(sem.getRootTasks());
 fetchTask = sem.getFetchTask();
 // Note that inputs and outputs can be changed when the query gets executed
@@ -706,6 +710,10 @@ public class QueryPlan implements Serial
 return query;
   }
 
+  public List<ReducerTimeStatsPerJob> getReducerTimeStatsPerJobList() {
+return this.reducerTimeStatsPerJobList;
+  }
+
   public void setQuery(org.apache.hadoop.hive.ql.plan.api.Query query) {
 this.query = query;
   }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1392202&r1=1392201&r2=1392202&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
Mon Oct  1 04:28:06 2012
@@ -42,6 +42,7 @@ import org.apache.hadoop.hive.ql.exec.Op
 import org.apache.hadoop.hive.ql.exec.errors.ErrorAndSolution;
 import org.apache.hadoop.hive.ql.exec.errors.TaskLogProcessor;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
+import org.apache.hadoop.hive.ql.plan.ReducerTimeStatsPerJob;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.stats.ClientStatsPublisher;
@@ -696,6 +697,12 @@ public class HadoopJobExecHelper {
 // for special modes. In that case, SessionState.get() is empty.
 if (SessionState.get() != null) {
   SessionState.get().getLastMapRedStatsList().add(mapRedStats);
+
+  // Computes the skew for all the MapReduce irrespective
+  // of Success or Failure
+  if (this.task.getQueryPlan() != null) {
+computeReducerTimeStatsPerJob(rj);
+  }
 }
 
 boolean success = mapRedStats.isSuccess();
@@ -733,6 +740,31 @@ public class HadoopJobExecHelper {
 return returnVal;
   }
 
+
+  private void computeReducerTimeStatsPerJob(RunningJob rj) throws IOException 
{
+TaskCompletionEvent[] taskCompletions = rj.getTaskCompletionEvents(0);
+List<Integer> reducersRunTimes = new ArrayList<Integer>();
+
+for (TaskCompletionEvent taskCompletion : taskCompletions) {
+  String[] taskJobIds = 
ShimLoader.getHadoopShims().getTaskJobIDs(taskCompletion);
+  if (taskJobIds == null) {
+// Task attempt info is unavailable in this Hadoop version
+continue;
+  }
+  String taskId = taskJobIds[0];
+  if (!taskCompletion.isMapTask()) {
+reducersRunTimes.add(new Integer(taskCompletion.getTaskRunTime()));
+  }
+}
+// Compute the reducers run time statistics for the job
+ReducerTimeStatsPerJob
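
The message is cut off just as the collected run times feed a 
ReducerTimeStatsPerJob. A hedged sketch of the kind of per-job summary such a 
holder plausibly derives from them (the class and its output format are 
illustrative):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Illustrative skew summary over reducer run times, in milliseconds.
public class ReducerSkewSketch {
  public static String summarize(List<Integer> runTimes) {
    if (runTimes.isEmpty()) {
      return "no reduce tasks";
    }
    int min = Collections.min(runTimes);
    int max = Collections.max(runTimes);
    long sum = 0;
    for (int t : runTimes) {
      sum += t;
    }
    double mean = (double) sum / runTimes.size();
    // A max far above the mean is the classic signature of one skewed reducer.
    return String.format("min=%dms max=%dms mean=%.0fms maxOverMean=%.2f",
        min, max, mean, max / mean);
  }

  public static void main(String[] args) {
    System.out.println(summarize(Arrays.asList(4000, 4200, 3900, 61000)));
  }
}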

svn commit: r1390155 - /hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

2012-09-25 Thread cws
Author: cws
Date: Tue Sep 25 21:28:46 2012
New Revision: 1390155

URL: http://svn.apache.org/viewvc?rev=1390155&view=rev
Log:
HIVE-3277. Enable Metastore audit logging for non-secure connections (Sean 
Mackrory via cws)

Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1390155&r1=1390154&r2=1390155&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Tue Sep 25 21:28:46 2012
@@ -207,7 +207,7 @@ public class HiveMetaStore extends Thrif
 };
 
 private final void logAuditEvent(String cmd) {
-  if (!useSasl || cmd == null) {
+  if (cmd == null) {
 return;
   }
 
@@ -219,8 +219,19 @@ public class HiveMetaStore extends Thrif
   }
   final Formatter fmt = auditFormatter.get();
   ((StringBuilder) fmt.out()).setLength(0);
+
+  String address;
+  if (useSasl) {
+address = saslServer.getRemoteAddress().toString();
+  } else {
+address = getIpAddress();
+  }
+  if (address == null) {
+address = "unknown-ip-addr";
+  }
+
   auditLog.info(fmt.format(AUDIT_FORMAT, ugi.getUserName(),
-  saslServer.getRemoteAddress().toString(), cmd).toString());
+address, cmd).toString());
 }
 
 // The next serial number to be assigned




svn commit: r1390278 - in /hive/trunk: common/src/java/conf/hive-log4j.properties ql/src/java/conf/hive-exec-log4j.properties

2012-09-25 Thread cws
Author: cws
Date: Wed Sep 26 05:05:44 2012
New Revision: 1390278

URL: http://svn.apache.org/viewvc?rev=1390278&view=rev
Log:
HIVE-3505. log4j template has logging threshold that hides all audit logs (Sean 
Mackrory via cws)

Modified:
hive/trunk/common/src/java/conf/hive-log4j.properties
hive/trunk/ql/src/java/conf/hive-exec-log4j.properties

Modified: hive/trunk/common/src/java/conf/hive-log4j.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/conf/hive-log4j.properties?rev=1390278&r1=1390277&r2=1390278&view=diff
==
--- hive/trunk/common/src/java/conf/hive-log4j.properties (original)
+++ hive/trunk/common/src/java/conf/hive-log4j.properties Wed Sep 26 05:05:44 
2012
@@ -15,7 +15,8 @@
 # limitations under the License.
 
 # Define some default values that can be overridden by system properties
-hive.root.logger=WARN,DRFA
+hive.log.threshold=WARN
+hive.root.logger=${hive.log.threshold},DRFA
 hive.log.dir=/tmp/${user.name}
 hive.log.file=hive.log
 
@@ -23,7 +24,7 @@ hive.log.file=hive.log
 log4j.rootLogger=${hive.root.logger}, EventCounter
 
 # Logging Threshold
-log4j.threshhold=WARN
+log4j.threshold=${hive.log.threshold}
 
 #
 # Daily Rolling File Appender

Modified: hive/trunk/ql/src/java/conf/hive-exec-log4j.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/conf/hive-exec-log4j.properties?rev=1390278&r1=1390277&r2=1390278&view=diff
==
--- hive/trunk/ql/src/java/conf/hive-exec-log4j.properties (original)
+++ hive/trunk/ql/src/java/conf/hive-exec-log4j.properties Wed Sep 26 05:05:44 
2012
@@ -15,7 +15,8 @@
 # limitations under the License.
 
 # Define some default values that can be overridden by system properties
-hive.root.logger=INFO,FA
+hive.log.threshold=INFO
+hive.root.logger=${hive.log.threshold},FA
 hive.log.dir=/tmp/${user.name}
 hive.log.file=${hive.query.id}.log
 
@@ -23,7 +24,7 @@ hive.log.file=${hive.query.id}.log
 log4j.rootLogger=${hive.root.logger}, EventCounter
 
 # Logging Threshold
-log4j.threshhold=WARN
+log4j.threshhold=${hive.log.threshold}
 
 #
 # File Appender




svn commit: r1387307 - in /hive/trunk: build.properties build.xml

2012-09-18 Thread cws
Author: cws
Date: Tue Sep 18 18:34:14 2012
New Revision: 1387307

URL: http://svn.apache.org/viewvc?rev=1387307&view=rev
Log:
HIVE-3450. Hive maven-publish ant task should be configurable (Travis Crawford 
via cws)

Modified:
hive/trunk/build.properties
hive/trunk/build.xml

Modified: hive/trunk/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1387307&r1=1387306&r2=1387307&view=diff
==
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Tue Sep 18 18:34:14 2012
@@ -106,6 +106,8 @@ mvn.publish.repo=snapshots
 mvn.jar.dir=${build.dir.hive}/maven/jars
 mvn.pom.dir=${build.dir.hive}/maven/poms
 mvn.license.dir=${build.dir.hive}/maven/licenses
+mvn.deploy.id=apache.snapshots.https
+mvn.deploy.url=https://repository.apache.org/content/repositories/snapshots
 
 #
 # unit test Properties

Modified: hive/trunk/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1387307&r1=1387306&r2=1387307&view=diff
==
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Tue Sep 18 18:34:14 2012
@@ -1187,9 +1187,7 @@
 <artifact:deploy
 file="${mvn.jar.dir}/hive-${hive.project}-${version}.jar">
   <pom refid="hive.project.pom" />
-  <remoteRepository
-  id="apache.snapshots.https"
-  
url="https://repository.apache.org/content/repositories/snapshots" />
+  <remoteRepository id="${mvn.deploy.id}" url="${mvn.deploy.url}"/>
 </artifact:deploy>
   /else
 /if




svn commit: r1386857 - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ObjectStore.java java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java test/org/apache/hadoop/hive/me

2012-09-17 Thread cws
Author: cws
Date: Mon Sep 17 21:40:10 2012
New Revision: 1386857

URL: http://svn.apache.org/viewvc?rev=1386857&view=rev
Log:
Backout HIVE-3443 (cws)

Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java

hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1386857&r1=1386856&r2=1386857&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java 
(original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java 
Mon Sep 17 21:40:10 2012
@@ -4009,23 +4009,24 @@ public class ObjectStore implements RawS
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
+  @SuppressWarnings("finally")
   public Collection<?> executeJDOQLSelect(String query) {
 boolean committed = false;
 Collection<?> result = null;
 
+LOG.info("Executing query: " + query);
+
 try {
   openTransaction();
   Query q = pm.newQuery(query);
   result = (Collection<?>) q.execute();
   committed = commitTransaction();
-  if (committed) {
-return result;
-  } else {
-return null;
-  }
 } finally {
   if (!committed) {
 rollbackTransaction();
+return null;
+  } else  {
+return result;
   }
 }
   }
@@ -4037,23 +4038,24 @@ public class ObjectStore implements RawS
   * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
   *
   */
+  @SuppressWarnings("finally")
   public long executeJDOQLUpdate(String query) {
 boolean committed = false;
 long numUpdated = 0;
 
+LOG.info("Executing query: " + query);
+
 try {
   openTransaction();
   Query q = pm.newQuery(query);
   numUpdated = (Long) q.execute();
   committed = commitTransaction();
-  if (committed) {
-return numUpdated;
-  } else {
-return -1;
-  }
 } finally {
   if (!committed) {
 rollbackTransaction();
+return -1;
+  } else {
+return numUpdated;
   }
 }
   }
@@ -4065,6 +4067,7 @@ public class ObjectStore implements RawS
   * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
   *
   */
+  @SuppressWarnings("finally")
   public Set<String> listFSRoots() {
 boolean committed = false;
 Set<String> fsRoots = new HashSet<String>();
@@ -4079,14 +4082,12 @@ public class ObjectStore implements RawS
 fsRoots.add(mDB.getLocationUri());
   }
   committed = commitTransaction();
-  if (committed) {
-return fsRoots;
-  } else {
-return null;
-  }
 } finally {
   if (!committed) {
 rollbackTransaction();
+return null;
+  } else {
+return fsRoots;
   }
 }
   }
@@ -4127,322 +4128,155 @@ public class ObjectStore implements RawS
 return true;
   }
 
-  public class UpdateMDatabaseURIRetVal {
-private List<String> badRecords;
-private Map<String, String> updateLocations;
-
-UpdateMDatabaseURIRetVal(List<String> badRecords, Map<String, String> 
updateLocations) {
-  this.badRecords = badRecords;
-  this.updateLocations = updateLocations;
-}
-
-public List<String> getBadRecords() {
-  return badRecords;
-}
-
-public void setBadRecords(List<String> badRecords) {
-  this.badRecords = badRecords;
-}
-
-public Map<String, String> getUpdateLocations() {
-  return updateLocations;
-}
-
-public void setUpdateLocations(Map<String, String> updateLocations) {
-  this.updateLocations = updateLocations;
-}
-  }
-
-  /** The following APIs
-  *
-  *  - updateMDatabaseURI
-  *
-  * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
-  *
-  */
-  public UpdateMDatabaseURIRetVal updateMDatabaseURI(URI oldLoc, URI newLoc, 
boolean dryRun) {
-boolean committed = false;
-Map<String, String> updateLocations = new HashMap<String, String>();
-List<String> badRecords = new ArrayList<String>();
-UpdateMDatabaseURIRetVal retVal = null;
-
-try {
-  openTransaction();
-  Query query = pm.newQuery(MDatabase.class);
-  List<MDatabase> mDBs = (List<MDatabase>) query.execute();
-  pm.retrieveAll(mDBs);
-
-  for(MDatabase mDB:mDBs) {
-URI locationURI = null;
-String location = mDB.getLocationUri();
-try {
-  locationURI = new URI(location);
-} catch(URISyntaxException e) {
-  badRecords.add(location);
-}
-if (locationURI == null) {
-  badRecords.add(location);
-} else {
-  if (shouldUpdateURI

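One detail worth spelling out from the restored code: @SuppressWarnings("finally") 
is needed because a return inside a finally block overrides the try block's 
outcome, silently discarding any exception raised there. A minimal demonstration:

// A return in finally wins over both normal returns and exceptions:
public class FinallyReturnDemo {
  static int risky() {
    try {
      throw new RuntimeException("lost");  // never reaches the caller
    } finally {
      return -1;  // overrides the exception; hence the compiler warning
    }
  }

  public static void main(String[] args) {
    System.out.println(risky());  // prints -1, no exception escapes
  }
}
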
svn commit: r1384433 - in /hive/trunk: data/files/ jdbc/src/java/org/apache/hadoop/hive/jdbc/ jdbc/src/test/org/apache/hadoop/hive/jdbc/

2012-09-13 Thread cws
Author: cws
Date: Thu Sep 13 17:50:19 2012
New Revision: 1384433

URL: http://svn.apache.org/viewvc?rev=1384433&view=rev
Log:
HIVE-2957. Hive JDBC doesn't support TIMESTAMP column (Richard Ding via cws)

Modified:
hive/trunk/data/files/datatypes.txt
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java

hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java

hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/Utils.java
hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java

Modified: hive/trunk/data/files/datatypes.txt
URL: 
http://svn.apache.org/viewvc/hive/trunk/data/files/datatypes.txt?rev=1384433&r1=1384432&r2=1384433&view=diff
==
--- hive/trunk/data/files/datatypes.txt (original)
+++ hive/trunk/data/files/datatypes.txt Thu Sep 13 17:50:19 2012
@@ -1,3 +1,3 @@
-\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
--1false-1.1\N\N\N-1-1-1.0-1\N\N
-1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd2
+\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N\N
+-1false-1.1\N\N\N-1-1-1.0-1\N\N\N
+1true1.11121x2ykva92.2111.01abcd1111213142212212x1abcd22012-04-22
 09:00:00.123456789

Modified: 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java?rev=1384433&r1=1384432&r2=1384433&view=diff
==
--- hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java 
(original)
+++ hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveBaseResultSet.java 
Thu Sep 13 17:50:19 2012
@@ -467,11 +467,21 @@ public abstract class HiveBaseResultSet 
   }
 
   public Timestamp getTimestamp(int columnIndex) throws SQLException {
-throw new SQLException("Method not supported");
+Object obj = getObject(columnIndex);
+if (obj == null) {
+  return null;
+}
+if (obj instanceof Timestamp) {
+  return (Timestamp) obj;
+}
+if (obj instanceof String) {
+  return Timestamp.valueOf((String)obj);
+}
+throw new SQLException("Illegal conversion");
   }
 
   public Timestamp getTimestamp(String columnName) throws SQLException {
-throw new SQLException("Method not supported");
+return getTimestamp(findColumn(columnName));
   }
 
   public Timestamp getTimestamp(int columnIndex, Calendar cal) throws 
SQLException {

Modified: 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java?rev=1384433&r1=1384432&r2=1384433&view=diff
==
--- 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java 
(original)
+++ 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HivePreparedStatement.java 
Thu Sep 13 17:50:19 2012
@@ -760,8 +760,7 @@ public class HivePreparedStatement imple
*/
 
   public void setTimestamp(int parameterIndex, Timestamp x) throws 
SQLException {
-// TODO Auto-generated method stub
-throw new SQLException("Method not supported");
+this.parameters.put(parameterIndex, x.toString());
   }
 
   /*

Modified: 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java?rev=1384433&r1=1384432&r2=1384433&view=diff
==
--- 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java 
(original)
+++ 
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/HiveResultSetMetaData.java 
Thu Sep 13 17:50:19 2012
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.jdbc;
 
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
-import java.sql.Types;
 import java.util.List;
 
 import org.apache.hadoop.hive.serde.Constants;
@@ -111,6 +110,8 @@ public class HiveResultSetMetaData imple
   return Constants.INT_TYPE_NAME;
     } else if ("bigint".equalsIgnoreCase(type)) {
       return Constants.BIGINT_TYPE_NAME;
+    } else if ("timestamp".equalsIgnoreCase(type)) {
+      return Constants.TIMESTAMP_TYPE_NAME;
     } else if (type.startsWith("map")) {
       return Constants.STRING_TYPE_NAME;
     } else if (type.startsWith("array")) {

Modified: hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcColumn.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc

svn commit: r1383065 - in /hive/trunk: ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/org/apache/hadoop/hiv

2012-09-10 Thread cws
Author: cws
Date: Mon Sep 10 19:21:03 2012
New Revision: 1383065

URL: http://svn.apache.org/viewvc?rev=1383065&view=rev
Log:
HIVE-3395. 0.23 compatibility: shim job.tracker.address (Francis Liu via cws)

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapRedTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java

hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java

hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java

hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java

hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1383065&r1=1383064&r2=1383065&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Mon Sep 10 
19:21:03 2012
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.common.Fil
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLock;
 import org.apache.hadoop.hive.ql.lockmgr.HiveLockManager;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.StringUtils;
 
 /**
@@ -488,7 +489,7 @@ public class Context {
* Today this translates into running hadoop jobs locally
*/
   public boolean isLocalOnlyExecutionMode() {
-    return HiveConf.getVar(conf, HiveConf.ConfVars.HADOOPJT).equals("local");
+return ShimLoader.getHadoopShims().isLocalMode(conf);
   }
 
  public List<HiveLock> getHiveLocks() {
@@ -516,7 +517,7 @@ public class Context {
 
   public void restoreOriginalTracker() {
 if (originalTracker != null) {
-  HiveConf.setVar(conf, HiveConf.ConfVars.HADOOPJT, originalTracker);
+  ShimLoader.getHadoopShims().setJobLauncherRpcAddress(conf, 
originalTracker);
   originalTracker = null;
 }
   }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java?rev=1383065&r1=1383064&r2=1383065&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExecDriver.java Mon 
Sep 10 19:21:03 2012
@@ -373,8 +373,7 @@ public class ExecDriver extends Task<Map
 try{
   MapredLocalWork localwork = work.getMapLocalWork();
   if (localwork != null) {
-        boolean localMode = HiveConf.getVar(job, HiveConf.ConfVars.HADOOPJT).equals("local");
-        if (!localMode) {
+        if (!ShimLoader.getHadoopShims().isLocalMode(job)) {
   Path localPath = new Path(localwork.getTmpFileURI());
   Path hdfsPath = new Path(work.getTmpHDFSFileURI());
 
@@ -706,7 +705,7 @@ public class ExecDriver extends Task<Map
 OutputStream out = null;
 
 Properties deltaP = hconf.getChangedProperties();
-    boolean hadoopLocalMode = hconf.getVar(HiveConf.ConfVars.HADOOPJT).equals("local");
+    boolean hadoopLocalMode = ShimLoader.getHadoopShims().isLocalMode(hconf);
     String hadoopSysDir = "mapred.system.dir";
     String hadoopWorkDir = "mapred.local.dir";
 

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1383065&r1=1383064&r2=1383065&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
Mon Sep 10 19:21:03 2012
@@ -423,10 +423,9 @@ public class HadoopJobExecHelper {
* from StreamJob.java.
*/
   public void jobInfo(RunningJob rj) {
-    if (job.get("mapred.job.tracker", "local").equals("local")) {
+    if (ShimLoader.getHadoopShims().isLocalMode(job)) {
       console.printInfo("Job running in-process (local Hadoop
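The point of the change is that "is this job local?" now has a single shim answer instead of string comparisons against HADOOPJT scattered through ql. A minimal sketch of what a pre-YARN isLocalMode implementation amounts to; the method body is an assumption for illustration, not the committed shim code:

    import org.apache.hadoop.conf.Configuration;

    public class LocalModeSketch {
      // Hypothetical stand-in for HadoopShims.isLocalMode(Configuration).
      public boolean isLocalMode(Configuration conf) {
        // Pre-YARN Hadoop signals local execution through the job tracker address.
        return "local".equals(conf.get("mapred.job.tracker", "local"));
      }
    }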

svn commit: r1382600 - in /hive/trunk/shims/src: 0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

2012-09-09 Thread cws
Author: cws
Date: Sun Sep  9 22:23:53 2012
New Revision: 1382600

URL: http://svn.apache.org/viewvc?rev=1382600&view=rev
Log:
HIVE-3387. Meta data file size exceeds limit (Navis Ryu via cws)

Modified:

hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java

hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java

Modified: 
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1382600&r1=1382599&r2=1382600&view=diff
==
--- 
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java 
(original)
+++ 
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java 
Sun Sep  9 22:23:53 2012
@@ -28,7 +28,10 @@ import java.net.URL;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import javax.security.auth.Subject;
 import javax.security.auth.login.LoginException;
@@ -160,10 +163,17 @@ public class Hadoop20Shims implements Ha
 }
 
 public InputSplitShim(CombineFileSplit old) throws IOException {
-  super(old);
+  super(old.getJob(), old.getPaths(), old.getStartOffsets(),
+  old.getLengths(), dedup(old.getLocations()));
   _isShrinked = false;
 }
 
+private static String[] dedup(String[] locations) {
+      Set<String> dedup = new HashSet<String>();
+  Collections.addAll(dedup, locations);
+  return dedup.toArray(new String[dedup.size()]);
+}
+
 @Override
 public void shrinkSplit(long length) {
   _isShrinked = true;

Modified: 
hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1382600&r1=1382599&r2=1382600&view=diff
==
--- 
hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 (original)
+++ 
hive/trunk/shims/src/common-secure/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
 Sun Sep  9 22:23:53 2012
@@ -25,7 +25,10 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -165,10 +168,17 @@ public abstract class HadoopShimsSecure 
 }
 
 public InputSplitShim(CombineFileSplit old) throws IOException {
-  super(old);
+  super(old.getJob(), old.getPaths(), old.getStartOffsets(),
+  old.getLengths(), dedup(old.getLocations()));
   _isShrinked = false;
 }
 
+private static String[] dedup(String[] locations) {
+      Set<String> dedup = new HashSet<String>();
+  Collections.addAll(dedup, locations);
+  return dedup.toArray(new String[dedup.size()]);
+}
+
 @Override
 public void shrinkSplit(long length) {
   _isShrinked = true;
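The metadata size limit was exceeded because a CombineFileSplit can repeat the same host many times in its location list, and every copy was serialized with the split. The dedup helper collapses those repeats; a standalone illustration of its effect:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class DedupSketch {
      static String[] dedup(String[] locations) {
        Set<String> set = new HashSet<String>();
        Collections.addAll(set, locations);
        return set.toArray(new String[set.size()]);
      }

      public static void main(String[] args) {
        // Three entries collapse to two; ordering is not preserved.
        String[] out = dedup(new String[] {"host1", "host2", "host1"});
        System.out.println(out.length);  // prints 2
      }
    }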




svn commit: r1380479 - in /hive/trunk/ql/src/test: queries/clientpositive/repair.q queries/clientpositive/repair_hadoop23.q results/clientpositive/repair.q.out results/clientpositive/repair_hadoop23.q

2012-09-04 Thread cws
Author: cws
Date: Tue Sep  4 08:04:59 2012
New Revision: 1380479

URL: http://svn.apache.org/viewvc?rev=1380479&view=rev
Log:
HIVE-3412. Fix TestCliDriver.repair on Hadoop 0.23.3, 3.0.0, and 2.2.0-alpha 
(Zhenxiao Luo via cws)

Added:
hive/trunk/ql/src/test/queries/clientpositive/repair_hadoop23.q
hive/trunk/ql/src/test/results/clientpositive/repair_hadoop23.q.out
Modified:
hive/trunk/ql/src/test/queries/clientpositive/repair.q
hive/trunk/ql/src/test/results/clientpositive/repair.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/repair.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/repair.q?rev=1380479&r1=1380478&r2=1380479&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/repair.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/repair.q Tue Sep  4 08:04:59 
2012
@@ -2,10 +2,23 @@
 
 CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
 
+-- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.23)
+
+-- When you invoke the mkdir command using versions of Hadoop up to and 
including 0.23,
+-- they behave as if you had specified the -p option,
+-- *but* they don't actually support the -p option.
+
+-- Support for the -p option first appeared in 1.0 and 2.0,
+-- but they maintain backward compatibility with older versions,
+-- so they let you include -p, but if you don't they still act like you did.
+
+-- HADOOP-8551 breaks backward compatibility with 0.23 and older versions by
+-- requiring you to explicitly specify -p if you require that behavior.
+
 MSCK TABLE repairtable;
 
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
-dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
+dfs -mkdir -p ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
+dfs -mkdir -p ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
 
 MSCK TABLE repairtable;
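For contrast, the programmatic HDFS API never had this portability wrinkle: FileSystem.mkdirs() creates all missing parent directories on every Hadoop version, so only shell-based test setup needed the explicit -p. A small Java illustration (the path is made up):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MkdirsSketch {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Creates p2=a and any missing parents in one call, i.e. the behavior
        // the shell needs "dfs -mkdir -p" for after HADOOP-8551.
        fs.mkdirs(new Path("build/ql/test/data/warehouse/repairtable/p1=a/p2=a"));
      }
    }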
 

Added: hive/trunk/ql/src/test/queries/clientpositive/repair_hadoop23.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/repair_hadoop23.q?rev=1380479&view=auto
==
--- hive/trunk/ql/src/test/queries/clientpositive/repair_hadoop23.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/repair_hadoop23.q Tue Sep  4 
08:04:59 2012
@@ -0,0 +1,28 @@
+
+
+CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 STRING, p2 STRING);
+
+-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.23)
+-- When you invoke the mkdir command using versions of Hadoop up to and 
including 0.23,
+-- they behave as if you had specified the -p option,
+-- *but* they don't actually support the -p option.
+
+-- Support for the -p option first appeared in 1.0 and 2.0,
+-- but they maintain backward compatibility with older versions,
+-- so they let you include -p, but if you don't they still act like you did.
+
+-- HADOOP-8551 breaks backward compatibility with 0.23 and older versions by
+-- requiring you to explicitly specify -p if you require that behavior.
+
+MSCK TABLE repairtable;
+
+dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=a/p2=a;
+dfs -mkdir ../build/ql/test/data/warehouse/repairtable/p1=b/p2=a;
+
+MSCK TABLE repairtable;
+
+MSCK REPAIR TABLE repairtable;
+
+MSCK TABLE repairtable;
+
+

Modified: hive/trunk/ql/src/test/results/clientpositive/repair.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/repair.q.out?rev=1380479&r1=1380478&r2=1380479&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/repair.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/repair.q.out Tue Sep  4 
08:04:59 2012
@@ -3,9 +3,33 @@ PREHOOK: type: CREATETABLE
 POSTHOOK: query: CREATE TABLE repairtable(col STRING) PARTITIONED BY (p1 
STRING, p2 STRING)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: default@repairtable
-PREHOOK: query: MSCK TABLE repairtable
+PREHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.23)
+-- When you invoke the mkdir command using versions of Hadoop up to and 
including 0.23,
+-- they behave as if you had specified the -p option,
+-- *but* they don't actually support the -p option.
+
+-- Support for the -p option first appeared in 1.0 and 2.0,
+-- but they maintain backward compatibility with older versions,
+-- so they let you include -p, but if you don't they still act like you did.
+
+-- HADOOP-8551 breaks backward compatibility with 0.23 and older versions by
+-- requiring you to explicitly specify -p if you require that behavior.
+
+MSCK TABLE repairtable
 PREHOOK: type: MSCK
-POSTHOOK: query: MSCK TABLE repairtable
+POSTHOOK: query: -- EXCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.23)
+-- When you invoke the mkdir command using versions of Hadoop up to and 
including 0.23,
+-- they behave as if you had specified the -p option

svn commit: r1380483 - /hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql

2012-09-04 Thread cws
Author: cws
Date: Tue Sep  4 08:26:05 2012
New Revision: 1380483

URL: http://svn.apache.org/viewvc?rev=1380483&view=rev
Log:
HIVE-3424. Error by upgrading a Hive 0.7.0 database to 0.8.0 
(008-HIVE-2246.mysql.sql) (Alexander Alten-Lorenz via cws)

Modified:
hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql

Modified: hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql?rev=1380483&r1=1380482&r2=1380483&view=diff
==
--- hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql 
(original)
+++ hive/trunk/metastore/scripts/upgrade/mysql/008-HIVE-2246.mysql.sql Tue Sep  
4 08:26:05 2012
@@ -26,9 +26,11 @@ CREATE PROCEDURE REVERT()
 DROP TABLE IF EXISTS COLUMNS_V2;
 DROP TABLE IF EXISTS TABLE_SDS;
 DROP TABLE IF EXISTS CDS;
+SET FOREIGN_KEY_CHECKS = 0;
 ALTER TABLE COLUMNS_OLD 
   ADD CONSTRAINT `COLUMNS_FK1` FOREIGN KEY (`SD_ID`) REFERENCES 
`SDS`(`SD_ID`)
 ;
+SET FOREIGN_KEY_CHECKS = 1;
 RENAME TABLE COLUMNS_OLD TO COLUMNS;
 
   END $$
@@ -214,8 +216,10 @@ CREATE PROCEDURE CREATE_TABLE_SDS()
 CREATE PROCEDURE RENAME_OLD_COLUMNS()
   BEGIN
 RENAME TABLE `COLUMNS` TO `COLUMNS_OLD`;
+SET FOREIGN_KEY_CHECKS = 0;
 ALTER TABLE COLUMNS_OLD 
   DROP FOREIGN KEY `COLUMNS_FK1`;
+SET FOREIGN_KEY_CHECKS = 1;
   END $$
 
 /*




svn commit: r1380490 - in /hive/trunk: serde/ivy.xml shims/ivy.xml

2012-09-04 Thread cws
Author: cws
Date: Tue Sep  4 08:45:05 2012
New Revision: 1380490

URL: http://svn.apache.org/viewvc?rev=1380490&view=rev
Log:
HIVE-3416 [jira] Fix TestAvroSerdeUtils.determineSchemaCanReadSchemaFromHDFS 
when running Hive on hadoop23
(Zhenxiao Luo via Carl Steinbach)

Summary:
HIVE-3416: Fix TestAvroSerdeUtils.determineSchemaCanReadSchemaFromHDFS when 
running Hive on hadoop23

TestAvroSerdeUtils determinSchemaCanReadSchemaFromHDFS is failing when running 
hive on hadoop23:

$ant very-clean package -Dhadoop.version=0.23.1 -Dhadoop-0.23.version=0.23.1 
-Dhadoop.mr.rev=23

$ant test -Dhadoop.version=0.23.1 -Dhadoop-0.23.version=0.23.1 
-Dhadoop.mr.rev=23 -Dtestcase=TestAvroSerdeUtils

<testcase classname="org.apache.hadoop.hive.serde2.avro.TestAvroSerdeUtils"
name="determineSchemaCanReadSchemaFromHDFS" time="0.21">
<error message="org/apache/hadoop/net/StaticMapping"
type="java.lang.NoClassDefFoundError">java.lang.NoClassDefFoundError:
org/apache/hadoop/net/StaticMapping
at 
org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:534)
at org.apache.hadoop.hdfs.MiniDFSCluster.init(MiniDFSCluster.java:489)
at org.apache.hadoop.hdfs.MiniDFSCluster.init(MiniDFSCluster.java:360)
at 
org.apache.hadoop.hive.serde2.avro.TestAvroSerdeUtils.determineSchemaCanReadSchemaFromHDFS(TestAvroSerdeUtils.java:187)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:616)
at 
org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
at 
org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
at 
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
at 
org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
at 
org.junit.runners.BlockJUnit4ClassRunner.runNotIgnored(BlockJUnit4ClassRunner.java:79)
at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:71)
at 
org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:49)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.net.StaticMapping
at java.net.URLClassLoader$1.run(URLClassLoader.java:217)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:205)
at java.lang.ClassLoader.loadClass(ClassLoader.java:321)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:294)
at java.lang.ClassLoader.loadClass(ClassLoader.java:266)
... 25 more
</error>
  </testcase>

Test Plan: EMPTY

Reviewers: JIRA

Differential Revision: https://reviews.facebook.net/D5025

Modified:
hive/trunk/serde/ivy.xml
hive/trunk/shims/ivy.xml

Modified: hive/trunk/serde/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/ivy.xml?rev=1380490&r1=1380489&r2=1380490&view=diff
==
--- hive/trunk/serde/ivy.xml (original)
+++ hive/trunk/serde/ivy.xml Tue Sep  4 08:45:05 2012
@@ -14,7 +14,7 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<ivy-module version="2.0">
+<ivy-module version="2.0" xmlns:m="http://ant.apache.org/ivy/maven">
   <info organisation="${hive.ivy.org}" module="hive-serde"
         revision="${version}">
     <license name="The Apache Software License, Version 2.0"
              url="http://www.apache.org/licenses/LICENSE-2.0.txt" />
     <description homepage="http://hive.apache.org">
@@ -51,6 +51,20 @@
 transitive=false/
 
     <!-- Test Dependencies -->
+    <dependency org="org.apache.hadoop" name="hadoop-common"
+                rev="${hadoop.version.ant-internal}"
+                conf="hadoop23.test->default">
+      <artifact name="hadoop-common" type="tests" ext="jar" m:classifier="tests"/>
+      <exclude org="commons-daemon" module="commons-daemon"/><!--bad POM-->
+      <exclude org="org.apache.commons" module="commons-daemon"/><!--bad POM-->
+    </dependency>

svn commit: r1378472 - /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java

2012-08-29 Thread cws
Author: cws
Date: Wed Aug 29 08:06:38 2012
New Revision: 1378472

URL: http://svn.apache.org/viewvc?rev=1378472&view=rev
Log:
HIVE-3265. HiveHistory.printRowCount() throws NPE (Shreepadma Venugopalan via 
cws)

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java?rev=1378472&r1=1378471&r2=1378472&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/history/HiveHistory.java 
Wed Aug 29 08:06:38 2012
@@ -409,6 +409,9 @@ public class HiveHistory {
 
   public void printRowCount(String queryId) {
 QueryInfo ji = queryInfoMap.get(queryId);
+if (ji == null) {
+  return;
+}
 for (String tab : ji.rowCountMap.keySet()) {
      console.printInfo(ji.rowCountMap.get(tab) + " Rows loaded to " + tab);
 }
@@ -420,7 +423,6 @@ public class HiveHistory {
* @param queryId
*/
   public void endQuery(String queryId) {
-
 QueryInfo ji = queryInfoMap.get(queryId);
 if (ji == null) {
   return;




svn commit: r1377359 - in /hive/trunk: build-common.xml build.properties common/build.xml

2012-08-25 Thread cws
Author: cws
Date: Sat Aug 25 21:56:27 2012
New Revision: 1377359

URL: http://svn.apache.org/viewvc?rev=1377359&view=rev
Log:
HIVE-3345. Add junit exclude utility to disable testcases (Zhenxiao Luo via cws)

Modified:
hive/trunk/build-common.xml
hive/trunk/build.properties
hive/trunk/common/build.xml

Modified: hive/trunk/build-common.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1377359&r1=1377358&r2=1377359&view=diff
==
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Sat Aug 25 21:56:27 2012
@@ -440,7 +440,7 @@
   <batchtest todir="${test.build.dir}" unless="testcase">
     <fileset dir="${test.build.classes}"
              includes="**/${test.include}.class"
-             excludes="**/TestSerDe.class,**/TestHiveMetaStore.class,**/*$*.class" />
+             excludes="**/TestSerDe.class,**/TestHiveMetaStore.class,**/*$*.class,${test.junit.exclude}" />
   </batchtest>
   <batchtest todir="${test.build.dir}" if="testcase">
     <fileset dir="${test.build.classes}" includes="**/${testcase}.class"/>

Modified: hive/trunk/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1377359&r1=1377358&r2=1377359&view=diff
==
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Sat Aug 25 21:56:27 2012
@@ -70,6 +70,15 @@ jsp.test.jar=${hadoop.root}/lib/jetty-ex
 common.jar=${hadoop.root}/lib/commons-httpclient-3.0.1.jar
 
 #
+# Test Properties
+#
+
+# Use this property to selectively disable tests from the command line:
+# ant test -Dtest.junit.exclude=**/TestCliDriver.class
# ant test -Dtest.junit.exclude=**/Test*CliDriver.class,**/TestPartitions.class
+test.junit.exclude=
+
+#
 # Ivy Properties
 #
 build.ivy.dir=${build.dir.hive}/ivy

Modified: hive/trunk/common/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/build.xml?rev=1377359&r1=1377358&r2=1377359&view=diff
==
--- hive/trunk/common/build.xml (original)
+++ hive/trunk/common/build.xml Sat Aug 25 21:56:27 2012
@@ -58,7 +58,7 @@ to call at top-level: ant deploy-contrib
   <batchtest todir="${test.build.dir}" unless="testcase">
     <fileset dir="${test.build.classes}"
              includes="**/${test.include}.class"
-             excludes="**/*$*.class" />
+             excludes="**/*$*.class,${test.junit.exclude}" />
   </batchtest>
   <batchtest todir="${test.build.dir}" if="testcase">
     <fileset dir="${test.build.classes}" includes="**/${testcase}.class"/>




svn commit: r1376834 - in /hive/trunk/ql/src/test: queries/clientpositive/newline.q queries/clientpositive/timestamp_lazy.q results/clientpositive/newline.q.out results/clientpositive/timestamp_lazy.q

2012-08-24 Thread cws
Author: cws
Date: Fri Aug 24 06:21:42 2012
New Revision: 1376834

URL: http://svn.apache.org/viewvc?rev=1376834&view=rev
Log:
HIVE-3240. Fix non-deterministic results in newline.q and timestamp_lazy.q 
(Zhenxiao Luo via cws)

Modified:
hive/trunk/ql/src/test/queries/clientpositive/newline.q
hive/trunk/ql/src/test/queries/clientpositive/timestamp_lazy.q
hive/trunk/ql/src/test/results/clientpositive/newline.q.out
hive/trunk/ql/src/test/results/clientpositive/timestamp_lazy.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/newline.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/newline.q?rev=1376834&r1=1376833&r2=1376834&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/newline.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/newline.q Fri Aug 24 06:21:42 
2012
@@ -6,7 +6,7 @@ insert overwrite table tmp_tmp
 SELECT TRANSFORM(key, value) USING
 'python newline.py' AS key, value FROM src limit 6;
 
-select * from tmp_tmp;
+select * from tmp_tmp ORDER BY key ASC, value ASC;
 
 drop table tmp_tmp;
 
@@ -20,7 +20,7 @@ insert overwrite table tmp_tmp
 SELECT TRANSFORM(key, value) USING
 'python escapednewline.py' AS key, value FROM src limit 5;
 
-select * from tmp_tmp;
+select * from tmp_tmp ORDER BY key ASC, value ASC;
 
 SELECT TRANSFORM(key, value) USING
 'cat' AS (key, value) FROM tmp_tmp;
@@ -29,7 +29,7 @@ insert overwrite table tmp_tmp
 SELECT TRANSFORM(key, value) USING
 'python escapedcarriagereturn.py' AS key, value FROM src limit 5;
 
-select * from tmp_tmp;
+select * from tmp_tmp ORDER BY key ASC, value ASC;
 
 SELECT TRANSFORM(key, value) USING
 'cat' AS (key, value) FROM tmp_tmp;
@@ -38,7 +38,7 @@ insert overwrite table tmp_tmp
 SELECT TRANSFORM(key, value) USING
 'python escapedtab.py' AS key, value FROM src limit 5;
 
-select * from tmp_tmp;
+select * from tmp_tmp ORDER BY key ASC, value ASC;
 
 SELECT TRANSFORM(key, value) USING
 'cat' AS (key, value) FROM tmp_tmp;
@@ -47,11 +47,11 @@ insert overwrite table tmp_tmp
 SELECT TRANSFORM(key, value) USING
 'python doubleescapedtab.py' AS key, value FROM src limit 5;
 
-select * from tmp_tmp;
+select * from tmp_tmp ORDER BY key ASC, value ASC;
 
 SELECT TRANSFORM(key, value) USING
 'cat' AS (key, value) FROM tmp_tmp;
 
-SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) 
FROM src limit 1)a;
+SELECT key FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, 
value) FROM src limit 1)a ORDER BY key ASC;
 
-SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING 'cat' AS (key, value) 
FROM src limit 1)a;
+SELECT value FROM (SELECT TRANSFORM ('a\tb', 'c') USING '/bin/cat' AS (key, 
value) FROM src limit 1)a ORDER BY value ASC;

Modified: hive/trunk/ql/src/test/queries/clientpositive/timestamp_lazy.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/timestamp_lazy.q?rev=1376834&r1=1376833&r2=1376834&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/timestamp_lazy.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/timestamp_lazy.q Fri Aug 24 
06:21:42 2012
@@ -2,5 +2,5 @@ drop table timestamp_lazy;
 create table timestamp_lazy (t timestamp, key string, value string);
 insert overwrite table timestamp_lazy select cast('2011-01-01 01:01:01' as 
timestamp), key, value from src limit 5;
 
-select t,key,value from timestamp_lazy;
-select t,key,value from timestamp_lazy distribute by t;
\ No newline at end of file
+select t,key,value from timestamp_lazy ORDER BY key ASC, value ASC;
+select t,key,value from timestamp_lazy distribute by t sort by key ASC, value 
ASC;

Modified: hive/trunk/ql/src/test/results/clientpositive/newline.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/newline.q.out?rev=1376834&r1=1376833&r2=1376834&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/newline.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/newline.q.out Fri Aug 24 
06:21:42 2012
@@ -17,26 +17,26 @@ POSTHOOK: Input: default@src
 POSTHOOK: Output: default@tmp_tmp
 POSTHOOK: Lineage: tmp_tmp.key SCRIPT [(src)src.FieldSchema(name:key, 
type:string, comment:default), (src)src.FieldSchema(name:value, type:string, 
comment:default), ]
 POSTHOOK: Lineage: tmp_tmp.value SCRIPT [(src)src.FieldSchema(name:key, 
type:string, comment:default), (src)src.FieldSchema(name:value, type:string, 
comment:default), ]
-PREHOOK: query: select * from tmp_tmp
+PREHOOK: query: select * from tmp_tmp ORDER BY key ASC, value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tmp_tmp
  A masked pattern was here 
-POSTHOOK: query: select * from tmp_tmp
+POSTHOOK: query: select * from tmp_tmp ORDER BY key ASC, value ASC
 POSTHOOK: type: QUERY

svn commit: r1374101 - /hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java

2012-08-16 Thread cws
Author: cws
Date: Thu Aug 16 23:37:06 2012
New Revision: 1374101

URL: http://svn.apache.org/viewvc?rev=1374101&view=rev
Log:
HIVE-3029. Update ShimLoader to work with Hadoop 2.x (Carl Steinbach via cws)

Modified:

hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java

Modified: 
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java?rev=1374101&r1=1374100&r2=1374101&view=diff
==
--- 
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java 
(original)
+++ 
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java 
Thu Aug 16 23:37:06 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.lang.IllegalArgumentException;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -104,9 +105,12 @@ public abstract class ShimLoader {
   }
 
   /**
-   * Return the major version of Hadoop currently on the classpath.
-   * This is simply the first two components of the version number
-   * (e.g 0.18 or 0.20)
+   * Return the major version of Hadoop currently on the classpath.
+   * For releases in the 0.x series this is simply the first two
+   * components of the version, e.g. 0.20 or 0.23. Releases in
+   * the 1.x and 2.x series are mapped to the appropriate
+   * 0.x release series, e.g. 1.x is mapped to 0.20S and 2.x
+   * is mapped to 0.23.
*/
   public static String getMajorVersion() {
 String vers = VersionInfo.getVersion();
@@ -116,9 +120,19 @@ public abstract class ShimLoader {
      throw new RuntimeException("Illegal Hadoop Version: " + vers +
          " (expected A.B.* format)");
 }
-    if (Integer.parseInt(parts[0]) > 0){
+
+    // Special handling for Hadoop 1.x and 2.x
+    switch (Integer.parseInt(parts[0])) {
+    case 0:
+      break;
+    case 1:
       return "0.20S";
+    case 2:
+      return "0.23";
+    default:
+      throw new IllegalArgumentException("Unrecognized Hadoop major version number: " + vers);
     }
+
     String majorVersion = parts[0] + "." + parts[1];
 
 // If we are running a security release, we won't have 
UnixUserGroupInformation
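To make the mapping concrete, here is the same version logic as a hedged standalone sketch (not the shim loader itself):

    public class MajorVersionSketch {
      static String majorVersion(String vers) {
        String[] parts = vers.split("\\.");
        switch (Integer.parseInt(parts[0])) {
        case 0:  return parts[0] + "." + parts[1];   // e.g. "0.20", "0.23"
        case 1:  return "0.20S";                     // 1.x uses the secure 0.20 shims
        case 2:  return "0.23";                      // 2.x uses the 0.23 shims
        default: throw new IllegalArgumentException("Unrecognized version: " + vers);
        }
      }

      public static void main(String[] args) {
        System.out.println(majorVersion("0.20.2"));  // 0.20
        System.out.println(majorVersion("1.0.3"));   // 0.20S
        System.out.println(majorVersion("2.0.0"));   // 0.23
      }
    }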




svn commit: r1373145 - in /hive/trunk: ./ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/errors/ ql/src/test/org/apache/hadoop/hive/ql/udf/generic/ ql/src/test/

2012-08-14 Thread cws
Author: cws
Date: Tue Aug 14 22:21:37 2012
New Revision: 1373145

URL: http://svn.apache.org/viewvc?rev=1373145&view=rev
Log:
HIVE-2804. Task log retrieval fails on Hadoop 0.23 (Zhenxiao Luo via cws)

Added:

hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEvaluateNPE.java
   (with props)
hive/trunk/ql/src/test/queries/clientnegative/cluster_tasklog_retrieval.q

hive/trunk/ql/src/test/results/clientnegative/cluster_tasklog_retrieval.q.out
Modified:
hive/trunk/build-common.xml

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/errors/TaskLogProcessor.java

hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java

hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java

hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java

hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/build-common.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1373145&r1=1373144&r2=1373145&view=diff
==
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Tue Aug 14 22:21:37 2012
@@ -59,7 +59,7 @@
   property name=test.junit.output.format value=xml/
   property name=test.junit.output.usefile value=true/
   property name=minimr.query.files 
value=input16_cc.q,scriptfile1.q,bucket4.q,bucketmapjoin6.q,disable_merge_for_bucketing.q,reduce_deduplicate.q,smb_mapjoin_8.q,join1.q,groupby2.q,bucketizedhiveinputformat.q/
-  property name=minimr.query.negative.files 
value=minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q
 /
+  property name=minimr.query.negative.files 
value=cluster_tasklog_retrieval.q,minimr_broken_pipe.q,mapreduce_stack_trace.q,mapreduce_stack_trace_turnoff.q,mapreduce_stack_trace_hadoop20.q,mapreduce_stack_trace_turnoff_hadoop20.q
 /
   property name=test.silent value=true/
   property name=hadoopVersion value=${hadoop.version.ant-internal}/
   property name=test.serialize.qplan value=false/

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1373145&r1=1373144&r2=1373145&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java 
Tue Aug 14 22:21:37 2012
@@ -20,6 +20,8 @@ package org.apache.hadoop.hive.ql.exec;
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.lang.Exception;
+import java.net.MalformedURLException;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Calendar;
@@ -106,10 +108,6 @@ public class HadoopJobExecHelper {
 return Ended Job =  + jobId;
   }
 
-  private String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
-    return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
-  }
-
   public boolean mapStarted() {
 return mapProgress  0;
   }
@@ -495,7 +493,8 @@ public class HadoopJobExecHelper {
   }
 
   @SuppressWarnings("deprecation")
-  private void showJobFailDebugInfo(JobConf conf, RunningJob rj) throws 
IOException {
+  private void showJobFailDebugInfo(JobConf conf, RunningJob rj)
+throws IOException, MalformedURLException {
 // Mapping from task ID to the number of failures
    Map<String, Integer> failures = new HashMap<String, Integer>();
 // Successful task ID's
@@ -544,7 +543,11 @@ public class HadoopJobExecHelper {
 }
 // These tasks should have come from the same job.
 assert (ti.getJobId() != null  ti.getJobId().equals(jobId));
-ti.getLogUrls().add(getTaskAttemptLogUrl(t.getTaskTrackerHttp(), 
t.getTaskId()));
+String taskAttemptLogUrl = 
ShimLoader.getHadoopShims().getTaskAttemptLogUrl(
+  conf, t.getTaskTrackerHttp(), t.getTaskId());
+if (taskAttemptLogUrl != null) {
+  ti.getLogUrls().add(taskAttemptLogUrl);
+}
 
 // If a task failed, then keep track of the total number of failures
 // for that task (typically, a task gets re-run up to 4 times if it
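The hard-coded URL above was the 0.20-era task tracker format; on 0.23 the log location differs, which is why the lookup moved behind the shim. A hedged sketch of what a 0.20 implementation of getTaskAttemptLogUrl plausibly returns (the committed shim bodies are not part of this diff):

    // Hypothetical stand-in for Hadoop20Shims.getTaskAttemptLogUrl(...).
    public class TaskLogUrlSketch {
      public String getTaskAttemptLogUrl(String taskTrackerHttpAddress, String taskAttemptId) {
        // The same URL HadoopJobExecHelper used to build inline before HIVE-2804.
        return taskTrackerHttpAddress + "/tasklog?taskid=" + taskAttemptId + "&all=true";
      }
    }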

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobDebugger.java?rev=1373145&r1=1373144&r2=1373145&view=diff
==
--- hive/trunk/ql/src

svn commit: r1373218 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ conf/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientposi

2012-08-14 Thread cws
Author: cws
Date: Wed Aug 15 05:34:45 2012
New Revision: 1373218

URL: http://svn.apache.org/viewvc?rev=1373218&view=rev
Log:
HIVE-3337. Create Table Like should copy configured Table Parameters (Bhushan 
Mandhani via cws)

Added:
hive/trunk/ql/src/test/queries/clientpositive/create_like2.q
hive/trunk/ql/src/test/results/clientpositive/create_like2.q.out
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
hive/trunk/conf/hive-default.xml.template
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1373218&r1=1373217&r2=1373218&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed 
Aug 15 05:34:45 2012
@@ -311,6 +311,8 @@ public class HiveConf extends Configurat
 
 // Default parameters for creating tables
     NEWTABLEDEFAULTPARA("hive.table.parameters.default", ""),
+    // Parameters to copy over when creating a table with Create Table Like.
+    DDL_CTL_PARAMETERS_WHITELIST("hive.ddl.createtablelike.properties.whitelist", ""),
     METASTORE_RAW_STORE_IMPL("hive.metastore.rawstore.impl",
         "org.apache.hadoop.hive.metastore.ObjectStore"),
     METASTORE_CONNECTION_DRIVER("javax.jdo.option.ConnectionDriverName",

Modified: hive/trunk/conf/hive-default.xml.template
URL: 
http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml.template?rev=1373218&r1=1373217&r2=1373218&view=diff
==
--- hive/trunk/conf/hive-default.xml.template (original)
+++ hive/trunk/conf/hive-default.xml.template Wed Aug 15 05:34:45 2012
@@ -1104,6 +1104,12 @@
 </property>
 
 <property>
+  <name>hive.ddl.createtablelike.properties.whitelist</name>
+  <value></value>
+  <description>Table Properties to copy over when executing a Create Table Like.</description>
+</property>
+
+<property>
   <name>hive.variable.substitute</name>
   <value>true</value>
   <description>This enables substitution using syntax like ${var} ${system:var} and ${env:var}.</description>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1373218&r1=1373217&r2=1373218&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Aug 
15 05:34:45 2012
@@ -32,6 +32,7 @@ import java.io.Writer;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.Date;
@@ -3453,9 +3454,15 @@ public class DDLTask extends Task<DDLWor
 tbl.unsetDataLocation();
   }
 
-      // we should reset table specific parameters including (stats, lastDDLTime etc.)
       Map<String, String> params = tbl.getParameters();
-      params.clear();
+      // We should copy only those table parameters that are specified in the config.
+      String paramsStr = HiveConf.getVar(conf, HiveConf.ConfVars.DDL_CTL_PARAMETERS_WHITELIST);
+      if (paramsStr != null) {
+        List<String> paramsList = Arrays.asList(paramsStr.split(","));
+        params.keySet().retainAll(paramsList);
+      } else {
+        params.clear();
+      }
 
   if (crtTbl.isExternal()) {
        tbl.setProperty("EXTERNAL", "TRUE");

Added: hive/trunk/ql/src/test/queries/clientpositive/create_like2.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/create_like2.q?rev=1373218&view=auto
==
--- hive/trunk/ql/src/test/queries/clientpositive/create_like2.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/create_like2.q Wed Aug 15 
05:34:45 2012
@@ -0,0 +1,9 @@
+-- Tests the copying over of Table Parameters according to a HiveConf setting
+-- when doing a CREATE TABLE LIKE.
+
+CREATE TABLE table1(a INT, b STRING);
+ALTER TABLE table1 SET TBLPROPERTIES ('a'='1', 'b'='2', 'c'='3', 'd' = '4');
+
+SET hive.ddl.createtablelike.properties.whitelist=a,c,D;
+CREATE TABLE table2 LIKE table1;
+DESC FORMATTED table2;
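Note that the whitelist in this test includes 'D', presumably to show that the retainAll-based filtering is case-sensitive. A standalone sketch of that behavior:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class WhitelistSketch {
      public static void main(String[] args) {
        Map<String, String> params = new HashMap<String, String>();
        params.put("a", "1"); params.put("b", "2");
        params.put("c", "3"); params.put("d", "4");
        // The same filtering DDLTask applies for CREATE TABLE LIKE.
        params.keySet().retainAll(Arrays.asList("a,c,D".split(",")));
        System.out.println(params.keySet());  // keeps a and c; 'D' does not match 'd'
      }
    }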

Added: hive/trunk/ql/src/test/results/clientpositive/create_like2.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/create_like2.q.out?rev=1373218&view=auto
==
--- hive/trunk/ql/src/test/results/clientpositive/create_like2.q.out (added)
+++ hive/trunk/ql/src/test/results

svn commit: r1370982 - in /hive/trunk/ql/src/test: queries/clientpositive/query_properties.q results/clientpositive/query_properties.q.out

2012-08-08 Thread cws
Author: cws
Date: Wed Aug  8 21:30:17 2012
New Revision: 1370982

URL: http://svn.apache.org/viewvc?rev=1370982&view=rev
Log:
HIVE-2789. query_properties.q contains non-deterministic queries (Zhenxiao Luo 
via cws)

Modified:
hive/trunk/ql/src/test/queries/clientpositive/query_properties.q
hive/trunk/ql/src/test/results/clientpositive/query_properties.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/query_properties.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/query_properties.q?rev=1370982&r1=1370981&r2=1370982&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/query_properties.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/query_properties.q Wed Aug  8 
21:30:17 2012
@@ -1,14 +1,15 @@
 set hive.exec.post.hooks = 
org.apache.hadoop.hive.ql.hooks.CheckQueryPropertiesHook;
 
-select * from src a join src b on a.key = b.key limit 1;
-select * from src group by src.key, src.value limit 1;
-select * from src order by src.key limit 1;
-select * from src sort by src.key limit 1;
-select a.key, sum(b.value) from src a join src b on a.key = b.key group by 
a.key limit 1;
-select transform(*) using 'cat' from src limit 1;
-select * from src distribute by src.key limit 1;
-select * from src cluster by src.key limit 1;
+select * from src a join src b on a.key = b.key limit 0;
+select * from src group by src.key, src.value limit 0;
+select * from src order by src.key limit 0;
+select * from src sort by src.key limit 0;
+select a.key, sum(b.value) from src a join src b on a.key = b.key group by 
a.key limit 0;
+select transform(*) using 'cat' from src limit 0;
+select * from src distribute by src.key limit 0;
+select * from src cluster by src.key limit 0;
+
+select key, sum(value) from (select a.key as key, b.value as value from src a 
join src b on a.key = b.key) c group by key limit 0;
+select * from src a join src b on a.key = b.key order by a.key limit 0;
+select * from src a join src b on a.key = b.key distribute by a.key sort by 
a.key, b.value limit 0;
 
-select key, sum(value) from (select a.key as key, b.value as value from src a 
join src b on a.key = b.key) c group by key limit 1;
-select * from src a join src b on a.key = b.key order by a.key limit 1;
-select * from src a join src b on a.key = b.key distribute by a.key sort by 
a.key, b.value limit 1;
\ No newline at end of file

Modified: hive/trunk/ql/src/test/results/clientpositive/query_properties.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/query_properties.q.out?rev=1370982&r1=1370981&r2=1370982&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/query_properties.q.out 
(original)
+++ hive/trunk/ql/src/test/results/clientpositive/query_properties.q.out Wed 
Aug  8 21:30:17 2012
@@ -1,4 +1,4 @@
-PREHOOK: query: select * from src a join src b on a.key = b.key limit 1
+PREHOOK: query: select * from src a join src b on a.key = b.key limit 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
  A masked pattern was here 
@@ -10,8 +10,7 @@ Has Group By After Join: false
 Uses Script: false
 Has Distribute By: false
 Has Cluster By: false
-0  val_0   0   val_0
-PREHOOK: query: select * from src group by src.key, src.value limit 1
+PREHOOK: query: select * from src group by src.key, src.value limit 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
  A masked pattern was here 
@@ -23,8 +22,7 @@ Has Group By After Join: false
 Uses Script: false
 Has Distribute By: false
 Has Cluster By: false
-0  val_0
-PREHOOK: query: select * from src order by src.key limit 1
+PREHOOK: query: select * from src order by src.key limit 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
  A masked pattern was here 
@@ -36,8 +34,7 @@ Has Group By After Join: false
 Uses Script: false
 Has Distribute By: false
 Has Cluster By: false
-0  val_0
-PREHOOK: query: select * from src sort by src.key limit 1
+PREHOOK: query: select * from src sort by src.key limit 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
  A masked pattern was here 
@@ -49,8 +46,7 @@ Has Group By After Join: false
 Uses Script: false
 Has Distribute By: false
 Has Cluster By: false
-0  val_0
-PREHOOK: query: select a.key, sum(b.value) from src a join src b on a.key = 
b.key group by a.key limit 1
+PREHOOK: query: select a.key, sum(b.value) from src a join src b on a.key = 
b.key group by a.key limit 0
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
  A masked pattern was here 
@@ -62,8 +58,7 @@ Has Group By After Join: true
 Uses Script: false
 Has Distribute By: false
 Has Cluster By: false
-0  0.0
-PREHOOK: query: select transform(*) using 'cat' from src limit 1
+PREHOOK: query: select transform(*) using 'cat' from src limit 0
 PREHOOK: type: QUERY
 PREHOOK

svn commit: r1367708 - /hive/trunk/ql/build.xml

2012-07-31 Thread cws
Author: cws
Date: Tue Jul 31 19:15:06 2012
New Revision: 1367708

URL: http://svn.apache.org/viewvc?rev=1367708&view=rev
Log:
HIVE-3273 [jira] Add avro jars into hive execution classpath
(Zhenxiao Luo via Carl Steinbach)

Summary:
HIVE-3273: Add avro jars into hive execution classpath

avro*.jar should be added to hive execution classpath

Test Plan: EMPTY

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D4209

Modified:
hive/trunk/ql/build.xml

Modified: hive/trunk/ql/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/build.xml?rev=1367708&r1=1367707&r2=1367708&view=diff
==
--- hive/trunk/ql/build.xml (original)
+++ hive/trunk/ql/build.xml Tue Jul 31 19:15:06 2012
@@ -195,6 +195,18 @@
       <exclude name="meta-inf/manifest.mf"/>
     </patternset>
   </unzip>
+  <unzip src="${build.ivy.lib.dir}/default/avro-${avro.version}.jar" dest="${build.dir.hive}/avro/classes">
+    <patternset>
+      <exclude name="META-INF"/>
+      <exclude name="META-INF/MANIFEST.MF"/>
+    </patternset>
+  </unzip>
+  <unzip src="${build.ivy.lib.dir}/default/avro-mapred-${avro.version}.jar" dest="${build.dir.hive}/avro-mapred/classes">
+    <patternset>
+      <exclude name="META-INF"/>
+      <exclude name="META-INF/MANIFEST.MF"/>
+    </patternset>
+  </unzip>
   <!-- <jar jarfile="${build.dir}/hive_${name}.jar" basedir="${build.classes}" /> -->
   <jar jarfile="${build.dir}/hive-exec-${version}.jar">
     <fileset dir="${build.dir.hive}/common/classes" includes="**/*.class"/>
@@ -203,6 +215,8 @@
     <fileset dir="${build.dir.hive}/thrift/classes" includes="**/*.class"/>
     <fileset dir="${build.dir.hive}/commons-lang/classes" includes="**/StringUtils.class,**/WordUtils.class"/>
     <fileset dir="${build.dir.hive}/json/classes" includes="**/*.class"/>
+    <fileset dir="${build.dir.hive}/avro/classes" includes="**/*.class"/>
+    <fileset dir="${build.dir.hive}/avro-mapred/classes" includes="**/*.class"/>
     <fileset dir="${build.dir.hive}/shims/classes" includes="**/*.class"/>
     <fileset dir="${build.dir.hive}/javaewah/classes" includes="**/*.class"/>
     <manifest>




svn commit: r1365371 - /hive/trunk/ivy/libraries.properties

2012-07-24 Thread cws
Author: cws
Date: Wed Jul 25 00:50:22 2012
New Revision: 1365371

URL: http://svn.apache.org/viewvc?rev=1365371&view=rev
Log:
HIVE-3249 [jira] Upgrade guava to 11.0.2
(Zhenxiao Luo via Carl Steinbach)

Summary:
HIVE-3249: Upgrade guava to 11.0.2

Hadoop has upgraded to this new version of Guava. We should, too, so we don't 
have compatibility issues running on Hadoop 2.0+

currently, hive is using guava-r09.jar
we should update it to guava-11.0.2.jar

Test Plan: EMPTY

Reviewers: JIRA

Differential Revision: https://reviews.facebook.net/D4083

Modified:
hive/trunk/ivy/libraries.properties

Modified: hive/trunk/ivy/libraries.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/ivy/libraries.properties?rev=1365371&r1=1365370&r2=1365371&view=diff
==
--- hive/trunk/ivy/libraries.properties (original)
+++ hive/trunk/ivy/libraries.properties Wed Jul 25 00:50:22 2012
@@ -42,7 +42,7 @@ commons-logging.version=1.0.4
 commons-logging-api.version=1.0.4
 commons-pool.version=1.5.4
 derby.version=10.4.2.0
-guava.version=r09
+guava.version=11.0.2
 hbase.version=0.92.0
 jackson.version=1.8.8
 javaewah.version=0.3.2




svn commit: r1356516 - in /hive/trunk: ./ bin/ext/ eclipse-templates/ ivy/ jdbc/ jdbc/src/java/org/apache/hive/ jdbc/src/java/org/apache/hive/jdbc/ jdbc/src/java/org/apache/hive/jdbc/beeline/

2012-07-02 Thread cws
Author: cws
Date: Mon Jul  2 23:07:07 2012
New Revision: 1356516

URL: http://svn.apache.org/viewvc?rev=1356516&view=rev
Log:
HIVE-3100. Add HiveCLI that runs over JDBC (Prasad Mujumdar via cws)

Added:
hive/trunk/bin/ext/beeline.sh   (with props)
hive/trunk/eclipse-templates/HiveBeeLine.launchtemplate
hive/trunk/jdbc/src/java/org/apache/hive/
hive/trunk/jdbc/src/java/org/apache/hive/jdbc/
hive/trunk/jdbc/src/java/org/apache/hive/jdbc/beeline/
hive/trunk/jdbc/src/java/org/apache/hive/jdbc/beeline/HiveBeeline.java   
(with props)
hive/trunk/jdbc/src/java/org/apache/hive/jdbc/beeline/OptionsProcessor.java 
  (with props)
Modified:
hive/trunk/LICENSE
hive/trunk/NOTICE
hive/trunk/eclipse-templates/.classpath
hive/trunk/ivy/ivysettings.xml
hive/trunk/ivy/libraries.properties
hive/trunk/jdbc/ivy.xml

Modified: hive/trunk/LICENSE
URL: 
http://svn.apache.org/viewvc/hive/trunk/LICENSE?rev=1356516&r1=1356515&r2=1356516&view=diff
==
--- hive/trunk/LICENSE (original)
+++ hive/trunk/LICENSE Mon Jul  2 23:07:07 2012
@@ -416,6 +416,21 @@ LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 OF THE POSSIBILITY OF SUCH DAMAGE.
 
+For the SQLLine package:
+
+Copyright (c) 2002, 2003, 2004, 2005 Marc Prud'hommeaux
+
+From: http://sqlline.sourceforge.net/#license
+
+SQLLine is distributed under the BSD License, meaning that you are free to 
redistribute, modify, or sell the software with almost no restrictions.
+
+Statement from Marc Prud'hommeaux regarding inconsistent licenses in some 
SQLLine source files:
+
+ SQLLine was once GPL, but it was changed to be BSD a few years back.
+ Any references to the GPL are vestigial. Hopefully the license
+ declaration at http://sqlline.sourceforge.net/#license is sufficiently
+ authoritative in this regard.
+
 
 For the SLF4J library:
 
@@ -465,4 +480,4 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE F
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
- 
\ No newline at end of file
+ 

Modified: hive/trunk/NOTICE
URL: 
http://svn.apache.org/viewvc/hive/trunk/NOTICE?rev=1356516&r1=1356515&r2=1356516&view=diff
==
--- hive/trunk/NOTICE (original)
+++ hive/trunk/NOTICE Mon Jul  2 23:07:07 2012
@@ -22,6 +22,9 @@ Copyright (c) 2002 JSON.org
 This product includes/uses JLine (http://jline.sourceforge.net/),
 Copyright (c) 2002-2006, Marc Prud'hommeaux m...@cornell.edu.
 
+This product includes/uses SQLLine (http://sqlline.sourceforge.net),
+Copyright (c) 2002, 2003, 2004, 2005 Marc Prud'hommeaux m...@cornell.edu.
+
 This product includes/uses SLF4J (http://www.slf4j.org/),
 Copyright (c) 2004-2008 QOS.ch
 
@@ -29,4 +32,4 @@ This product includes/uses Bootstrap (ht
 Copyright (c) 2012 Twitter, Inc.
 
 This product includes/uses Glyphicons (http://glyphicons.com/),
-Copyright (c) 2010 - 2012 Jan Kovarík
\ No newline at end of file
+Copyright (c) 2010 - 2012 Jan Kovarík

Added: hive/trunk/bin/ext/beeline.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/bin/ext/beeline.sh?rev=1356516&view=auto
==
--- hive/trunk/bin/ext/beeline.sh (added)
+++ hive/trunk/bin/ext/beeline.sh Mon Jul  2 23:07:07 2012
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Need arguments [host [port [db]]]
+THISSERVICE=beeline
+export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
+
+beeline () {
+
+  CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+  execHiveCmd $CLASS "$@"
+}
+
+beeline_help () {
+  CLASS=org.apache.hive.jdbc.beeline.HiveBeeline;
+  execHiveCmd $CLASS --help
+} 
+

Propchange: hive/trunk/bin/ext/beeline.sh
--
svn:eol-style = native

Modified: hive/trunk/eclipse-templates/.classpath
URL: 
http://svn.apache.org/viewvc/hive/trunk/eclipse-templates/.classpath?rev=1356516&r1=1356515&r2=1356516&view=diff

svn commit: r1353720 [1/3] - in /hive/trunk: ./ metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/ serde/src

2012-06-25 Thread cws
Author: cws
Date: Mon Jun 25 19:59:44 2012
New Revision: 1353720

URL: http://svn.apache.org/viewvc?rev=1353720&view=rev
Log:
Hive thrift code doesn't generate quality hashCode()
(Liu Tim via Carl Steinbach)

Summary:

Hive thrift code doesn't generate a quality hashCode(). Right now, all of the generated classes have
@Override
public int hashCode() { return 0; }

This causes all entries to go to the same bucket if the object is used as a hash key.
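A quick illustration of why a constant hashCode() hurts (standalone sketch, not the generated Thrift code):

    import java.util.HashSet;
    import java.util.Set;

    public class ConstantHashCodeSketch {
      static final class Key {
        final int id;
        Key(int id) { this.id = id; }
        @Override public boolean equals(Object o) {
          return o instanceof Key && ((Key) o).id == id;
        }
        @Override public int hashCode() { return 0; }  // every key lands in one bucket
      }

      public static void main(String[] args) {
        Set<Key> set = new HashSet<Key>();
        for (int i = 0; i < 20000; i++) {
          set.add(new Key(i));  // each add degenerates to a linear bucket scan
        }
        System.out.println(set.size());  // correct result, but quadratic time overall
      }
    }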


Test Plan:
ant clean package test

pass 1946 tests

Reviewers: njain, kevinwilfong, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D3759

Modified:
hive/trunk/build-common.xml

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/AlreadyExistsException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ConfigValSecurityException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Constants.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Database.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/FieldSchema.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectPrivilege.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/HiveObjectRef.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/IndexAlreadyExistsException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidObjectException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidOperationException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/InvalidPartitionException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/MetaException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NoSuchObjectException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Order.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrincipalPrivilegeSet.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeBag.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/PrivilegeGrantInfo.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Role.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/SerDeInfo.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/StorageDescriptor.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Type.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownDBException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownPartitionException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/UnknownTableException.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Version.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Adjacency.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Graph.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Query.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/QueryPlan.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Stage.java

hive/trunk/ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Task.java

hive/trunk/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/Constants.java

hive/trunk/service/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/service/HiveClusterStatus.java

hive/trunk/service/src

svn commit: r1352719 - in /hive/trunk/ql/src/test: queries/clientpositive/autogen_colalias.q results/clientpositive/autogen_colalias.q.out

2012-06-21 Thread cws
Author: cws
Date: Thu Jun 21 22:39:30 2012
New Revision: 1352719

URL: http://svn.apache.org/viewvc?rev=1352719&view=rev
Log:
HIVE-3161. A minor test update
(Namit Jain via Carl Steinbach)

Summary: The correct long term fix is HIVE-3160

Test Plan: manual

Differential Revision: https://reviews.facebook.net/D3723

Modified:
hive/trunk/ql/src/test/queries/clientpositive/autogen_colalias.q
hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out

Modified: hive/trunk/ql/src/test/queries/clientpositive/autogen_colalias.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/autogen_colalias.q?rev=1352719&r1=1352718&r2=1352719&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/autogen_colalias.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/autogen_colalias.q Thu Jun 21 
22:39:30 2012
@@ -20,3 +20,6 @@ describe dest_grouped_new1;
 
 create table dest_grouped_new2 as select distinct src.key from src;
 describe dest_grouped_new2;
+
+-- Drop the temporary function at the end till HIVE-3160 gets fixed
+DROP TEMPORARY FUNCTION test_max;

Modified: hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out?rev=1352719&r1=1352718&r2=1352719&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out 
(original)
+++ hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out Thu 
Jun 21 22:39:30 2012
@@ -80,3 +80,9 @@ PREHOOK: type: DESCTABLE
 POSTHOOK: query: describe dest_grouped_new2
 POSTHOOK: type: DESCTABLE
key                 string
+PREHOOK: query: -- Drop the temporary function at the end till HIVE-3160 gets 
fixed
+DROP TEMPORARY FUNCTION test_max
+PREHOOK: type: DROPFUNCTION
+POSTHOOK: query: -- Drop the temporary function at the end till HIVE-3160 gets 
fixed
+DROP TEMPORARY FUNCTION test_max
+POSTHOOK: type: DROPFUNCTION




svn commit: r1351859 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/metadata/Partition.java java/org/apache/hadoop/hive/ql/metadata/Table.java test/queries/clientpositive/protectmode2.q test/

2012-06-19 Thread cws
Author: cws
Date: Tue Jun 19 21:14:09 2012
New Revision: 1351859

URL: http://svn.apache.org/viewvc?rev=1351859&view=rev
Log:
Fixing bug in removing ProtectMode from a Table
(Bhushan Mandhani via Carl Steinbach)

Summary:
When ProtectMode has every member set to the default false value, the 
toString() method returns null. When this happens, we should realize that the 
PROTECT_MODE parameter is
no longer needed and should be removed for the Table or Partition. Currently, 
we try to persist this null value and get an error.
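
For context, a sketch of the toString() behavior described above, on a hypothetical ProtectMode-like class (not the Hive source):

// Hypothetical illustration: when every flag keeps its default false
// value there is nothing to serialize, so toString() yields null and
// callers must remove the PROTECT_MODE parameter instead of persisting it.
public class ProtectModeLike {
  public boolean offline = false;
  public boolean noDrop = false;
  public boolean readOnly = false;

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    if (offline) {
      sb.append("OFFLINE,");
    }
    if (noDrop) {
      sb.append("NO_DROP,");
    }
    if (readOnly) {
      sb.append("READ_ONLY,");
    }
    return sb.length() == 0 ? null : sb.substring(0, sb.length() - 1);
  }
}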

Test Plan: Ran Hive. Checked that alter table some_table disable NO_DROP 
CASCADE successfully removed the PROTECT_MODE parameter and did not give any 
error. Running unit tests.

Reviewers: njain, kevinwilfong, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D3615

Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
hive/trunk/ql/src/test/queries/clientpositive/protectmode2.q
hive/trunk/ql/src/test/results/clientpositive/protectmode2.q.out

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java?rev=1351859&r1=1351858&r2=1351859&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Partition.java 
Tue Jun 19 21:14:09 2012
@@ -544,7 +544,12 @@ public class Partition implements Serial
*/
   public void setProtectMode(ProtectMode protectMode){
 Map<String, String> parameters = tPartition.getParameters();
-parameters.put(ProtectMode.PARAMETER_NAME, protectMode.toString());
+String pm = protectMode.toString();
+if (pm != null) {
+  parameters.put(ProtectMode.PARAMETER_NAME, pm);
+} else {
+  parameters.remove(ProtectMode.PARAMETER_NAME);
+}
 tPartition.setParameters(parameters);
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=1351859&r1=1351858&r2=1351859&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Tue 
Jun 19 21:14:09 2012
@@ -767,7 +767,12 @@ public class Table implements Serializab
*/
   public void setProtectMode(ProtectMode protectMode){
 Map<String, String> parameters = tTable.getParameters();
-parameters.put(ProtectMode.PARAMETER_NAME, protectMode.toString());
+String pm = protectMode.toString();
+if (pm != null) {
+  parameters.put(ProtectMode.PARAMETER_NAME, pm);
+} else {
+  parameters.remove(ProtectMode.PARAMETER_NAME);
+}
 tTable.setParameters(parameters);
   }
 

Modified: hive/trunk/ql/src/test/queries/clientpositive/protectmode2.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/protectmode2.q?rev=1351859&r1=1351858&r2=1351859&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/protectmode2.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/protectmode2.q Tue Jun 19 
21:14:09 2012
@@ -17,6 +17,7 @@ alter table tbl2 add partition (p='p1');
 alter table tbl2 add partition (p='p2');
 alter table tbl2 add partition (p='p3');
 alter table tbl2 enable no_drop cascade;
-desc extended tbl2;
+desc formatted tbl2;
 alter table tbl2 disable no_drop cascade;
+desc formatted tbl2;
 drop table tbl2;

Modified: hive/trunk/ql/src/test/results/clientpositive/protectmode2.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/protectmode2.q.out?rev=1351859&r1=1351858&r2=1351859&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/protectmode2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/protectmode2.q.out Tue Jun 19 
21:14:09 2012
@@ -106,14 +106,40 @@ POSTHOOK: query: alter table tbl2 enable
 POSTHOOK: type: ALTERTABLE_PROTECTMODE
 POSTHOOK: Input: default@tbl2
 POSTHOOK: Output: default@tbl2
-PREHOOK: query: desc extended tbl2
+PREHOOK: query: desc formatted tbl2
 PREHOOK: type: DESCTABLE
-POSTHOOK: query: desc extended tbl2
+POSTHOOK: query: desc formatted tbl2
 POSTHOOK: type: DESCTABLE
-col        string
-p          string
+# col_name            data_type            comment
 
+col                   string               None
+
+# Partition Information 
+# col_name

svn commit: r1351467 - in /hive/trunk: cli/ivy.xml contrib/ivy.xml hbase-handler/ivy.xml hwi/ivy.xml jdbc/ivy.xml metastore/ivy.xml pdk/ivy.xml ql/ivy.xml serde/ivy.xml service/ivy.xml shims/ivy.xml

2012-06-18 Thread cws
Author: cws
Date: Mon Jun 18 19:54:03 2012
New Revision: 1351467

URL: http://svn.apache.org/viewvc?rev=1351467&view=rev
Log:
HIVE-3019 [jira] Add JUnit to list of test dependencies managed by Ivy

Summary: HIVE-3019. Add JUnit to list of test dependencies managed by Ivy

Test Plan: EMPTY

Reviewers: JIRA, ashutoshc

Reviewed By: ashutoshc

Differential Revision: https://reviews.facebook.net/D3171

Modified:
hive/trunk/cli/ivy.xml
hive/trunk/contrib/ivy.xml
hive/trunk/hbase-handler/ivy.xml
hive/trunk/hwi/ivy.xml
hive/trunk/jdbc/ivy.xml
hive/trunk/metastore/ivy.xml
hive/trunk/pdk/ivy.xml
hive/trunk/ql/ivy.xml
hive/trunk/serde/ivy.xml
hive/trunk/service/ivy.xml
hive/trunk/shims/ivy.xml

Modified: hive/trunk/cli/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/cli/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/cli/ivy.xml (original)
+++ hive/trunk/cli/ivy.xml Mon Jun 18 19:54:03 2012
@@ -48,6 +48,9 @@
                conf="runtime" transitive="false"/>
 
     <!-- Test Dependencies -->
-    <dependency org="org.mockito" name="mockito-all" rev="${mockito-all.version}" />
+    <dependency org="junit" name="junit"
+        rev="${junit.version}" conf="test-default" />
+    <dependency org="org.mockito" name="mockito-all"
+        rev="${mockito-all.version}" conf="test-default" />
   </dependencies>
 </ivy-module>

Modified: hive/trunk/contrib/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/contrib/ivy.xml (original)
+++ hive/trunk/contrib/ivy.xml Mon Jun 18 19:54:03 2012
@@ -36,5 +36,9 @@
        transitive="false"/>
     <dependency org="commons-codec" name="commons-codec" rev="${commons-codec.version}"
        transitive="false"/>
+
+    <!-- Test Dependencies -->
+    <dependency org="junit" name="junit" rev="${junit.version}" conf="test-default" />
+
   </dependencies>
 </ivy-module>

Modified: hive/trunk/hbase-handler/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/hbase-handler/ivy.xml (original)
+++ hive/trunk/hbase-handler/ivy.xml Mon Jun 18 19:54:03 2012
@@ -46,5 +46,9 @@
        transitive="false"/>
     <dependency org="org.codehaus.jackson" name="jackson-jaxrs" rev="${jackson.version}"/>
     <dependency org="org.codehaus.jackson" name="jackson-xc" rev="${jackson.version}"/>
+
+    <!-- Test Dependencies -->
+    <dependency org="junit" name="junit" rev="${junit.version}" conf="test-default" />
+
   </dependencies>
 </ivy-module>

Modified: hive/trunk/hwi/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/hwi/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/hwi/ivy.xml (original)
+++ hive/trunk/hwi/ivy.xml Mon Jun 18 19:54:03 2012
@@ -33,6 +33,9 @@
        transitive="false"/>
     <dependency org="commons-logging" name="commons-logging-api" rev="${commons-logging-api.version}"
        transitive="false"/>
+
+    <!-- Test Dependencies -->
+    <dependency org="junit" name="junit" rev="${junit.version}" conf="test-default" />
     <dependency org="org.apache.hive" name="hive-builtins" rev="${version}"
        conf="test-default" transitive="false"/>
     <dependency org="commons-httpclient" name="commons-httpclient" rev="${commons-httpclient.version}"

Modified: hive/trunk/jdbc/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/jdbc/ivy.xml (original)
+++ hive/trunk/jdbc/ivy.xml Mon Jun 18 19:54:03 2012
@@ -33,5 +33,9 @@
        transitive="false"/>
     <dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}"
        transitive="false"/>
+
+    <!-- Test Dependencies -->
+    <dependency org="junit" name="junit" rev="${junit.version}" conf="test-default" />
+
   </dependencies>
 </ivy-module>

Modified: hive/trunk/metastore/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/ivy.xml?rev=1351467&r1=1351466&r2=1351467&view=diff
==
--- hive/trunk/metastore/ivy.xml (original)
+++ hive/trunk/metastore/ivy.xml Mon Jun 18 19:54:03 2012
@@ -62,5 +62,9 @@
     <dependency org="org.apache.thrift" name="libfb303" rev="${libfb303.version}" transitive="false" />
 
     <dependency org="asm" name="asm" rev="${asm.version}"/>
+
+    <!-- Test Dependencies -->
+    <dependency org="junit" name="junit" rev="${junit.version}" conf="test-default" />
+
   </dependencies>
 </ivy-module>

Modified: hive/trunk/pdk/ivy.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/pdk/ivy.xml?rev=1351467&r1=1351466&r2

svn commit: r1351174 - in /hive/trunk: metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

2012-06-17 Thread cws
Author: cws
Date: Sun Jun 17 22:22:13 2012
New Revision: 1351174

URL: http://svn.apache.org/viewvc?rev=1351174&view=rev
Log:
HIVE-3134 Drop table/index/database can result in orphaned locations
(Kevin Wilfong via Carl Steinbach)

Summary:
Today, when a managed table has a partition whose location is not a 
subdirectory of the table's location, dropping the table does not delete the 
partition's data from HDFS, resulting in an orphaned directory (the data 
exists but nothing points to it).

The same applies to dropping a database with cascade when one of its tables has 
a location outside the database.

I think it is safe to assume managed tables/partitions own the directories they 
point to, so we should clean these up.
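
The patch below relies on an isSubdirectory(databasePath, tablePath) helper whose body is not shown in this excerpt; a minimal sketch of such a check, under the assumption that both paths are fully qualified, might look like:

import org.apache.hadoop.fs.Path;

public final class SubdirCheck {
  // Assumed illustration: a location survives deletion of its parent
  // only if it is NOT underneath the parent, so such paths must be
  // collected and removed explicitly when the parent is dropped.
  static boolean isSubdirectory(Path parent, Path other) {
    // The trailing separator keeps "/wh/db1-old" from looking like
    // a child of "/wh/db1".
    return other.toString().startsWith(parent.toString() + Path.SEPARATOR);
  }

  public static void main(String[] args) {
    Path db = new Path("hdfs://nn:8020/user/hive/warehouse/db1.db");
    System.out.println(isSubdirectory(db,
        new Path("hdfs://nn:8020/user/hive/warehouse/db1.db/t1"))); // true
    System.out.println(isSubdirectory(db,
        new Path("hdfs://nn:8020/data/external/t1"))); // false: orphan candidate
  }
}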

Test Plan: Added test cases to verify that the directories which would be 
orphaned are cleaned up by drop table/index/database.

Reviewers: JIRA, njain, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D3633

Added:

hive/trunk/ql/src/test/queries/clientpositive/drop_database_removes_partition_dirs.q

hive/trunk/ql/src/test/queries/clientpositive/drop_index_removes_partition_dirs.q

hive/trunk/ql/src/test/queries/clientpositive/drop_table_removes_partition_dirs.q

hive/trunk/ql/src/test/results/clientpositive/drop_database_removes_partition_dirs.q.out

hive/trunk/ql/src/test/results/clientpositive/drop_index_removes_partition_dirs.q.out

hive/trunk/ql/src/test/results/clientpositive/drop_table_removes_partition_dirs.q.out
Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1351174&r1=1351173&r2=1351174&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Sun Jun 17 22:22:13 2012
@@ -606,6 +606,8 @@ public class HiveMetaStore extends Thrif
 IOException {
   boolean success = false;
   Database db = null;
+  List<Path> tablePaths = new ArrayList<Path>();
+  List<Path> partitionPaths = new ArrayList<Path>();
   try {
 ms.openTransaction();
 db = ms.getDatabase(name);
@@ -624,6 +626,60 @@ public class HiveMetaStore extends Thrif
   path + " is not writable by " +
   hiveConf.getUser());
 }
+
+Path databasePath = wh.getDnsPath(wh.getDatabasePath(db));
+
+// first drop tables
+int tableBatchSize = HiveConf.getIntVar(hiveConf,
+ConfVars.METASTORE_BATCH_RETRIEVE_MAX);
+
+int startIndex = 0;
+int endIndex = -1;
+// retrieve the tables from the metastore in batches to alleviate memory constraints
+while (endIndex < allTables.size() - 1) {
+  startIndex = endIndex + 1;
+  endIndex = endIndex + tableBatchSize;
+  if (endIndex >= allTables.size()) {
+endIndex = allTables.size() - 1;
+  }
+
+  List<Table> tables = null;
+  try {
+    tables = ms.getTableObjectsByName(name, allTables.subList(startIndex, endIndex));
+  } catch (UnknownDBException e) {
+    throw new MetaException(e.getMessage());
+  }
+
+  if (tables != null && !tables.isEmpty()) {
+for (Table table : tables) {
+
+  // If the table is not external and it might not be in a subdirectory of the database
+  // add its locations to the list of paths to delete
+  Path tablePath = null;
+  if (table.getSd().getLocation() != null && !isExternal(table)) {
+    tablePath = wh.getDnsPath(new Path(table.getSd().getLocation()));
+    if (!wh.isWritable(tablePath.getParent())) {
+      throw new MetaException("Database metadata not deleted since table: " +
+          table.getTableName() + " has a parent location " + tablePath.getParent() +
+          " which is not writable by " + hiveConf.getUser());
+    }
+
+if (!isSubdirectory(databasePath, tablePath)) {
+  tablePaths.add(tablePath);
+}
+  }
+
+  // For each partition in each table, drop the partitions and get 
a list of
+  // partitions' locations which might need to be deleted
+  partitionPaths = dropPartitionsAndGetLocations(ms, name, table.getTableName(),
+      tablePath, table.getPartitionKeys(), deleteData && !isExternal(table));
+
+  // Drop the table but not its data
+  drop_table(name

svn commit: r1348995 - in /hive/trunk: ./ eclipse-templates/

2012-06-11 Thread cws
Author: cws
Date: Mon Jun 11 20:32:40 2012
New Revision: 1348995

URL: http://svn.apache.org/viewvc?rev=1348995&view=rev
Log:
HIVE-3013 [jira] TestCliDriver cannot be debugged with eclipse since 
hadoop_home is set wrongly

Summary:
HIVE-3013

fix typo

cp Fix

cp

Test Plan: EMPTY

Reviewers: JIRA, njain, kevinwilfong

Differential Revision: https://reviews.facebook.net/D3555

Added:
hive/trunk/eclipse-templates/HiveServer.launchtemplate
  - copied, changed from r1348993, 
hive/trunk/eclipse-templates/HiveCLI.launchtemplate
Modified:
hive/trunk/build.properties
hive/trunk/build.xml
hive/trunk/eclipse-templates/.classpath
hive/trunk/eclipse-templates/HiveCLI.launchtemplate
hive/trunk/eclipse-templates/TestCliDriver.launchtemplate
hive/trunk/eclipse-templates/TestEmbeddedHiveMetaStore.launchtemplate
hive/trunk/eclipse-templates/TestHBaseCliDriver.launchtemplate
hive/trunk/eclipse-templates/TestHive.launchtemplate
hive/trunk/eclipse-templates/TestHiveMetaStoreChecker.launchtemplate
hive/trunk/eclipse-templates/TestJdbc.launchtemplate
hive/trunk/eclipse-templates/TestMTQueries.launchtemplate
hive/trunk/eclipse-templates/TestRemoteHiveMetaStore.launchtemplate
hive/trunk/eclipse-templates/TestTruncate.launchtemplate

Modified: hive/trunk/build.properties
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1348995&r1=1348994&r2=1348995&view=diff
==
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Mon Jun 11 20:32:40 2012
@@ -98,3 +98,9 @@ mvn.license.dir=${build.dir.hive}/maven/
 #
 datanucleus.repo=http://www.datanucleus.org/downloads/maven2
 
+#
+# Eclipse Properties
+#
+
+# JVM arguments for Eclipse launch configurations
+eclipse.launch.jvm.args=-Xms256m -Xmx1024m

Modified: hive/trunk/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1348995&r1=1348994&r2=1348995&view=diff
==
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Mon Jun 11 20:32:40 2012
@@ -577,6 +577,14 @@
     <matches string="${hadoop.version.ant-internal}" pattern="^0\.20\..*" />
   </condition>
 
+
+  <!-- Construct the classpath needed for testutils/hadoop launch script -->
+  <path id="hive.hadoop.test.classpath">
+    <fileset dir="${hive.root}/build/ivy/lib/hadoop0.${hadoop.mr.rev}.shim" includes="*.jar" />
+    <fileset dir="${hive.root}/build/ivy/lib/default" includes="*.jar" excludes="hive*.jar" />
+  </path>
+  <property name="hive.hadoop.test.classpath" refid="hive.hadoop.test.classpath"/>
+
   <pathconvert property="eclipse.project">
     <path path="${basedir}"/>
     <regexpmapper from="^.*/([^/]+)$$" to="\1" handledirsep="yes"/>
@@ -595,6 +603,9 @@
       <filter token="JETTYUTILJAR" value="${jetty.util.jar}"/>
       <filter token="SERVLETAPIJAR" value="${servlet-api.jar}"/>
       <filter token="HIVE_VERSION" value="${version}"/>
+      <filter token="HIVE_HADOOP_TEST_CLASSPATH" value="${hive.hadoop.test.classpath}"/>
+      <filter token="HADOOP_BIN_PATH" value="${hive.root}/testutils/hadoop"/>
+      <filter token="JVM_ARGS" value="${eclipse.launch.jvm.args}"/>
     </filterset>
   </copy>
   <move todir="." includeemptydirs="false">

Modified: hive/trunk/eclipse-templates/.classpath
URL: 
http://svn.apache.org/viewvc/hive/trunk/eclipse-templates/.classpath?rev=1348995&r1=1348994&r2=1348995&view=diff
==
--- hive/trunk/eclipse-templates/.classpath (original)
+++ hive/trunk/eclipse-templates/.classpath Mon Jun 11 20:32:40 2012
@@ -46,6 +46,8 @@
   <classpathentry kind="lib" path="build/ivy/lib/hadoop0.20.shim/oro-2.0.8.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/hadoop0.20.shim/servlet-api-2.5-6.1.14.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/hadoop0.20.shim/xmlenc-0.52.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/avro-@avro.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/avro-mapred-@avro.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/jline-@jline.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/json-@json.version@.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/asm-@asm.version@.jar"/>

Modified: hive/trunk/eclipse-templates/HiveCLI.launchtemplate
URL: 
http://svn.apache.org/viewvc/hive/trunk/eclipse-templates/HiveCLI.launchtemplate?rev=1348995&r1=1348994&r2=1348995&view=diff
==
--- hive/trunk/eclipse-templates/HiveCLI.launchtemplate (original)
+++ hive/trunk/eclipse-templates/HiveCLI.launchtemplate Mon Jun 11 20:32:40 2012
@@ -20,7 +20,7 @@
   <booleanAttribute key="org.eclipse.debug.core.appendEnvironmentVariables" value="false"/>
   <mapAttribute key="org.eclipse.debug.core.environmentVariables">
     <mapEntry key="JAVA_HOME" value="${system_property:java.home

svn commit: r1347396 - /hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql

2012-06-06 Thread cws
Author: cws
Date: Thu Jun  7 05:27:36 2012
New Revision: 1347396

URL: http://svn.apache.org/viewvc?rev=1347396&view=rev
Log:
Oracle Metastore schema script doesn't include DDL for DN internal tables (Carl 
Steinbach via cws)

Modified:
hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql

Modified: 
hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql?rev=1347396&r1=1347395&r2=1347396&view=diff
==
--- hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql 
(original)
+++ hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql 
Thu Jun  7 05:27:36 2012
@@ -1,25 +1,32 @@
---
--- DataNucleus SchemaTool (version 2.0.3) ran at 10/08/2011 21:27:47
---
--- org.apache.hadoop.hive.metastore.model.MDatabase
--- org.apache.hadoop.hive.metastore.model.MFieldSchema
--- org.apache.hadoop.hive.metastore.model.MType
--- org.apache.hadoop.hive.metastore.model.MTable
--- org.apache.hadoop.hive.metastore.model.MSerDeInfo
--- org.apache.hadoop.hive.metastore.model.MOrder
--- org.apache.hadoop.hive.metastore.model.MStorageDescriptor
--- org.apache.hadoop.hive.metastore.model.MPartition
--- org.apache.hadoop.hive.metastore.model.MIndex
--- org.apache.hadoop.hive.metastore.model.MRole
--- org.apache.hadoop.hive.metastore.model.MRoleMap
--- org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
--- org.apache.hadoop.hive.metastore.model.MDBPrivilege
--- org.apache.hadoop.hive.metastore.model.MTablePrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionPrivilege
--- org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionEvent
---
+
+
+-- Table SEQUENCE_TABLE is an internal table required by DataNucleus.
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE SEQUENCE_TABLE
+(
+   SEQUENCE_NAME VARCHAR2(255) NOT NULL,
+   NEXT_VAL NUMBER NOT NULL
+);
+
+ALTER TABLE SEQUENCE_TABLE ADD CONSTRAINT PART_TABLE_PK PRIMARY KEY 
(SEQUENCE_NAME);
+
+-- Table NUCLEUS_TABLES is an internal table required by DataNucleus.
+-- This table is required if datanucleus.autoStartMechanism=SchemaTable
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE NUCLEUS_TABLES
+(
+   CLASS_NAME VARCHAR2(128) NOT NULL,
+   TABLE_NAME VARCHAR2(128) NOT NULL,
+   TYPE VARCHAR2(4) NOT NULL,
+   OWNER VARCHAR2(2) NOT NULL,
+   VERSION VARCHAR2(20) NOT NULL,
+   INTERFACE_NAME VARCHAR2(255) NULL
+);
+
+ALTER TABLE NUCLEUS_TABLES ADD CONSTRAINT NUCLEUS_TABLES_PK PRIMARY KEY 
(CLASS_NAME);
+
 -- Table PART_COL_PRIVS for classes 
[org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
 CREATE TABLE PART_COL_PRIVS
 (




svn commit: r1347397 - in /hive/branches/branch-0.9/metastore/scripts/upgrade/oracle: ./ hive-schema-0.9.0.oracle.sql

2012-06-06 Thread cws
Author: cws
Date: Thu Jun  7 05:36:15 2012
New Revision: 1347397

URL: http://svn.apache.org/viewvc?rev=1347397&view=rev
Log:
HIVE-2928. Support for Oracle-backed Hive-Metastore (longvarchar to clob in 
package.jdo)

Added:
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/

hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
   (with props)

Added: 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql?rev=1347397&view=auto
==
--- 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
 (added)
+++ 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
 Thu Jun  7 05:36:15 2012
@@ -0,0 +1,577 @@
+--
+-- DataNucleus SchemaTool (version 2.0.3) ran at 10/08/2011 21:27:47
+--
+-- org.apache.hadoop.hive.metastore.model.MDatabase
+-- org.apache.hadoop.hive.metastore.model.MFieldSchema
+-- org.apache.hadoop.hive.metastore.model.MType
+-- org.apache.hadoop.hive.metastore.model.MTable
+-- org.apache.hadoop.hive.metastore.model.MSerDeInfo
+-- org.apache.hadoop.hive.metastore.model.MOrder
+-- org.apache.hadoop.hive.metastore.model.MStorageDescriptor
+-- org.apache.hadoop.hive.metastore.model.MPartition
+-- org.apache.hadoop.hive.metastore.model.MIndex
+-- org.apache.hadoop.hive.metastore.model.MRole
+-- org.apache.hadoop.hive.metastore.model.MRoleMap
+-- org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
+-- org.apache.hadoop.hive.metastore.model.MDBPrivilege
+-- org.apache.hadoop.hive.metastore.model.MTablePrivilege
+-- org.apache.hadoop.hive.metastore.model.MPartitionPrivilege
+-- org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege
+-- org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege
+-- org.apache.hadoop.hive.metastore.model.MPartitionEvent
+--
+-- Table PART_COL_PRIVS for classes 
[org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
+CREATE TABLE PART_COL_PRIVS
+(
+PART_COLUMN_GRANT_ID NUMBER NOT NULL,
+COLUMN_NAME VARCHAR2(128) NULL,
+CREATE_TIME NUMBER (10) NOT NULL,
+GRANT_OPTION NUMBER (5) NOT NULL,
+GRANTOR VARCHAR2(128) NULL,
+GRANTOR_TYPE VARCHAR2(128) NULL,
+PART_ID NUMBER NULL,
+PRINCIPAL_NAME VARCHAR2(128) NULL,
+PRINCIPAL_TYPE VARCHAR2(128) NULL,
+PART_COL_PRIV VARCHAR2(128) NULL
+);
+
+ALTER TABLE PART_COL_PRIVS ADD CONSTRAINT PART_COL_PRIVS_PK PRIMARY KEY 
(PART_COLUMN_GRANT_ID);
+
+-- Table CDS.
+CREATE TABLE CDS
+(
+CD_ID NUMBER NOT NULL
+);
+
+ALTER TABLE CDS ADD CONSTRAINT CDS_PK PRIMARY KEY (CD_ID);
+
+-- Table COLUMNS_V2 for join relationship
+CREATE TABLE COLUMNS_V2
+(
+CD_ID NUMBER NOT NULL,
+"COMMENT" VARCHAR2(256) NULL,
+COLUMN_NAME VARCHAR2(128) NOT NULL,
+TYPE_NAME VARCHAR2(4000) NOT NULL,
+INTEGER_IDX NUMBER(10) NOT NULL
+);
+
+ALTER TABLE COLUMNS_V2 ADD CONSTRAINT COLUMNS_V2_PK PRIMARY KEY 
(CD_ID,COLUMN_NAME);
+
+-- Table PARTITION_KEY_VALS for join relationship
+CREATE TABLE PARTITION_KEY_VALS
+(
+PART_ID NUMBER NOT NULL,
+PART_KEY_VAL VARCHAR2(256) NULL,
+INTEGER_IDX NUMBER(10) NOT NULL
+);
+
+ALTER TABLE PARTITION_KEY_VALS ADD CONSTRAINT PARTITION_KEY_VALS_PK PRIMARY 
KEY (PART_ID,INTEGER_IDX);
+
+-- Table DBS for classes [org.apache.hadoop.hive.metastore.model.MDatabase]
+CREATE TABLE DBS
+(
+DB_ID NUMBER NOT NULL,
+"DESC" VARCHAR2(4000) NULL,
+DB_LOCATION_URI VARCHAR2(4000) NOT NULL,
+NAME VARCHAR2(128) NULL
+);
+
+ALTER TABLE DBS ADD CONSTRAINT DBS_PK PRIMARY KEY (DB_ID);
+
+-- Table PARTITION_PARAMS for join relationship
+CREATE TABLE PARTITION_PARAMS
+(
+PART_ID NUMBER NOT NULL,
+PARAM_KEY VARCHAR2(256) NOT NULL,
+PARAM_VALUE VARCHAR2(4000) NULL
+);
+
+ALTER TABLE PARTITION_PARAMS ADD CONSTRAINT PARTITION_PARAMS_PK PRIMARY KEY 
(PART_ID,PARAM_KEY);
+
+-- Table SERDES for classes [org.apache.hadoop.hive.metastore.model.MSerDeInfo]
+CREATE TABLE SERDES
+(
+SERDE_ID NUMBER NOT NULL,
+NAME VARCHAR2(128) NULL,
+SLIB VARCHAR2(4000) NULL
+);
+
+ALTER TABLE SERDES ADD CONSTRAINT SERDES_PK PRIMARY KEY (SERDE_ID);
+
+-- Table TYPES for classes [org.apache.hadoop.hive.metastore.model.MType]
+CREATE TABLE TYPES
+(
+TYPES_ID NUMBER NOT NULL,
+TYPE_NAME VARCHAR2(128) NULL,
+TYPE1 VARCHAR2(767) NULL,
+TYPE2 VARCHAR2(767) NULL
+);
+
+ALTER TABLE TYPES ADD CONSTRAINT TYPES_PK PRIMARY KEY (TYPES_ID);
+
+-- Table PARTITION_KEYS for join relationship
+CREATE TABLE PARTITION_KEYS
+(
+TBL_ID NUMBER NOT NULL,
+PKEY_COMMENT VARCHAR2(4000) NULL,
+PKEY_NAME VARCHAR2(128) NOT NULL

svn commit: r1347399 - /hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql

2012-06-06 Thread cws
Author: cws
Date: Thu Jun  7 05:37:38 2012
New Revision: 1347399

URL: http://svn.apache.org/viewvc?rev=1347399&view=rev
Log:
HIVE-3082. Oracle Metastore schema script doesn't include DDL for DN internal 
tables

Modified:

hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql

Modified: 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql?rev=1347399&r1=1347398&r2=1347399&view=diff
==
--- 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
 (original)
+++ 
hive/branches/branch-0.9/metastore/scripts/upgrade/oracle/hive-schema-0.9.0.oracle.sql
 Thu Jun  7 05:37:38 2012
@@ -1,25 +1,32 @@
---
--- DataNucleus SchemaTool (version 2.0.3) ran at 10/08/2011 21:27:47
---
--- org.apache.hadoop.hive.metastore.model.MDatabase
--- org.apache.hadoop.hive.metastore.model.MFieldSchema
--- org.apache.hadoop.hive.metastore.model.MType
--- org.apache.hadoop.hive.metastore.model.MTable
--- org.apache.hadoop.hive.metastore.model.MSerDeInfo
--- org.apache.hadoop.hive.metastore.model.MOrder
--- org.apache.hadoop.hive.metastore.model.MStorageDescriptor
--- org.apache.hadoop.hive.metastore.model.MPartition
--- org.apache.hadoop.hive.metastore.model.MIndex
--- org.apache.hadoop.hive.metastore.model.MRole
--- org.apache.hadoop.hive.metastore.model.MRoleMap
--- org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
--- org.apache.hadoop.hive.metastore.model.MDBPrivilege
--- org.apache.hadoop.hive.metastore.model.MTablePrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionPrivilege
--- org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege
--- org.apache.hadoop.hive.metastore.model.MPartitionEvent
---
+
+
+-- Table SEQUENCE_TABLE is an internal table required by DataNucleus.
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE SEQUENCE_TABLE
+(
+   SEQUENCE_NAME VARCHAR2(255) NOT NULL,
+   NEXT_VAL NUMBER NOT NULL
+);
+
+ALTER TABLE SEQUENCE_TABLE ADD CONSTRAINT PART_TABLE_PK PRIMARY KEY 
(SEQUENCE_NAME);
+
+-- Table NUCLEUS_TABLES is an internal table required by DataNucleus.
+-- This table is required if datanucleus.autoStartMechanism=SchemaTable
+-- NOTE: Some versions of SchemaTool do not automatically generate this table.
+-- See http://www.datanucleus.org/servlet/jira/browse/NUCRDBMS-416
+CREATE TABLE NUCLEUS_TABLES
+(
+   CLASS_NAME VARCHAR2(128) NOT NULL,
+   TABLE_NAME VARCHAR2(128) NOT NULL,
+   TYPE VARCHAR2(4) NOT NULL,
+   OWNER VARCHAR2(2) NOT NULL,
+   VERSION VARCHAR2(20) NOT NULL,
+   INTERFACE_NAME VARCHAR2(255) NULL
+);
+
+ALTER TABLE NUCLEUS_TABLES ADD CONSTRAINT NUCLEUS_TABLES_PK PRIMARY KEY 
(CLASS_NAME);
+
 -- Table PART_COL_PRIVS for classes 
[org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege]
 CREATE TABLE PART_COL_PRIVS
 (




svn commit: r1345806 [1/7] - in /hive/trunk: metastore/if/ metastore/scripts/upgrade/mysql/ metastore/scripts/upgrade/oracle/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/or

2012-06-03 Thread cws
Author: cws
Date: Sun Jun  3 23:44:13 2012
New Revision: 1345806

URL: http://svn.apache.org/viewvc?rev=1345806&view=rev
Log:
HIVE-3079. Revert HIVE-2989 (Carl Steinbach via cws)

Removed:
hive/trunk/metastore/scripts/upgrade/mysql/010-HIVE-2989.mysql.sql
hive/trunk/metastore/scripts/upgrade/mysql/hive-schema-0.10.0.mysql.sql
hive/trunk/metastore/scripts/upgrade/oracle/hive-schema-0.10.0.oracle.sql

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/TableIdentifier.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLinkDesc.java
hive/trunk/ql/src/test/queries/clientnegative/create_table_failure5.q
hive/trunk/ql/src/test/queries/clientnegative/create_tablelink_failure1.q
hive/trunk/ql/src/test/queries/clientnegative/create_tablelink_failure2.q
hive/trunk/ql/src/test/queries/clientpositive/create_tablelink.q
hive/trunk/ql/src/test/results/clientnegative/create_table_failure5.q.out

hive/trunk/ql/src/test/results/clientnegative/create_tablelink_failure1.q.out

hive/trunk/ql/src/test/results/clientnegative/create_tablelink_failure2.q.out
hive/trunk/ql/src/test/results/clientpositive/create_tablelink.q.out
Modified:
hive/trunk/metastore/if/hive_metastore.thrift
hive/trunk/metastore/src/gen/thrift/gen-cpp/ThriftHiveMetastore.cpp
hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.h

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/EnvironmentContext.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Index.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Partition.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Schema.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java

hive/trunk/metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java

hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/ThriftHiveMetastore.php

hive/trunk/metastore/src/gen/thrift/gen-php/hive_metastore/hive_metastore_types.php

hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ThriftHiveMetastore.py
hive/trunk/metastore/src/gen/thrift/gen-py/hive_metastore/ttypes.py
hive/trunk/metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/TableType.java

hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java
hive/trunk/metastore/src/model/package.jdo
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
hive/trunk/ql/src/test/results/clientnegative/drop_table_failure2.q.out
hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out
hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out

hive/trunk/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out

Modified: hive/trunk/metastore/if/hive_metastore.thrift
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/if/hive_metastore.thrift?rev=1345806&r1=1345805&r2=1345806&view=diff
==
--- hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hive/trunk/metastore/if/hive_metastore.thrift Sun Jun  3 23:44:13 2012
@@ -145,11 +145,6 @@ struct StorageDescriptor {
   10: map<string, string> parameters // any user supplied key value hash
 }
 
-struct TableIdentifier {
-  1: string dbName,
-  2: string tableName  
-}
-
 // table information
 struct Table {
   1: string tableName,// name of the table
@@ -163,10 +158,8 @@ struct Table {
   9: map<string, string> parameters,   // to store comments or any other user level parameters
   10: string viewOriginalText, // original view text, null for non-view

svn commit: r1345806 [3/7] - in /hive/trunk: metastore/if/ metastore/scripts/upgrade/mysql/ metastore/scripts/upgrade/oracle/ metastore/src/gen/thrift/gen-cpp/ metastore/src/gen/thrift/gen-javabean/or

2012-06-03 Thread cws
Modified: hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp?rev=1345806&r1=1345805&r2=1345806&view=diff
==
--- hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp 
(original)
+++ hive/trunk/metastore/src/gen/thrift/gen-cpp/hive_metastore_types.cpp Sun 
Jun  3 23:44:13 2012
@@ -1490,73 +1490,8 @@ uint32_t StorageDescriptor::write(::apac
   return xfer;
 }
 
-const char* TableIdentifier::ascii_fingerprint = "07A9615F837F7D0A952B595DD3020972";
-const uint8_t TableIdentifier::binary_fingerprint[16] = {0x07,0xA9,0x61,0x5F,0x83,0x7F,0x7D,0x0A,0x95,0x2B,0x59,0x5D,0xD3,0x02,0x09,0x72};
-
-uint32_t TableIdentifier::read(::apache::thrift::protocol::TProtocol* iprot) {
-
-  uint32_t xfer = 0;
-  std::string fname;
-  ::apache::thrift::protocol::TType ftype;
-  int16_t fid;
-
-  xfer += iprot->readStructBegin(fname);
-
-  using ::apache::thrift::protocol::TProtocolException;
-
-
-  while (true)
-  {
-    xfer += iprot->readFieldBegin(fname, ftype, fid);
-    if (ftype == ::apache::thrift::protocol::T_STOP) {
-      break;
-    }
-    switch (fid)
-    {
-      case 1:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->dbName);
-          this->__isset.dbName = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      case 2:
-        if (ftype == ::apache::thrift::protocol::T_STRING) {
-          xfer += iprot->readString(this->tableName);
-          this->__isset.tableName = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      default:
-        xfer += iprot->skip(ftype);
-        break;
-    }
-    xfer += iprot->readFieldEnd();
-  }
-
-  xfer += iprot->readStructEnd();
-
-  return xfer;
-}
-
-uint32_t TableIdentifier::write(::apache::thrift::protocol::TProtocol* oprot) const {
-  uint32_t xfer = 0;
-  xfer += oprot->writeStructBegin("TableIdentifier");
-  xfer += oprot->writeFieldBegin("dbName", ::apache::thrift::protocol::T_STRING, 1);
-  xfer += oprot->writeString(this->dbName);
-  xfer += oprot->writeFieldEnd();
-  xfer += oprot->writeFieldBegin("tableName", ::apache::thrift::protocol::T_STRING, 2);
-  xfer += oprot->writeString(this->tableName);
-  xfer += oprot->writeFieldEnd();
-  xfer += oprot->writeFieldStop();
-  xfer += oprot->writeStructEnd();
-  return xfer;
-}
-
-const char* Table::ascii_fingerprint = "06C17A4A74B309FF2357999371777D0D";
-const uint8_t Table::binary_fingerprint[16] = {0x06,0xC1,0x7A,0x4A,0x74,0xB3,0x09,0xFF,0x23,0x57,0x99,0x93,0x71,0x77,0x7D,0x0D};
+const char* Table::ascii_fingerprint = "26BE788C09746068A2616712C9262900";
+const uint8_t Table::binary_fingerprint[16] = {0x26,0xBE,0x78,0x8C,0x09,0x74,0x60,0x68,0xA2,0x61,0x67,0x12,0xC9,0x26,0x29,0x00};
 
 uint32_t Table::read(::apache::thrift::protocol::TProtocol* iprot) {
 
@@ -1709,34 +1644,6 @@ uint32_t Table::read(::apache::thrift::p
           xfer += iprot->skip(ftype);
         }
         break;
-      case 14:
-        if (ftype == ::apache::thrift::protocol::T_STRUCT) {
-          xfer += this->linkTarget.read(iprot);
-          this->__isset.linkTarget = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
-      case 15:
-        if (ftype == ::apache::thrift::protocol::T_LIST) {
-          {
-            this->linkTables.clear();
-            uint32_t _size117;
-            ::apache::thrift::protocol::TType _etype120;
-            iprot->readListBegin(_etype120, _size117);
-            this->linkTables.resize(_size117);
-            uint32_t _i121;
-            for (_i121 = 0; _i121 < _size117; ++_i121)
-            {
-              xfer += this->linkTables[_i121].read(iprot);
-            }
-            iprot->readListEnd();
-          }
-          this->__isset.linkTables = true;
-        } else {
-          xfer += iprot->skip(ftype);
-        }
-        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -1776,10 +1683,10 @@ uint32_t Table::write(::apache::thrift::
   xfer += oprot->writeFieldBegin("partitionKeys", ::apache::thrift::protocol::T_LIST, 8);
   {
     xfer += oprot->writeListBegin(::apache::thrift::protocol::T_STRUCT, static_cast<uint32_t>(this->partitionKeys.size()));
-    std::vector<FieldSchema> ::const_iterator _iter122;
-    for (_iter122 = this->partitionKeys.begin(); _iter122 != this->partitionKeys.end(); ++_iter122)
+    std::vector<FieldSchema> ::const_iterator _iter117;
+    for (_iter117 = this->partitionKeys.begin(); _iter117 != this->partitionKeys.end(); ++_iter117)
     {
-      xfer += (*_iter122).write(oprot);
+      xfer += (*_iter117).write(oprot);
     }
     xfer += oprot->writeListEnd();
   }
@@ -1787,11 +1694,11 @@ uint32_t Table::write(::apache::thrift::
   xfer += oprot->writeFieldBegin("parameters", ::apache::thrift::protocol::T_MAP, 
svn commit: r1340256 - in /hive/trunk: contrib/src/test/queries/clientnegative/ contrib/src/test/queries/clientpositive/ contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositiv

2012-05-18 Thread cws
Author: cws
Date: Fri May 18 21:19:50 2012
New Revision: 1340256

URL: http://svn.apache.org/viewvc?rev=1340256&view=rev
Log:
HIVE-1719 [jira] Move RegexSerDe out of hive-contrib and over to hive-serde
(Shreepadma Venugopalan via Carl Steinbach)

Summary:
Regex Serde Changes

RegexSerDe is as much a part of the standard Hive distribution as the other 
SerDes
currently in hive-serde. I think we should move it over to the hive-serde 
module so that
users don't have to go to the added effort of manually registering the contrib 
jar before
using it.

Test Plan: EMPTY

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D3249

Added:
hive/trunk/ql/src/test/queries/clientnegative/serde_regex.q
hive/trunk/ql/src/test/queries/clientnegative/serde_regex2.q
hive/trunk/ql/src/test/queries/clientnegative/serde_regex3.q
hive/trunk/ql/src/test/queries/clientpositive/serde_regex.q
  - copied, changed from r1340252, 
hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q
hive/trunk/ql/src/test/results/clientnegative/serde_regex.q.out
hive/trunk/ql/src/test/results/clientnegative/serde_regex2.q.out
hive/trunk/ql/src/test/results/clientnegative/serde_regex3.q.out
hive/trunk/ql/src/test/results/clientpositive/serde_regex.q.out
  - copied, changed from r1340252, 
hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java   
(with props)
Modified:
hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q
hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q
hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out
hive/trunk/contrib/src/test/results/clientpositive/serde_regex.q.out

Modified: hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q?rev=1340256&r1=1340255&r2=1340256&view=diff
==
--- hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q (original)
+++ hive/trunk/contrib/src/test/queries/clientnegative/serde_regex.q Fri May 18 
21:19:50 2012
@@ -1,7 +1,8 @@
 add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
 
-DROP TABLE serde_regex;
+USE default;
 
+--  This should fail because Regex SerDe supports only columns of type string
 EXPLAIN
 CREATE TABLE serde_regex(
   host STRING,
@@ -35,11 +36,4 @@ WITH SERDEPROPERTIES (
   "input.regex" = "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?",
   "output.format.string" = "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s"
 )
-STORED AS TEXTFILE;
-
-LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
-LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
-
-SELECT * FROM serde_regex ORDER BY time;
-
-DROP TABLE serde_regex;
+STORED AS TEXTFILE;
\ No newline at end of file

Modified: hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q?rev=1340256&r1=1340255&r2=1340256&view=diff
==
--- hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q (original)
+++ hive/trunk/contrib/src/test/queries/clientpositive/serde_regex.q Fri May 18 
21:19:50 2012
@@ -1,7 +1,5 @@
 add jar ${system:build.dir}/hive-contrib-${system:hive.version}.jar;
 
-DROP TABLE serde_regex;
-
 EXPLAIN
 CREATE TABLE serde_regex(
   host STRING,
@@ -15,7 +13,7 @@ CREATE TABLE serde_regex(
   agent STRING)
 ROW FORMAT SERDE 'org.apache.hadoop.hive.contrib.serde2.RegexSerDe'
 WITH SERDEPROPERTIES (
-  "input.regex" = "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?",
+  "input.regex" = "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") (-|[0-9]*) (-|[0-9]*)(?: ([^ \"]*|\"[^\"]*\") ([^ \"]*|\"[^\"]*\"))?", 
   "output.format.string" = "%1$s %2$s %3$s %4$s %5$s %6$s %7$s %8$s %9$s"
 )
 STORED AS TEXTFILE;
@@ -40,6 +38,4 @@ STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH "../data/files/apache.access.log" INTO TABLE serde_regex;
 LOAD DATA LOCAL INPATH "../data/files/apache.access.2.log" INTO TABLE serde_regex;
 
-SELECT * FROM serde_regex ORDER BY time;
-
-DROP TABLE serde_regex;
+SELECT * FROM serde_regex ORDER BY time;
\ No newline at end of file

Modified: hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/serde_regex.q.out?rev=1340256&r1=1340255&r2=1340256&view=diff
==
--- hive/trunk/contrib/src/test/results/clientnegative

svn commit: r1339004 - /hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

2012-05-15 Thread cws
Author: cws
Date: Wed May 16 04:29:14 2012
New Revision: 1339004

URL: http://svn.apache.org/viewvc?rev=1339004&view=rev
Log:
HIVE-3014 [jira] Fix metastore test failures caused by HIVE-2757
(Zhenxiao Luo via Carl Steinbach)

Summary: HIVE-3014: Fix metastore test failures caused by HIVE-2757

Test Plan: EMPTY

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

Differential Revision: https://reviews.facebook.net/D3213

Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1339004&r1=1339003&r2=1339004&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed 
May 16 04:29:14 2012
@@ -648,6 +648,17 @@ public class HiveConf extends Configurat
 public String toString() {
   return varname;
 }
+
+    private static String findHadoopBinary() {
+      String val = System.getenv("HADOOP_HOME");
+      // In Hadoop 1.X and Hadoop 2.X HADOOP_HOME is gone and replaced with HADOOP_PREFIX
+      if (val == null) {
+        val = System.getenv("HADOOP_PREFIX");
+      }
+      // and if all else fails we can at least try /usr/bin/hadoop
+      return (val == null ? File.separator + "usr" : val)
+        + File.separator + "bin" + File.separator + "hadoop";
+    }
   }
 
   /**
@@ -907,16 +918,6 @@ public class HiveConf extends Configurat
 }
   }
 
-  private static String findHadoopBinary() {
-    String val = System.getenv("HADOOP_HOME");
-    // In Hadoop 1.X and Hadoop 2.X HADOOP_HOME is gone and replaced with HADOOP_PREFIX
-    if (val == null) {
-      val = System.getenv("HADOOP_PREFIX");
-    }
-    // and if all else fails we can at least try /usr/bin/hadoop
-    return (val == null ? File.separator + "usr" : val) + File.separator + "bin" + File.separator + "hadoop";
-  }
-
   public Properties getChangedProperties() {
 Properties ret = new Properties();
 Properties newProp = getAllProperties();




svn commit: r1336906 - in /hive/trunk: bin/ext/help.sh bin/hive bin/init-hive-dfs.sh common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

2012-05-10 Thread cws
Author: cws
Date: Thu May 10 21:30:15 2012
New Revision: 1336906

URL: http://svn.apache.org/viewvc?rev=1336906&view=rev
Log:
HIVE-2757. Hive can't find hadoop executor scripts without HADOOP_HOME set 
(Roman Shaposhnik via cws)

Modified:
hive/trunk/bin/ext/help.sh
hive/trunk/bin/hive
hive/trunk/bin/init-hive-dfs.sh
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

Modified: hive/trunk/bin/ext/help.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/bin/ext/help.sh?rev=1336906&r1=1336905&r2=1336906&view=diff
==
--- hive/trunk/bin/ext/help.sh (original)
+++ hive/trunk/bin/ext/help.sh Thu May 10 21:30:15 2012
@@ -24,7 +24,7 @@ help() {
   echo "  --config : Hive configuration directory"
   echo "  --service : Starts specific service/component. cli is default"
   echo "Parameters used:"
-  echo "  HADOOP_HOME : Hadoop install directory"
+  echo "  HADOOP_HOME or HADOOP_PREFIX : Hadoop install directory"
   echo "  HIVE_OPT : Hive options"
   echo "For help on a particular service:"
   echo "  ./hive --service serviceName --help"

Modified: hive/trunk/bin/hive
URL: 
http://svn.apache.org/viewvc/hive/trunk/bin/hive?rev=1336906&r1=1336905&r2=1336906&view=diff
==
--- hive/trunk/bin/hive (original)
+++ hive/trunk/bin/hive Thu May 10 21:30:15 2012
@@ -155,15 +155,15 @@ if [ -f ${HADOOP_IN_PATH} ]; then
   HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
 fi
 # HADOOP_HOME env variable overrides hadoop in the path
-HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+HADOOP_HOME=${HADOOP_HOME:-${HADOOP_PREFIX:-$HADOOP_DIR}}
 if [ "$HADOOP_HOME" == "" ]; then
-  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
   exit 4;
 fi
 
 HADOOP=$HADOOP_HOME/bin/hadoop
 if [ ! -f ${HADOOP} ]; then
-  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
   exit 4;
 fi
 

Modified: hive/trunk/bin/init-hive-dfs.sh
URL: 
http://svn.apache.org/viewvc/hive/trunk/bin/init-hive-dfs.sh?rev=1336906&r1=1336905&r2=1336906&view=diff
==
--- hive/trunk/bin/init-hive-dfs.sh (original)
+++ hive/trunk/bin/init-hive-dfs.sh Thu May 10 21:30:15 2012
@@ -62,15 +62,15 @@ if [ -f ${HADOOP_IN_PATH} ]; then
   HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
 fi
 # HADOOP_HOME env variable overrides hadoop in the path
-HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
+HADOOP_HOME=${HADOOP_HOME:-${HADOOP_PREFIX:-$HADOOP_DIR}}
 if [ "$HADOOP_HOME" == "" ]; then
-  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
   exit 4;
 fi
 
 HADOOP_EXEC=$HADOOP_HOME/bin/hadoop
 if [ ! -f ${HADOOP} ]; then
-  echo "Cannot find hadoop installation: \$HADOOP_HOME must be set or hadoop must be in the path";
+  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
   exit 4;
 fi
 

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1336906&r1=1336905&r2=1336906&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Thu 
May 10 21:30:15 2012
@@ -207,8 +207,7 @@ public class HiveConf extends Configurat
     // Properties with null values are ignored and exist only for the purpose of giving us
     // a symbolic name to reference in the Hive source code. Properties with non-null
     // values will override any values set in the underlying Hadoop configuration.
-    HADOOPBIN("hadoop.bin.path", System.getenv("HADOOP_HOME") + "/bin/hadoop"),
-    HADOOPCONF("hadoop.config.dir", System.getenv("HADOOP_HOME") + "/conf"),
+    HADOOPBIN("hadoop.bin.path", findHadoopBinary()),
     HADOOPFS("fs.default.name", null),
     HIVE_FS_HAR_IMPL("fs.har.impl", "org.apache.hadoop.hive.shims.HiveHarFileSystem"),
     HADOOPMAPFILENAME("map.input.file", null),
@@ -845,14 +844,6 @@ public class HiveConf extends Configurat
   addResource(hiveSiteURL);
 }
 
-// if hadoop configuration files are already in our path - then define
-// the containing directory as the configuration directory
-    URL hadoopconfurl = getClassLoader().getResource("core-site.xml");
-if (hadoopconfurl != null) {
-  String conffile = hadoopconfurl.getPath

svn commit: r1336913 - in /hive/trunk/ql/src/test: org/apache/hadoop/hive/ql/ queries/clientnegative/ queries/clientpositive/ results/clientnegative/ results/clientpositive/

2012-05-10 Thread cws
Author: cws
Date: Thu May 10 21:50:39 2012
New Revision: 1336913

URL: http://svn.apache.org/viewvc?rev=1336913&view=rev
Log:
HIVE-2979. Implement INCLUDE_HADOOP_MAJOR_VERSION test macro (Zhenxiao Luo via 
cws)

Added:
hive/trunk/ql/src/test/queries/clientnegative/archive_corrupt.q
hive/trunk/ql/src/test/results/clientnegative/archive_corrupt.q.out
Modified:
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
hive/trunk/ql/src/test/queries/clientpositive/archive_corrupt.q
hive/trunk/ql/src/test/queries/clientpositive/combine2.q
hive/trunk/ql/src/test/queries/clientpositive/sample_islocalmode_hook.q
hive/trunk/ql/src/test/queries/clientpositive/split_sample.q
hive/trunk/ql/src/test/results/clientpositive/archive_corrupt.q.out
hive/trunk/ql/src/test/results/clientpositive/combine2.q.out
hive/trunk/ql/src/test/results/clientpositive/sample_islocalmode_hook.q.out
hive/trunk/ql/src/test/results/clientpositive/split_sample.q.out

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java?rev=1336913&r1=1336912&r2=1336913&view=diff
==
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/QTestUtil.java Thu May 10 
21:50:39 2012
@@ -33,6 +33,7 @@ import java.io.InputStreamReader;
 import java.io.PrintStream;
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
+import java.lang.UnsupportedOperationException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Deque;
@@ -45,6 +46,7 @@ import java.util.TreeMap;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -276,50 +278,68 @@ public class QTestUtil {
   }
 
   public void addFile(File qf) throws Exception {
-
 FileInputStream fis = new FileInputStream(qf);
 BufferedInputStream bis = new BufferedInputStream(fis);
 BufferedReader br = new BufferedReader(new InputStreamReader(bis, "UTF8"));
 StringBuilder qsb = new StringBuilder();
 
 // Look for a hint to not run a test on some Hadoop versions
-Pattern pattern = Pattern.compile("-- EXCLUDE_HADOOP_MAJOR_VERSIONS(.*)");
-
-
-// Read the entire query
+Pattern pattern = Pattern.compile("-- (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS\\((.*)\\)");
+
 boolean excludeQuery = false;
+boolean includeQuery = false;
+Set<String> versionSet = new HashSet<String>();
 String hadoopVer = ShimLoader.getMajorVersion();
 String line;
+
+// Read the entire query
 while ((line = br.readLine()) != null) {
 
-  // While we are reading the lines, detect whether this query wants to be
-  // excluded from running because the Hadoop version is incorrect
+  // Each qfile may include at most one INCLUDE or EXCLUDE directive.
+  //
+  // If a qfile contains an INCLUDE directive, and hadoopVer does
+  // not appear in the list of versions to include, then the qfile
+  // is skipped.
+  //
+  // If a qfile contains an EXCLUDE directive, and hadoopVer is
+  // listed in the list of versions to EXCLUDE, then the qfile is
+  // skipped.
+  //
+  // Otherwise, the qfile is included.
   Matcher matcher = pattern.matcher(line);
   if (matcher.find()) {
-String group = matcher.group();
-int start = group.indexOf('(');
-int end = group.indexOf(')');
-assert end > start;
-// versions might be something like '0.17, 0.19'
-String versions = group.substring(start+1, end);
+if (excludeQuery || includeQuery) {
+  String message = "QTestUtil: qfile " + qf.getName()
++ " contains more than one reference to (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS";
+  throw new UnsupportedOperationException(message);
+}
+
+String prefix = matcher.group(1);
+if ("EX".equals(prefix)) {
+  excludeQuery = true;
+} else {
+  includeQuery = true;
+}
 
-Set<String> excludedVersionSet = new HashSet<String>();
+String versions = matcher.group(2);
 for (String s : versions.split("\\,")) {
   s = s.trim();
-  excludedVersionSet.add(s);
-}
-if (excludedVersionSet.contains(hadoopVer)) {
-  excludeQuery = true;
+  versionSet.add(s);
 }
   }
 qsb.append(line + "\n");
 }
 qMap.put(qf.getName(), qsb.toString());
-if(excludeQuery) {
-  System.out.println("Due to the Hadoop Version (" + hadoopVer + "), " +
-  "adding query " + qf.getName() + " to the set of tests to skip");
+
+if (excludeQuery
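
For reference, the matching logic above can be exercised standalone. This sketch (an assumed harness, not part of the commit) shows how group(1) selects the EX/IN prefix and group(2) carries the version list:

  import java.util.HashSet;
  import java.util.Set;
  import java.util.regex.Matcher;
  import java.util.regex.Pattern;

  public class VersionDirectiveDemo {
    // Same pattern as the QTestUtil change above.
    private static final Pattern P =
        Pattern.compile("-- (EX|IN)CLUDE_HADOOP_MAJOR_VERSIONS\\((.*)\\)");

    public static void main(String[] args) {
      Matcher m = P.matcher("-- INCLUDE_HADOOP_MAJOR_VERSIONS(0.20, 0.23)");
      if (m.find()) {
        boolean exclude = "EX".equals(m.group(1));
        Set<String> versions = new HashSet<String>();
        for (String s : m.group(2).split("\\,")) {
          versions.add(s.trim());
        }
        // Skip rule from the comment block above: EXCLUDE skips when the set
        // contains hadoopVer; INCLUDE skips when it does not.
        System.out.println((exclude ? "EXCLUDE " : "INCLUDE ") + versions);
      }
    }
  }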

svn commit: r1301162 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/common/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientpositive/

2012-03-15 Thread cws
Author: cws
Date: Thu Mar 15 19:29:42 2012
New Revision: 1301162

URL: http://svn.apache.org/viewvc?rev=1301162&view=rev
Log:
HIVE-2856. Fix TestCliDriver escape1.q failure on MR2 (Zhenxiao Luo via cws)

Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
hive/trunk/ql/src/test/queries/clientpositive/escape1.q
hive/trunk/ql/src/test/queries/clientpositive/escape2.q
hive/trunk/ql/src/test/results/clientpositive/escape1.q.out
hive/trunk/ql/src/test/results/clientpositive/escape2.q.out

Modified: 
hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java?rev=1301162&r1=1301161&r2=1301162&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/common/FileUtils.java Thu 
Mar 15 19:29:42 2012
@@ -144,7 +144,7 @@ public final class FileUtils {
 '\u0013', '\u0014', '\u0015', '\u0016', '\u0017', '\u0018', '\u0019',
 '\u001A', '\u001B', '\u001C', '\u001D', '\u001E', '\u001F',
 '"', '#', '%', '\'', '*', '/', ':', '=', '?', '\\', '\u007F', '{',
-'[', ']'};
+'[', ']', '^'};
 for (char c : clist) {
   charToEscape.set(c);
 }
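
To see what that one-character change does downstream, here is a minimal sketch (illustrative, not the Hive implementation) of the BitSet-driven percent-escaping this list feeds: with '^' flagged, a value such as a^b becomes a%5Eb in a partition path, which is what keeps MR2 happy with such names.

  import java.util.BitSet;

  public class EscapeDemo {
    private static final BitSet CHAR_TO_ESCAPE = new BitSet(128);
    static {
      // '^' is the character this commit adds; the rest abbreviates the list above.
      for (char c : new char[] {'"', '#', '%', '\'', '*', '/', ':', '=', '?',
                                '\\', '{', '[', ']', '^'}) {
        CHAR_TO_ESCAPE.set(c);
      }
    }

    static String escapePathName(String path) {
      StringBuilder sb = new StringBuilder();
      for (char c : path.toCharArray()) {
        if (c < 128 && CHAR_TO_ESCAPE.get(c)) {
          sb.append('%').append(String.format("%02X", (int) c));  // percent-encode
        } else {
          sb.append(c);
        }
      }
      return sb.toString();
    }

    public static void main(String[] args) {
      System.out.println(escapePathName("part=a^b"));  // prints part%3Da%5Eb
    }
  }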

Modified: hive/trunk/ql/src/test/queries/clientpositive/escape1.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/escape1.q?rev=1301162&r1=1301161&r2=1301162&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/escape1.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/escape1.q Thu Mar 15 19:29:42 
2012
@@ -6,12 +6,14 @@ DROP TABLE escape_raw;
 CREATE TABLE escape_raw (s STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/escapetest.txt' INTO TABLE  escape_raw;
 
+SELECT count(*) from escape_raw;
 SELECT * from escape_raw;
 
 CREATE TABLE escape1 (a STRING) PARTITIONED BY (ds STRING, part STRING);
 INSERT OVERWRITE TABLE escape1 PARTITION (ds='1', part) SELECT '1', s from 
 escape_raw;
 
+SELECT count(*) from escape1;
 SELECT * from escape1;
 SHOW PARTITIONS escape1;
 

Modified: hive/trunk/ql/src/test/queries/clientpositive/escape2.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/escape2.q?rev=1301162&r1=1301161&r2=1301162&view=diff
==
--- hive/trunk/ql/src/test/queries/clientpositive/escape2.q (original)
+++ hive/trunk/ql/src/test/queries/clientpositive/escape2.q Thu Mar 15 19:29:42 
2012
@@ -8,12 +8,14 @@ DROP TABLE IF EXISTS escape_raw;
 CREATE TABLE escape_raw (s STRING) STORED AS TEXTFILE;
 LOAD DATA LOCAL INPATH '../data/files/escapetest.txt' INTO TABLE  escape_raw;
 
+SELECT count(*) from escape_raw;
 SELECT * from escape_raw;
 
 CREATE TABLE escape2(a STRING) PARTITIONED BY (ds STRING, part STRING);
 INSERT OVERWRITE TABLE escape2 PARTITION (ds='1', part) SELECT '1', s from 
 escape_raw;
 
+SELECT count(*) from escape2;
 SELECT * from escape2;
 SHOW PARTITIONS escape2;
 

Modified: hive/trunk/ql/src/test/results/clientpositive/escape1.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape1.q.out?rev=1301162&r1=1301161&r2=1301162&view=diff
==
Files hive/trunk/ql/src/test/results/clientpositive/escape1.q.out (original) 
and hive/trunk/ql/src/test/results/clientpositive/escape1.q.out Thu Mar 15 
19:29:42 2012 differ

Modified: hive/trunk/ql/src/test/results/clientpositive/escape2.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/escape2.q.out?rev=1301162&r1=1301161&r2=1301162&view=diff
==
Files hive/trunk/ql/src/test/results/clientpositive/escape2.q.out (original) 
and hive/trunk/ql/src/test/results/clientpositive/escape2.q.out Thu Mar 15 
19:29:42 2012 differ




svn commit: r1297919 [2/2] - in /hive/trunk: data/files/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/ ql/src/java/org/apache/hadoop/hive/ql/optim

2012-03-07 Thread cws
Modified: 
hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1297919&r1=1297918&r2=1297919&view=diff
==
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java 
(original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Wed 
Mar  7 10:11:03 2012
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.UnionObjectInspector;
@@ -372,6 +373,20 @@ public final class SerDeUtils {
   }
 
   /**
+   * Returns false for a null element when the null-safe flag is set for that position.
+   */
+  public static boolean hasAnyNullObject(List o, StandardStructObjectInspector loi,
+  boolean[] nullSafes) {
+    List<? extends StructField> fields = loi.getAllStructFieldRefs();
+    for (int i = 0; i < o.size(); i++) {
+      if ((nullSafes == null || !nullSafes[i])
+          && hasAnyNullObject(o.get(i), fields.get(i).getFieldObjectInspector())) {
+        return true;
+      }
+    }
+    return false;
+  }
+  /**
* True if Object passed is representing null object.
*
* @param o The object
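
A simplified, Hive-free sketch of the new method's semantics (all names assumed; the committed version recurses through ObjectInspectors): nulls are only "seen" at positions whose null-safe flag is off, which is what lets null-safe join keys survive the null filter.

  import java.util.Arrays;
  import java.util.List;

  public class NullSafeCheckDemo {
    // Simplified stand-in for SerDeUtils.hasAnyNullObject: ignores nulls in
    // positions whose nullSafes flag is true.
    static boolean hasAnyNull(List<Object> row, boolean[] nullSafes) {
      for (int i = 0; i < row.size(); i++) {
        if ((nullSafes == null || !nullSafes[i]) && row.get(i) == null) {
          return true;
        }
      }
      return false;
    }

    public static void main(String[] args) {
      List<Object> row = Arrays.<Object>asList("k", null);
      System.out.println(hasAnyNull(row, new boolean[] {false, true}));  // false
      System.out.println(hasAnyNull(row, null));                         // true
    }
  }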




svn commit: r1243762 - in /hive/trunk: jdbc/src/java/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/org/apache/hadoop/

2012-02-13 Thread cws
Author: cws
Date: Tue Feb 14 02:16:39 2012
New Revision: 1243762

URL: http://svn.apache.org/viewvc?rev=1243762&view=rev
Log:
HIVE-2753 [jira] Remove empty java files
(Owen O'Malley via Carl Steinbach)

Summary:
remove dead java files

When looking at the 0.8.1 rc1, I discovered there were a set of empty Java files
that were likely left over from using 'patch' without the -E.

jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java
ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java
ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObject.java
ql/src/java/org/apache/hadoop/hive/ql/exec/PathUtil.java
ql/src/java/org/apache/hadoop/hive/ql/exec/TypedBytesRecordReader.java
ql/src/java/org/apache/hadoop/hive/ql/plan/AlterPartitionProtectModeDesc.java
ql/src/java/org/apache/hadoop/hive/ql/plan/TouchDesc.java
ql/src/test/org/apache/hadoop/hive/ql/plan/TestAddPartition.java
serde/src/gen-java/org/apache/hadoop/hive/serde/test/Constants.java
shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java
shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java

Test Plan: EMPTY

Reviewers: JIRA, cwsteinbach

Reviewed By: cwsteinbach

CC: cwsteinbach

Differential Revision: https://reviews.facebook.net/D1611

Removed:
hive/trunk/jdbc/src/java/org/apache/hadoop/hive/jdbc/JdbcSessionState.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeIndexEvaluator.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinObject.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/PathUtil.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/TypedBytesRecordReader.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterPartitionProtectModeDesc.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TouchDesc.java
hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/plan/TestAddPartition.java

hive/trunk/serde/src/gen-java/org/apache/hadoop/hive/serde/test/Constants.java
hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java

hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java



svn commit: r1240852 - in /hive/site: author/src/documentation/content/xdocs/ publish/

2012-02-05 Thread cws
Author: cws
Date: Sun Feb  5 22:49:31 2012
New Revision: 1240852

URL: http://svn.apache.org/viewvc?rev=1240852&view=rev
Log:
HIVE-BUILD. Update for 0.8.1

Modified:
hive/site/author/src/documentation/content/xdocs/releases.xml
hive/site/author/src/documentation/content/xdocs/site.xml
hive/site/publish/credits.html
hive/site/publish/credits.pdf
hive/site/publish/index.html
hive/site/publish/index.pdf
hive/site/publish/irc.html
hive/site/publish/irc.pdf
hive/site/publish/issue_tracking.html
hive/site/publish/issue_tracking.pdf
hive/site/publish/linkmap.html
hive/site/publish/linkmap.pdf
hive/site/publish/mailing_lists.html
hive/site/publish/mailing_lists.pdf
hive/site/publish/releases.html
hive/site/publish/releases.pdf
hive/site/publish/version_control.html
hive/site/publish/version_control.pdf

Modified: hive/site/author/src/documentation/content/xdocs/releases.xml
URL: 
http://svn.apache.org/viewvc/hive/site/author/src/documentation/content/xdocs/releases.xml?rev=1240852&r1=1240851&r2=1240852&view=diff
==
--- hive/site/author/src/documentation/content/xdocs/releases.xml (original)
+++ hive/site/author/src/documentation/content/xdocs/releases.xml Sun Feb  5 
22:49:31 2012
@@ -31,58 +31,10 @@
   <title>News</title>
 
   <section>
-<title>16 December, 2011: release 0.8.0 available </title>
+<title>5 February, 2012: release 0.8.1 available </title>
   <p>This release is the latest release of Hive and
 it works with Hadoop 0.20.1 and 0.20.2</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;version=12316178">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>21 June, 2011: release 0.7.1 available </title>
-   <p>This release is the latest release of Hive and
-it works with Hadoop 0.20.1 and 0.20.2</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;version=12316336">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>29 March, 2011: release 0.7.0 available </title>
-   <p>This release is the latest release of Hive and
-it works with Hadoop 0.20.1 and 0.20.2</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;version=12315150">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>19 October, 2010: release 0.6.0 available </title>
-   <p>This release is the latest release of Hive and
-it works with Hadoop 0.17, 0.18, 0.19 and 0.20.</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;styleName=Html&amp;version=12314524">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>23 February, 2010: release 0.5.0 available </title>
-   <p>This release is the latest release of Hive and
-it works with Hadoop 0.17, 0.18, 0.19 and 0.20.</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;styleName=Html&amp;version=12314156">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>17 December, 2009: release 0.4.1 available </title>
-   <p>This release is the latest stable release of Hive and
-it works with Hadoop 0.17, 0.18, 0.19 and 0.20.</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;styleName=Html&amp;version=12314225">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>13 October, 2009: release 0.4.0 available </title>
-   <p>This release is deprecated given release 0.4.1.</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;styleName=Html&amp;version=12313714">JIRA change log for this release</a>.</p>
-  </section>
-
-  <section>
-<title>29 April, 2009: release 0.3.0 available </title>
-   <p>This release is the first official stable release of Hive and
-it works with Hadoop 0.17, 0.18 and 0.19.</p>
-<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;styleName=Html&amp;version=12313637">JIRA change log for this release</a>.</p>
+<p>You can look at the complete <a href="https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310843&amp;version=12319268">JIRA change log for this release</a>.</p>
   </section>
 
 </section>

Modified: hive/site/author/src/documentation/content/xdocs/site.xml
URL: 
http://svn.apache.org/viewvc/hive/site/author/src/documentation/content/xdocs/site.xml?rev=1240852&r1=1240851&r2=1240852&view=diff

svn commit: r1237922 - /hive/tags/release-0.8.1-rc1/

2012-01-30 Thread cws
Author: cws
Date: Mon Jan 30 18:51:12 2012
New Revision: 1237922

URL: http://svn.apache.org/viewvc?rev=1237922&view=rev
Log:
Hive 0.8.1-rc1 release.

Added:
hive/tags/release-0.8.1-rc1/
  - copied from r1237921, hive/branches/branch-0.8-r2/



svn commit: r1237510 - /hive/trunk/build-common.xml

2012-01-29 Thread cws
Author: cws
Date: Mon Jan 30 00:20:09 2012
New Revision: 1237510

URL: http://svn.apache.org/viewvc?rev=1237510&view=rev
Log:
HIVE-2662 [jira] Add Ant configuration property for dumping classpath of tests

Summary: HIVE-2662. Add Ant configuration property for dumping classpath of
tests

Test Plan: EMPTY

Reviewers: JIRA, jsichi, ashutoshc

Reviewed By: ashutoshc

CC: ashutoshc

Differential Revision: https://reviews.facebook.net/D903

Modified:
hive/trunk/build-common.xml

Modified: hive/trunk/build-common.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1237510&r1=1237509&r2=1237510&view=diff
==
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Mon Jan 30 00:20:09 2012
@@ -63,6 +63,7 @@
   <property name="test.silent" value="true"/>
   <property name="hadoopVersion" value="${hadoop.version.ant-internal}"/>
   <property name="test.serialize.qplan" value="false"/>
+  <property name="test.print.classpath" value="false"/>
 
   <path id="test.classpath">
 <pathelement location="${test.build.classes}" />
@@ -354,8 +355,13 @@
   <target name="test"
depends="test-conditions,gen-test,compile-test,test-jar,test-init">
 <echo message="Project: ${ant.project.name}"/>
-<!--<property name="testcp" refid="test.classpath"/>-->
-<!--<echo message="test.classpath: ${testcp}"/>-->
+<if>
+  <equals arg1="${test.print.classpath}" arg2="true" />
+  <then>
+<property name="testcp" refid="test.classpath"/>
+<echo message="Test Classpath: ${testcp}"/>
+  </then>
+</if>
 <junit showoutput="${test.output}" printsummary="yes" haltonfailure="no"
fork="yes" maxmemory="512m" dir="${basedir}" timeout="${test.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off"
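
Usage note (an assumption from standard Ant behavior, not stated in the commit): since properties supplied on the command line override <property> defaults, the dump can be switched on per run with something like `ant test -Dtest.print.classpath=true`. The <if>/<then> tasks here are ant-contrib constructs rather than core Ant.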




svn commit: r1237511 - in /hive/trunk: hbase-handler/src/test/templates/ ql/src/test/templates/

2012-01-29 Thread cws
Author: cws
Date: Mon Jan 30 00:26:59 2012
New Revision: 1237511

URL: http://svn.apache.org/viewvc?rev=1237511&view=rev
Log:
HIVE-2760 [jira] TestCliDriver should log elapsed time

Summary: HIVE-2760. TestCliDriver should log elapsed time

Test Plan: EMPTY

Reviewers: JIRA, ashutoshc

Reviewed By: ashutoshc

CC: ashutoshc, cwsteinbach

Differential Revision: https://reviews.facebook.net/D1503

Modified:
hive/trunk/hbase-handler/src/test/templates/TestHBaseCliDriver.vm
hive/trunk/ql/src/test/templates/TestCliDriver.vm
hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm
hive/trunk/ql/src/test/templates/TestParse.vm
hive/trunk/ql/src/test/templates/TestParseNegative.vm

Modified: hive/trunk/hbase-handler/src/test/templates/TestHBaseCliDriver.vm
URL: 
http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/test/templates/TestHBaseCliDriver.vm?rev=1237511&r1=1237510&r2=1237511&view=diff
==
--- hive/trunk/hbase-handler/src/test/templates/TestHBaseCliDriver.vm (original)
+++ hive/trunk/hbase-handler/src/test/templates/TestHBaseCliDriver.vm Mon Jan 
30 00:26:59 2012
@@ -85,6 +85,7 @@ public class $className extends TestCase
   #set ($eidx = $fname.indexOf('.'))
   #set ($tname = $fname.substring(0, $eidx))
   public void testCliDriver_$tname() throws Exception {
+long startTime = System.currentTimeMillis();
 try {
   System.out.println("Begin query: " + $fname);
   qt.cliInit($fname);
@@ -118,11 +119,13 @@ public class $className extends TestCase
 } catch (Throwable e) {
   System.out.println("Exception: " + e.getMessage());
   e.printStackTrace();
+  System.out.println("Failed query: " + $fname);
   System.out.flush();
   fail("Unexpected exception");
 }
 
-System.out.println("Done query: " + $fname);
+long elapsedTime = System.currentTimeMillis() - startTime;
+System.out.println("Done query: " + $fname + " elapsedTime=" + elapsedTime/1000 + "s");
 assertTrue("Test passed", true);
   }
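
A small behavioral note on the new log line (editorial observation, not from the commit): elapsedTime/1000 is integer division in Java, so the templates print whole seconds and a sub-second query reports 0s.

  long elapsedTime = 1999;  // milliseconds
  System.out.println("elapsedTime=" + elapsedTime / 1000 + "s");  // prints elapsedTime=1s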
 

Modified: hive/trunk/ql/src/test/templates/TestCliDriver.vm
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/templates/TestCliDriver.vm?rev=1237511&r1=1237510&r2=1237511&view=diff
==
--- hive/trunk/ql/src/test/templates/TestCliDriver.vm (original)
+++ hive/trunk/ql/src/test/templates/TestCliDriver.vm Mon Jan 30 00:26:59 2012
@@ -103,6 +103,7 @@ public class $className extends TestCase
   #set ($eidx = $fname.indexOf('.'))
   #set ($tname = $fname.substring(0, $eidx))
   public void testCliDriver_$tname() throws Exception {
+long startTime = System.currentTimeMillis();
 try {
   System.out.println("Begin query: " + $fname);
 
@@ -142,11 +143,13 @@ public class $className extends TestCase
 catch (Throwable e) {
   System.out.println("Exception: " + e.getMessage());
   e.printStackTrace();
+  System.out.println("Failed query: " + $fname);
   System.out.flush();
   fail("Unexpected exception" + debugHint);
 }
 
-System.out.println("Done query: " + $fname);
+long elapsedTime = System.currentTimeMillis() - startTime;
+System.out.println("Done query: " + $fname + " elapsedTime=" + elapsedTime/1000 + "s");
 assertTrue("Test passed", true);
   }
 

Modified: hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm?rev=1237511&r1=1237510&r2=1237511&view=diff
==
--- hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm (original)
+++ hive/trunk/ql/src/test/templates/TestNegativeCliDriver.vm Mon Jan 30 
00:26:59 2012
@@ -94,6 +94,7 @@ public class $className extends TestCase
   #set ($eidx = $fname.indexOf('.'))
   #set ($tname = $fname.substring(0, $eidx))
   public void testNegativeCliDriver_$tname() throws Exception {
+long startTime = System.currentTimeMillis();
 try {
   System.out.println("Begin query: " + $fname);
 
@@ -120,11 +121,13 @@ public class $className extends TestCase
 catch (Throwable e) {
   System.out.println("Exception: " + e.getMessage());
   e.printStackTrace();
+  System.out.println("Failed query: " + $fname);
   System.out.flush();
   fail("Unexpected exception" + debugHint);
 }
 
-System.out.println("Done query: " + $fname);
+long elapsedTime = System.currentTimeMillis() - startTime;
+System.out.println("Done query: " + $fname + " elapsedTime=" + elapsedTime/1000 + "s");
 assertTrue("Test passed", true);
   }
 

Modified: hive/trunk/ql/src/test/templates/TestParse.vm
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/templates/TestParse.vm?rev=1237511&r1=1237510&r2=1237511&view=diff
==
--- hive/trunk/ql/src/test/templates/TestParse.vm (original)
+++ hive/trunk/ql/src

svn commit: r1236489 [5/7] - in /hive/trunk/ql/src/test: queries/clientpositive/ results/clientpositive/

2012-01-26 Thread cws
Modified: hive/trunk/ql/src/test/results/clientpositive/join_nulls.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/join_nulls.q.out?rev=1236489&r1=1236488&r2=1236489&view=diff
==
--- hive/trunk/ql/src/test/results/clientpositive/join_nulls.q.out (original)
+++ hive/trunk/ql/src/test/results/clientpositive/join_nulls.q.out Fri Jan 27 
01:41:35 2012
@@ -9,11 +9,11 @@ PREHOOK: Output: default@myinput1
 POSTHOOK: query: LOAD DATA LOCAL INPATH '../data/files/in1.txt' INTO TABLE 
myinput1
 POSTHOOK: type: LOAD
 POSTHOOK: Output: default@myinput1
-PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b
+PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b
+POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
@@ -26,11 +26,11 @@ NULL	35	100	100
 100	100	NULL	35
 100	100	48	NULL
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b
+PREHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b
+POSTHOOK: query: SELECT * FROM myinput1 a LEFT OUTER JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
@@ -43,11 +43,11 @@ NULL	35	100	100
 100	100	NULL	35
 100	100	48	NULL
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b
+PREHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b
+POSTHOOK: query: SELECT * FROM myinput1 a RIGHT OUTER JOIN myinput1 b ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
@@ -60,364 +60,364 @@ NULL	35	100	100
 100	100	NULL	35
 100	100	48	NULL
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value
+PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value
+POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key
+PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key
+POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.key = b.key ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
 48	NULL	48	NULL
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value
+PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 PREHOOK: type: QUERY
 PREHOOK: Input: default@myinput1
 #### A masked pattern was here ####
-POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value
+POSTHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@myinput1
 #### A masked pattern was here ####
 NULL	35	NULL	35
 100	100	100	100
-PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value 
and a.key=b.key
+PREHOOK: query: SELECT * FROM myinput1 a JOIN myinput1 b ON a.value = b.value 
and a.key=b.key ORDER BY a.key ASC, a.value ASC, b.key ASC, b.value ASC
 

svn commit: r1234996 - /hive/tags/release-0.8.1-rc0/

2012-01-23 Thread cws
Author: cws
Date: Mon Jan 23 21:32:04 2012
New Revision: 1234996

URL: http://svn.apache.org/viewvc?rev=1234996&view=rev
Log:
Hive 0.8.1-rc0 release.

Added:
hive/tags/release-0.8.1-rc0/
  - copied from r1234995, hive/branches/branch-0.8-r2/



svn commit: r1234150 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/udf/generic/ test/queries/clientnegative/ test/queries/clientpositive/ test/results/clientnegative/ test/results/clientposi

2012-01-20 Thread cws
Author: cws
Date: Fri Jan 20 21:32:24 2012
New Revision: 1234150

URL: http://svn.apache.org/viewvc?rev=1234150&view=rev
Log:
HIVE-2203. Extend concat_ws() UDF to support arrays of strings (Zhenxiao Luo 
via cws)

Added:
hive/trunk/ql/src/test/queries/clientnegative/udf_concat_ws_wrong1.q
hive/trunk/ql/src/test/queries/clientnegative/udf_concat_ws_wrong2.q
hive/trunk/ql/src/test/queries/clientnegative/udf_concat_ws_wrong3.q
hive/trunk/ql/src/test/results/clientnegative/udf_concat_ws_wrong1.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_concat_ws_wrong2.q.out
hive/trunk/ql/src/test/results/clientnegative/udf_concat_ws_wrong3.q.out
Modified:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
hive/trunk/ql/src/test/queries/clientpositive/udf_concat_ws.q
hive/trunk/ql/src/test/results/clientpositive/udf_concat_ws.q.out

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java?rev=1234150&r1=1234149&r2=1234150&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFConcatWS.java
 Fri Jan 20 21:32:24 2012
@@ -24,24 +24,28 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde.Constants;
+import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.io.Text;
 
 /**
  * Generic UDF for string function
- * <code>CONCAT_WS(sep,str1,str2,str3,...)</code>. This mimics the function from
+ * <code>CONCAT_WS(sep, [string | array(string)]+)</code>.
+ * This mimics the function from
  * MySQL http://dev.mysql.com/doc/refman/5.0/en/string-functions.html#
  * function_concat-ws
- * 
+ *
  * @see org.apache.hadoop.hive.ql.udf.generic.GenericUDF
  */
-@Description(name = "concat_ws", value = "_FUNC_(separator, str1, str2, ...) - "
+@Description(name = "concat_ws",
+value = "_FUNC_(separator, [string | array(string)]+) - "
 + "returns the concatenation of the strings separated by the separator.",
 extended = "Example:\n"
-+"  SELECT _FUNC_('ce', 'fa', 'book') FROM src LIMIT 1;\n"
-+"   'facebook'")
++"  SELECT _FUNC_('.', 'www', array('facebook', 'com')) FROM src LIMIT 1;\n"
++"   'www.facebook.com'")
 public class GenericUDFConcatWS extends GenericUDF {
   private ObjectInspector[] argumentOIs;
 
@@ -49,15 +53,28 @@ public class GenericUDFConcatWS extends 
   public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
 if (arguments.length < 2) {
   throw new UDFArgumentLengthException(
-  "The function CONCAT_WS(separator,str1,str2,str3,...) needs at least two arguments.");
+  "The function CONCAT_WS(separator,[string | array(string)]+) "
++ "needs at least two arguments.");
 }
 
+// check if argument is a string or an array of strings
 for (int i = 0; i < arguments.length; i++) {
-  if (arguments[i].getTypeName() != Constants.STRING_TYPE_NAME
-      && arguments[i].getTypeName() != Constants.VOID_TYPE_NAME) {
-    throw new UDFArgumentTypeException(i, "Argument " + (i + 1)
+  switch(arguments[i].getCategory()) {
+    case LIST:
+      if (((ListObjectInspector)arguments[i]).getListElementObjectInspector()
+            .getTypeName().equals(Constants.STRING_TYPE_NAME)
+          || ((ListObjectInspector)arguments[i]).getListElementObjectInspector()
+            .getTypeName().equals(Constants.VOID_TYPE_NAME))
+        break;
+    case PRIMITIVE:
+      if (arguments[i].getTypeName().equals(Constants.STRING_TYPE_NAME)
+          || arguments[i].getTypeName().equals(Constants.VOID_TYPE_NAME))
+        break;
+    default:
+      throw new UDFArgumentTypeException(i, "Argument " + (i + 1)
         + " of function CONCAT_WS must be \"" + Constants.STRING_TYPE_NAME
-        + "\", but \"" + arguments[i].getTypeName() + "\" was found.");
+        + " or " + Constants.LIST_TYPE_NAME + "<" + Constants.STRING_TYPE_NAME
+        + ">\", but \"" + arguments[i].getTypeName() + "\" was found.");
   }
 }
 
@@ -84,8 +101,22 @@ public class GenericUDFConcatWS extends 
 } else {
   sb.append(separator);
 }
-sb.append
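
The committed UDF operates on ObjectInspectors and Text; as a plain-Java sketch of the extended semantics only, each argument may now contribute either one string or a whole array of strings, all joined by the separator. The NULL handling below is an assumption (it mirrors MySQL's concat_ws, which skips NULLs):

  import java.util.Arrays;
  import java.util.List;

  public class ConcatWsDemo {
    // Simplified model of the extended concat_ws: args may be String or List<String>.
    static String concatWs(String sep, Object... args) {
      StringBuilder sb = new StringBuilder();
      boolean first = true;
      for (Object arg : args) {
        List<?> parts = (arg instanceof List) ? (List<?>) arg : Arrays.asList(arg);
        for (Object p : parts) {
          if (p == null) continue;        // assumed: NULL elements are skipped
          if (!first) sb.append(sep);
          sb.append(p);
          first = false;
        }
      }
      return sb.toString();
    }

    public static void main(String[] args) {
      // Mirrors the new @Description example above.
      System.out.println(concatWs(".", "www", Arrays.asList("facebook", "com")));
      // prints www.facebook.com
    }
  }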

svn commit: r1232766 - in /hive/trunk: common/src/java/org/apache/hadoop/hive/conf/ metastore/src/java/org/apache/hadoop/hive/metastore/ ql/src/test/queries/clientpositive/ ql/src/test/results/clientp

2012-01-17 Thread cws
Author: cws
Date: Wed Jan 18 07:10:02 2012
New Revision: 1232766

URL: http://svn.apache.org/viewvc?rev=1232766&view=rev
Log:
HIVE-2719. Revert HIVE-2589 (He Yongqiang via cws)

Removed:
hive/trunk/ql/src/test/queries/clientpositive/part_inherit_tbl_props.q
hive/trunk/ql/src/test/queries/clientpositive/part_inherit_tbl_props_empty.q

hive/trunk/ql/src/test/queries/clientpositive/part_inherit_tbl_props_with_star.q
hive/trunk/ql/src/test/results/clientpositive/part_inherit_tbl_props.q.out

hive/trunk/ql/src/test/results/clientpositive/part_inherit_tbl_props_empty.q.out

hive/trunk/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out
Modified:
hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1232766&r1=1232765&r2=1232766&view=diff
==
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
(original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Wed 
Jan 18 07:10:02 2012
@@ -119,7 +119,6 @@ public class HiveConf extends Configurat
   HiveConf.ConfVars.METASTORE_EVENT_EXPIRY_DURATION,
   HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL,
   HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS,
-  HiveConf.ConfVars.METASTORE_PART_INHERIT_TBL_PROPS,
   };
 
   /**
@@ -297,7 +296,6 @@ public class HiveConf extends Configurat
 METASTORE_NON_TRANSACTIONAL_READ("javax.jdo.option.NonTransactionalRead", true),
 METASTORE_CONNECTION_USER_NAME("javax.jdo.option.ConnectionUserName", "APP"),
 METASTORE_END_FUNCTION_LISTENERS("hive.metastore.end.function.listeners", ""),
-METASTORE_PART_INHERIT_TBL_PROPS("hive.metastore.partition.inherit.table.properties", ""),
 
 // CLI
 CLIIGNOREERRORS("hive.cli.errors.ignore", false),

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1232766&r1=1232765&r2=1232766&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Wed Jan 18 07:10:02 2012
@@ -26,17 +26,14 @@ import static org.apache.hadoop.hive.met
 import java.io.IOException;
 import java.util.AbstractMap;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.Formatter;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Properties;
-import java.util.Set;
 import java.util.Timer;
 import java.util.regex.Pattern;
 
@@ -1566,23 +1563,6 @@ public class HiveMetaStore extends Thrif
   part.putToParameters(Constants.DDL_TIME, Long.toString(time));
 }
 
-
-Map<String,String> tblParams = tbl.getParameters();
-String inheritProps = hiveConf.getVar(ConfVars.METASTORE_PART_INHERIT_TBL_PROPS).trim();
-// Default value is empty string in which case no properties will be inherited.
-// * implies all properties needs to be inherited
-Set<String> inheritKeys = new HashSet<String>(Arrays.asList(inheritProps.split(",")));
-if (inheritKeys.contains("*")) {
-  inheritKeys = tblParams.keySet();
-}
-
-for (String key : inheritKeys) {
-  String paramVal = tblParams.get(key);
-  if (null != paramVal) { // add the property only if it exists in table properties
-    part.putToParameters(key, paramVal);
-  }
-}
-
 success = ms.addPartition(part);
 
   } finally {




svn commit: r1220933 - in /hive/trunk/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/parse/ java/org/apache/hadoop/hive/ql/udf/generic/ test/queries/clientpositive/ test/r

2011-12-19 Thread cws
Author: cws
Date: Mon Dec 19 20:25:48 2011
New Revision: 1220933

URL: http://svn.apache.org/viewvc?rev=1220933&view=rev
Log:
HIVE-2005. Implement BETWEEN operator (Navis via cws)

Added:

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
hive/trunk/ql/src/test/queries/clientpositive/udf_between.q
hive/trunk/ql/src/test/results/clientpositive/udf_between.q.out
Modified:
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1220933&r1=1220932&r2=1220933&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java 
Mon Dec 19 20:25:48 2011
@@ -152,6 +152,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArrayContains;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFAssertTrue;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCase;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFCoalesce;
@@ -362,6 +363,7 @@ public final class FunctionRegistry {
 registerGenericUDF(">=", GenericUDFOPEqualOrGreaterThan.class);
 registerGenericUDF("not", GenericUDFOPNot.class);
 registerGenericUDF("!", GenericUDFOPNot.class);
+registerGenericUDF("between", GenericUDFBetween.class);
 
 registerGenericUDF("ewah_bitmap_and", GenericUDFEWAHBitmapAnd.class);
 registerGenericUDF("ewah_bitmap_or", GenericUDFEWAHBitmapOr.class);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1220933&r1=1220932&r2=1220933&view=diff
==
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Mon Dec 19 
20:25:48 2011
@@ -2003,6 +2003,10 @@ precedenceEqualExpression
 -> ^(KW_NOT ^(TOK_FUNCTION KW_IN $precedenceEqualExpression expressions))
 | (KW_IN expressions)
 -> ^(TOK_FUNCTION KW_IN $precedenceEqualExpression expressions)
+| ( KW_NOT KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) )
+   -> ^(TOK_FUNCTION Identifier["between"] KW_TRUE $left $min $max)
+| ( KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) )
+   -> ^(TOK_FUNCTION Identifier["between"] KW_FALSE $left $min $max)
 )*
 ;
 
@@ -2106,6 +2110,7 @@ sysFuncNames
 | KW_RLIKE
 | KW_REGEXP
 | KW_IN
+| KW_BETWEEN
 ;
 
 descFuncNames

Added: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java?rev=1220933&view=auto
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
 (added)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBetween.java
 Mon Dec 19 20:25:48 2011
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2
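
The grammar change above rewrites `x BETWEEN lo AND hi` into a call to the between UDF with KW_FALSE as its first argument, and `x NOT BETWEEN lo AND hi` with KW_TRUE, so a single function handles both polarities. A simplified model of that evaluation (plain Comparables standing in for the UDF's ObjectInspector machinery; not the committed code):

  public class BetweenDemo {
    // Simplified model of GenericUDFBetween: invert=true corresponds to NOT BETWEEN.
    static <T extends Comparable<T>> Boolean between(boolean invert, T x, T lo, T hi) {
      if (x == null || lo == null || hi == null) {
        return null;                 // SQL three-valued logic: result is unknown
      }
      boolean in = lo.compareTo(x) <= 0 && x.compareTo(hi) <= 0;
      return invert ? !in : in;
    }

    public static void main(String[] args) {
      System.out.println(between(false, 5, 1, 10));  // true  (5 BETWEEN 1 AND 10)
      System.out.println(between(true, 5, 1, 10));   // false (5 NOT BETWEEN 1 AND 10)
    }
  }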

svn commit: r1215012 - in /hive/tags: release-0.8.0-rc4/ release-0.8.0/

2011-12-15 Thread cws
Author: cws
Date: Fri Dec 16 00:54:18 2011
New Revision: 1215012

URL: http://svn.apache.org/viewvc?rev=1215012&view=rev
Log:
Hive-0.8.0 release.

Added:
hive/tags/release-0.8.0/
  - copied from r1215011, hive/tags/release-0.8.0-rc4/
Removed:
hive/tags/release-0.8.0-rc4/



svn commit: r1213518 - in /hive/branches/branch-0.8-r2/metastore/scripts/upgrade: derby/hive-schema-0.9.0.derby.sql mysql/hive-schema-0.9.0.mysql.sql

2011-12-12 Thread cws
Author: cws
Date: Tue Dec 13 00:29:40 2011
New Revision: 1213518

URL: http://svn.apache.org/viewvc?rev=1213518&view=rev
Log:
HIVE-BUILD. Remove 0.9.0 metastore schemas (cws)

Removed:

hive/branches/branch-0.8-r2/metastore/scripts/upgrade/derby/hive-schema-0.9.0.derby.sql

hive/branches/branch-0.8-r2/metastore/scripts/upgrade/mysql/hive-schema-0.9.0.mysql.sql



svn commit: r1213520 - /hive/branches/branch-0.8-r2/RELEASE_NOTES.txt

2011-12-12 Thread cws
Author: cws
Date: Tue Dec 13 00:31:19 2011
New Revision: 1213520

URL: http://svn.apache.org/viewvc?rev=1213520&view=rev
Log:
HIVE-BUILD. Update release notes (cws)

Modified:
hive/branches/branch-0.8-r2/RELEASE_NOTES.txt

Modified: hive/branches/branch-0.8-r2/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/RELEASE_NOTES.txt?rev=1213520&r1=1213519&r2=1213520&view=diff
==
--- hive/branches/branch-0.8-r2/RELEASE_NOTES.txt (original)
+++ hive/branches/branch-0.8-r2/RELEASE_NOTES.txt Tue Dec 13 00:31:19 2011
@@ -220,6 +220,7 @@ Release Notes - Hive - Version 0.8.0
 * [HIVE-2625] - Fix maven-build Ant target
 * [HIVE-2630] - TestHiveServer doesn't produce a JUnit report file
 * [HIVE-2634] - revert HIVE-2566
+* [HIVE-2643] - Recent patch prevents Hadoop confs from loading in 0.20.204
 
 ** Improvement
 * [HIVE-1078] - CREATE VIEW followup:  CREATE OR REPLACE




svn commit: r1212183 [5/5] - in /hive/branches/branch-0.8-r2/ql/src: java/org/apache/hadoop/hive/ql/exec/ java/org/apache/hadoop/hive/ql/optimizer/ test/results/clientpositive/

2011-12-08 Thread cws
Modified: 
hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union3.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union3.q.out?rev=1212183&r1=1212182&r2=1212183&view=diff
==
--- hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union3.q.out 
(original)
+++ hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union3.q.out 
Thu Dec  8 23:37:20 2011
@@ -88,62 +88,58 @@ STAGE PLANS:
   Stage: Stage-2
 Map Reduce
   Alias -> Map Operator Tree:
-
file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-11-11_11-01-00_814_2332491237056487962/-mr-10002
 
-  TableScan
-Union
-  Select Operator
-expressions:
-  expr: _col0
-  type: int
-outputColumnNames: _col0
-File Output Operator
-  compressed: false
-  GlobalTableId: 0
-  table:
-  input format: org.apache.hadoop.mapred.TextInputFormat
-  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-11-11_11-01-00_814_2332491237056487962/-mr-10003
 
-  TableScan
-Union
-  Select Operator
-expressions:
-  expr: _col0
-  type: int
-outputColumnNames: _col0
-File Output Operator
-  compressed: false
-  GlobalTableId: 0
-  table:
-  input format: org.apache.hadoop.mapred.TextInputFormat
-  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-11-11_11-01-00_814_2332491237056487962/-mr-10005
 
-  TableScan
-Union
-  Select Operator
-expressions:
-  expr: _col0
-  type: int
-outputColumnNames: _col0
-File Output Operator
-  compressed: false
-  GlobalTableId: 0
-  table:
-  input format: org.apache.hadoop.mapred.TextInputFormat
-  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
-
file:/var/folders/bZ/bZe+iKfoFTuPoShRd6dy6-tOU9Y/-Tmp-/njain/hive_2011-11-11_11-01-00_814_2332491237056487962/-mr-10007
 
-  TableScan
-Union
-  Select Operator
-expressions:
-  expr: _col0
-  type: int
-outputColumnNames: _col0
-File Output Operator
-  compressed: false
-  GlobalTableId: 0
-  table:
-  input format: org.apache.hadoop.mapred.TextInputFormat
-  output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
file:/tmp/njain/hive_2011-12-07_18-06-27_327_8112398566049944010/-mr-10002 
+  Union
+Select Operator
+  expressions:
+expr: _col0
+type: int
+  outputColumnNames: _col0
+  File Output Operator
+compressed: false
+GlobalTableId: 0
+table:
+input format: org.apache.hadoop.mapred.TextInputFormat
+output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
file:/tmp/njain/hive_2011-12-07_18-06-27_327_8112398566049944010/-mr-10003 
+  Union
+Select Operator
+  expressions:
+expr: _col0
+type: int
+  outputColumnNames: _col0
+  File Output Operator
+compressed: false
+GlobalTableId: 0
+table:
+input format: org.apache.hadoop.mapred.TextInputFormat
+output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+
file:/tmp/njain/hive_2011-12-07_18-06-27_327_8112398566049944010/-mr-10005 
+  Union
+Select Operator
+  expressions:
+expr: _col0
+type: int
+  outputColumnNames: _col0
+  File Output Operator
+compressed: false
+GlobalTableId: 0
+table:
+input format: org.apache.hadoop.mapred.TextInputFormat
+output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+

svn commit: r1212184 - in /hive/branches/branch-0.8-r2: RELEASE_NOTES.txt ql/src/test/queries/clientpositive/union25.q ql/src/test/results/clientpositive/union25.q.out

2011-12-08 Thread cws
Author: cws
Date: Thu Dec  8 23:38:53 2011
New Revision: 1212184

URL: http://svn.apache.org/viewvc?rev=1212184&view=rev
Log:
HIVE-2634. revert HIVE-2566 including missing sections (Namit Jain via cws)

Added:
hive/branches/branch-0.8-r2/ql/src/test/queries/clientpositive/union25.q
hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union25.q.out
Modified:
hive/branches/branch-0.8-r2/RELEASE_NOTES.txt

Modified: hive/branches/branch-0.8-r2/RELEASE_NOTES.txt
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/RELEASE_NOTES.txt?rev=1212184&r1=1212183&r2=1212184&view=diff
==
--- hive/branches/branch-0.8-r2/RELEASE_NOTES.txt (original)
+++ hive/branches/branch-0.8-r2/RELEASE_NOTES.txt Thu Dec  8 23:38:53 2011
@@ -219,6 +219,7 @@ Release Notes - Hive - Version 0.8.0
 * [HIVE-2624] - Fix eclipse classpath template broken in HIVE-2523
 * [HIVE-2625] - Fix maven-build Ant target
 * [HIVE-2630] - TestHiveServer doesn't produce a JUnit report file
+* [HIVE-2634] - revert HIVE-2566
 
 ** Improvement
 * [HIVE-1078] - CREATE VIEW followup:  CREATE OR REPLACE

Added: hive/branches/branch-0.8-r2/ql/src/test/queries/clientpositive/union25.q
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/ql/src/test/queries/clientpositive/union25.q?rev=1212184&view=auto
==
--- hive/branches/branch-0.8-r2/ql/src/test/queries/clientpositive/union25.q 
(added)
+++ hive/branches/branch-0.8-r2/ql/src/test/queries/clientpositive/union25.q 
Thu Dec  8 23:38:53 2011
@@ -0,0 +1,23 @@
+create table tmp_srcpart like srcpart;
+
+insert overwrite table tmp_srcpart partition (ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11';
+
+explain
+create table tmp_unionall as
+SELECT count(1) as counts, key, value
+FROM
+(
+  SELECT key, value FROM srcpart a WHERE a.ds='2008-04-08' and a.hr='11'
+
+UNION ALL
+
+  SELECT key, key as value FROM (
+SELECT distinct key FROM (
+  SELECT key, value FROM tmp_srcpart a WHERE a.ds='2008-04-08' and 
a.hr='11'
+UNION ALL
+  SELECT key, value FROM tmp_srcpart b WHERE b.ds='2008-04-08' and 
b.hr='11'
+)t
+  ) master_table
+) a GROUP BY key, value
+;

Added: 
hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union25.q.out
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union25.q.out?rev=1212184&view=auto
==
--- 
hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union25.q.out 
(added)
+++ 
hive/branches/branch-0.8-r2/ql/src/test/results/clientpositive/union25.q.out 
Thu Dec  8 23:38:53 2011
@@ -0,0 +1,330 @@
+PREHOOK: query: create table tmp_srcpart like srcpart
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table tmp_srcpart like srcpart
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_srcpart
+PREHOOK: query: insert overwrite table tmp_srcpart partition (ds='2008-04-08', 
hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Output: default@tmp_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: query: insert overwrite table tmp_srcpart partition 
(ds='2008-04-08', hr='11')
+select key, value from srcpart where ds='2008-04-08' and hr='11'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Output: default@tmp_srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Lineage: tmp_srcpart PARTITION(ds=2008-04-08,hr=11).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: tmp_srcpart PARTITION(ds=2008-04-08,hr=11).value SIMPLE 
[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
+PREHOOK: query: explain
+create table tmp_unionall as
+SELECT count(1) as counts, key, value
+FROM
+(
+  SELECT key, value FROM srcpart a WHERE a.ds='2008-04-08' and a.hr='11'
+
+UNION ALL
+
+  SELECT key, key as value FROM (
+SELECT distinct key FROM (
+  SELECT key, value FROM tmp_srcpart a WHERE a.ds='2008-04-08' and 
a.hr='11'
+UNION ALL
+  SELECT key, value FROM tmp_srcpart b WHERE b.ds='2008-04-08' and 
b.hr='11'
+)t
+  ) master_table
+) a GROUP BY key, value
+PREHOOK: type: CREATETABLE_AS_SELECT
+POSTHOOK: query: explain
+create table tmp_unionall as
+SELECT count(1) as counts, key, value
+FROM
+(
+  SELECT key, value FROM srcpart a WHERE a.ds='2008-04-08' and a.hr='11'
+
+UNION ALL
+
+  SELECT key, key as value FROM (
+SELECT distinct key FROM (
+  SELECT key, value FROM tmp_srcpart a WHERE a.ds='2008-04-08' and 
a.hr='11'
+UNION ALL
+  SELECT key, value FROM tmp_srcpart b WHERE b.ds='2008-04-08' and 
b.hr='11'
+)t
+  ) master_table
+) a GROUP BY key, value

svn commit: r1211752 - /hive/branches/branch-0.8-r2/build.properties

2011-12-07 Thread cws
Author: cws
Date: Thu Dec  8 03:49:05 2011
New Revision: 1211752

URL: http://svn.apache.org/viewvc?rev=1211752&view=rev
Log:
HIVE-BUILD. Set version to 0.8.0-SNAPSHOT (cws)

Modified:
hive/branches/branch-0.8-r2/build.properties

Modified: hive/branches/branch-0.8-r2/build.properties
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/build.properties?rev=1211752&r1=1211751&r2=1211752&view=diff
==
--- hive/branches/branch-0.8-r2/build.properties (original)
+++ hive/branches/branch-0.8-r2/build.properties Thu Dec  8 03:49:05 2011
@@ -1,6 +1,6 @@
 Name=Hive
 name=hive
-version=0.9.0-SNAPSHOT
+version=0.8.0-SNAPSHOT
 year=2011
 
 javac.debug=on




svn commit: r1211763 - /hive/branches/branch-0.8-r2/README.txt

2011-12-07 Thread cws
Author: cws
Date: Thu Dec  8 06:07:09 2011
New Revision: 1211763

URL: http://svn.apache.org/viewvc?rev=1211763&view=rev
Log:
HIVE-BUILD. Update release notes (cws)

Modified:
hive/branches/branch-0.8-r2/README.txt

Modified: hive/branches/branch-0.8-r2/README.txt
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8-r2/README.txt?rev=1211763&r1=1211762&r2=1211763&view=diff
==
--- hive/branches/branch-0.8-r2/README.txt (original)
+++ hive/branches/branch-0.8-r2/README.txt Thu Dec  8 06:07:09 2011
@@ -86,11 +86,17 @@ Upgrading from older versions of Hive
   you are using a different database for your MetaStore you will need
   to provide your own upgrade script.
 
-- Hive @VERSION@ includes new configuration properties. If you
-  are upgrading from an earlier version of Hive it is imperative
-  that you replace all of the old copies of the hive-default.xml
-  configuration file with the new version located in the conf/
-  directory.
+- Please be aware that the Hive 0.8.0 MetaStore upgrade scripts remove
+  support for partition-level column information from the MetaStore
+  schema. Since this information was not previously exposed by Hive
+  the only people potentially impacted by this change are those who
+  access the MetaStore directly via the Thrift API. If you fall into
+  the latter category please consult HIVE-2246 for more information.
+
+- Hive 0.8.0 ignores the hive-default.xml file, though we continue
+  to provide it for reference purposes. Any changes that you
+  previously made to hive-default.xml must now be moved to the
+  hive-site.xml file.
 
 
 Useful mailing lists




svn commit: r1209384 - in /hive/trunk: ./ bin/ builtins/ builtins/src/ builtins/src/org/ builtins/src/org/apache/ builtins/src/org/apache/hive/ builtins/src/org/apache/hive/builtins/ builtins/test/ ec

2011-12-02 Thread cws
Author: cws
Date: Fri Dec  2 08:14:10 2011
New Revision: 1209384

URL: http://svn.apache.org/viewvc?rev=1209384&view=rev
Log:
HIVE-2523. Add a new builtins subproject (John Sichi via cws)

Added:
hive/trunk/builtins/
hive/trunk/builtins/build-plugin.xml
hive/trunk/builtins/build.xml
hive/trunk/builtins/ivy.xml
hive/trunk/builtins/src/
hive/trunk/builtins/src/org/
hive/trunk/builtins/src/org/apache/
hive/trunk/builtins/src/org/apache/hive/
hive/trunk/builtins/src/org/apache/hive/builtins/
hive/trunk/builtins/src/org/apache/hive/builtins/BuiltinUtils.java
hive/trunk/builtins/src/org/apache/hive/builtins/UDAFUnionMap.java
hive/trunk/builtins/test/
hive/trunk/builtins/test/cleanup.sql
hive/trunk/builtins/test/iris.txt
hive/trunk/builtins/test/onerow.txt
hive/trunk/builtins/test/setup.sql
Modified:
hive/trunk/bin/hive
hive/trunk/build.xml
hive/trunk/eclipse-templates/.classpath
hive/trunk/pdk/scripts/build-plugin.xml
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
hive/trunk/ql/src/test/results/clientpositive/show_functions.q.out

Modified: hive/trunk/bin/hive
URL: 
http://svn.apache.org/viewvc/hive/trunk/bin/hive?rev=1209384&r1=1209383&r2=1209384&view=diff
==
--- hive/trunk/bin/hive (original)
+++ hive/trunk/bin/hive Fri Dec  2 08:14:10 2011
@@ -74,6 +74,11 @@ if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ];
   exit 1;
 fi
 
+if [ ! -f ${HIVE_LIB}/hive-builtins-*.jar ]; then
+  echo "Missing Hive Builtins Jar: ${HIVE_LIB}/hive-builtins-*.jar"
+  exit 1;
+fi
+
 if [ ! -f ${HIVE_LIB}/hive-metastore-*.jar ]; then
   echo "Missing Hive MetaStore Jar"
   exit 2;

Modified: hive/trunk/build.xml
URL: 
http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1209384&r1=1209383&r2=1209384&view=diff
==
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Fri Dec  2 08:14:10 2011
@@ -139,7 +139,7 @@
   <subant target="@{target}">
 <property name="build.dir.hive" location="${build.dir.hive}"/>
 <property name="is-offline" value="${is-offline}"/>
-<filelist dir="." files="ant/build.xml,shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,contrib/build.xml,service/build.xml,cli/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml,ant/build.xml,pdk/build.xml"/>
+<filelist dir="." files="ant/build.xml,shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,contrib/build.xml,service/build.xml,cli/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml,ant/build.xml,pdk/build.xml,builtins/build.xml"/>
   </subant>
 </sequential>
   </macrodef>
@@ -150,7 +150,7 @@
   <subant target="@{target}">
 <property name="build.dir.hive" location="${build.dir.hive}"/>
 <property name="is-offline" value="${is-offline}"/>
-<filelist dir="." files="shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,contrib/build.xml,service/build.xml,cli/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml,pdk/build.xml"/>
+<filelist dir="." files="shims/build.xml,common/build.xml,serde/build.xml,metastore/build.xml,ql/build.xml,contrib/build.xml,service/build.xml,cli/build.xml,jdbc/build.xml,hwi/build.xml,hbase-handler/build.xml,pdk/build.xml,builtins/build.xml"/>
   </subant>
 </sequential>
   </macrodef>
@@ -519,6 +519,13 @@
 symlink overwrite=true link=${target.lib.dir}/libthrift.jar 
resource=libthrift-${libthrift.version}.jar/
 symlink overwrite=true link=${target.lib.dir}/libfb303.jar 
resource=libfb303-${libfb303.version}.jar/
 symlink overwrite=true link=${target.lib.dir}/hive_contrib.jar 
resource=hive-contrib-${version}.jar/
+!-- special case because builtins compilation depends on packaging
+ up everything else first --
+ant antfile=${hive.root}/builtins/build.xml target=package
+ inheritAll=false /
+copy todir=${target.lib.dir} preservelastmodified=true flatten=true
+  fileset file=${build.dir.hive}/builtins/hive-builtins-${version}.jar/
+/copy
   /target
 
 
@@ -665,6 +672,7 @@
   packageset dir=${build.dir.hive}/ql/gen/antlr/gen-java/
   packageset dir=shims/src/common/java/
   packageset dir=pdk/src/java/
+  packageset dir=builtins/src/
 
   link href=${javadoc.link.java}/
 
@@ -902,6 +910,8 @@
   todir=${mvn.jar.dir} /
 copy file=${build.dir.hive}/pdk/hive-pdk-${version}.jar
   todir=${mvn.jar.dir} /
+copy file=${build.dir.hive}/pdk/hive-builtins-${version}.jar
+  todir=${mvn.jar.dir} /
 
 !-- copy over maven pom files created using
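
Aside: among the sources added by this commit is UDAFUnionMap.java. Judging
by the name it is a built-in aggregate that unions map values across rows;
its actual implementation (UDAF plumbing, ObjectInspectors) lives in the
added files and is not reproduced here. The following is only a hedged,
self-contained Java sketch of the core merge step such an aggregate has to
perform -- every name in it is invented for illustration:

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative stand-in for the merge phase of a map-union aggregate.
    // Partial maps are folded into one accumulator; on a key collision the
    // later value wins, which is an assumed (not verified) policy.
    public class UnionMapSketch {

      static <K, V> Map<K, V> merge(Map<K, V> acc, Map<K, V> partial) {
        if (partial != null) {
          acc.putAll(partial);
        }
        return acc;
      }

      public static void main(String[] args) {
        Map<String, Integer> a = new HashMap<String, Integer>();
        a.put("x", 1);
        Map<String, Integer> b = new HashMap<String, Integer>();
        b.put("y", 2);
        System.out.println(merge(a, b)); // {x=1, y=2} (iteration order may vary)
      }
    }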

svn commit: r1202443 - /hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

2011-11-15 Thread cws
Author: cws
Date: Tue Nov 15 22:08:38 2011
New Revision: 1202443

URL: http://svn.apache.org/viewvc?rev=1202443&view=rev
Log:
HIVE-2411. Metastore server tries to connect to NN without authenticating 
itself (Ashutosh Chauhan via cws)

Modified:

hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

Modified: 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1202443&r1=1202442&r2=1202443&view=diff
==
--- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Tue Nov 15 22:08:38 2011
@@ -3668,7 +3668,6 @@ public class HiveMetaStore extends Thrif
   public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
       HiveConf conf) throws Throwable {
     try {
-      HMSHandler handler = new HMSHandler("new db based metaserver", conf);
 
       // Server will create new threads up to max as necessary. After an idle
       // period, it will destroy threads to keep the number of threads in the
@@ -3681,7 +3680,7 @@ public class HiveMetaStore extends Thrif
       TServerTransport serverTransport = tcpKeepAlive ?
           new TServerSocketKeepAlive(port) : new TServerSocket(port);
 
-      TProcessor processor = new ThriftHiveMetastore.Processor(handler);
+      TProcessor processor;
       TTransportFactory transFactory;
       if (useSasl) {
         saslServer = bridge.createServer(
@@ -3691,8 +3690,11 @@
         // start delegation token manager
         saslServer.startDelegationTokenSecretManager(conf);
         transFactory = saslServer.createTransportFactory();
-        processor = saslServer.wrapProcessor(processor);
+        processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor(
+            new HMSHandler("new db based metaserver", conf)));
       } else {
+        processor = new ThriftHiveMetastore.Processor(
+            new HMSHandler("new db based metaserver", conf));
         transFactory = new TTransportFactory();
       }
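
The shape of this fix deserves a note: before the change, HMSHandler (whose
constructor can open a connection to the NameNode) was built before any SASL
machinery existed, so that first connection went out unauthenticated. After
the change the Thrift processor, and with it the handler, is constructed
inside each branch only once the security setup is settled. A minimal,
self-contained Java sketch of the same ordering -- every type below is a
stand-in, not Hive's real API:

    // Stand-in types; the real code uses ThriftHiveMetastore.Processor,
    // HMSHandler and the HadoopThriftAuthBridge server.
    public class StartOrderSketch {

      interface TProcessor { }

      static class Handler {
        Handler() {
          // In HiveMetaStore, construction may reach out to the NameNode;
          // running this before SASL setup was the bug.
          System.out.println("handler created");
        }
      }

      static class SaslServer {
        TProcessor wrapProcessor(TProcessor p) {
          System.out.println("wrapping processor with SASL");
          return p;
        }
      }

      static TProcessor newProcessor() {
        new Handler(); // the processor would hold the freshly built handler
        return new TProcessor() { };
      }

      public static void main(String[] args) {
        boolean useSasl = true;
        TProcessor processor;
        if (useSasl) {
          SaslServer saslServer = new SaslServer();             // auth first,
          processor = saslServer.wrapProcessor(newProcessor()); // handler second
        } else {
          processor = newProcessor();
        }
        System.out.println("server would now serve via " + processor);
      }
    }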
 




svn commit: r1202444 - /hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

2011-11-15 Thread cws
Author: cws
Date: Tue Nov 15 22:11:22 2011
New Revision: 1202444

URL: http://svn.apache.org/viewvc?rev=1202444&view=rev
Log:
HIVE-2411. Metastore server tries to connect to NN without authenticating 
itself (Ashutosh Chauhan via cws)

Modified:

hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java

Modified: 
hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: 
http://svn.apache.org/viewvc/hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1202444&r1=1202443&r2=1202444&view=diff
==
--- 
hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 (original)
+++ 
hive/branches/branch-0.8/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
 Tue Nov 15 22:11:22 2011
@@ -3523,7 +3523,6 @@ public class HiveMetaStore extends Thrif
   public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
       HiveConf conf) throws Throwable {
     try {
-      HMSHandler handler = new HMSHandler("new db based metaserver", conf);
 
       // Server will create new threads up to max as necessary. After an idle
       // period, it will destroy threads to keep the number of threads in the
@@ -3536,7 +3535,7 @@
       TServerTransport serverTransport = tcpKeepAlive ?
           new TServerSocketKeepAlive(port) : new TServerSocket(port);
 
-      TProcessor processor = new ThriftHiveMetastore.Processor(handler);
+      TProcessor processor;
       TTransportFactory transFactory;
       if (useSasl) {
         saslServer = bridge.createServer(
@@ -3546,8 +3545,11 @@
         // start delegation token manager
         saslServer.startDelegationTokenSecretManager(conf);
         transFactory = saslServer.createTransportFactory();
-        processor = saslServer.wrapProcessor(processor);
+        processor = saslServer.wrapProcessor(new ThriftHiveMetastore.Processor(
+            new HMSHandler("new db based metaserver", conf)));
       } else {
+        processor = new ThriftHiveMetastore.Processor(
+            new HMSHandler("new db based metaserver", conf));
         transFactory = new TTransportFactory();
       }
 




svn commit: r1171253 - in /hive/trunk: jdbc/src/test/org/apache/hadoop/hive/jdbc/ ql/src/java/org/apache/hadoop/hive/ql/processors/ service/src/java/org/apache/hadoop/hive/service/

2011-09-15 Thread cws
Author: cws
Date: Thu Sep 15 20:05:46 2011
New Revision: 1171253

URL: http://svn.apache.org/viewvc?rev=1171253&view=rev
Log:
HIVE-2398. Hive server doesn't return schema for 'set' command (Prasad Mujumdar 
via cws)

Modified:
hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java

hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java

Modified: 
hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1171253&r1=1171252&r2=1171253&view=diff
==
--- hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java 
(original)
+++ hive/trunk/jdbc/src/test/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java 
Thu Sep 15 20:05:46 2011
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.jdbc;
 
+import static 
org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -1008,4 +1010,28 @@ public class TestJdbcDriver extends Test
    assertEquals("Invalid DriverPropertyInfo value", value, dpi.value);
    assertEquals("Invalid DriverPropertyInfo required", false, dpi.required);
   }
+
+
+  /**
+   * validate schema generated by set command
+   * @throws SQLException
+   */
+public void testSetCommand() throws SQLException {
+  // execute set command
+  String sql = "set -v";
+  Statement stmt = con.createStatement();
+  ResultSet res = stmt.executeQuery(sql);
+
+  // Validate resultset columns
+  ResultSetMetaData md = res.getMetaData() ;
+  assertEquals(1, md.getColumnCount());
+  assertEquals(SET_COLUMN_NAME, md.getColumnLabel(1));
+
+  //check if there is data in the resultset
+  assertTrue("Nothing returned by set -v", res.next());
+
+  res.close();
+  stmt.close();
+  }
+
 }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java?rev=1171253&r1=1171252&r2=1171253&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
 Thu Sep 15 20:05:46 2011
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import org.apache.hadoop.hive.metastore.api.Schema;
+
 /**
  * Encapsulates the basic response info returned by classes that implement the
  * <code>CommandProcessor</code> interface. Typically <code>errorMessage</code>
@@ -28,18 +30,25 @@ public class CommandProcessorResponse {
   private int responseCode;
   private String errorMessage;
   private String SQLState;
+  private Schema resSchema;
 
   public CommandProcessorResponse(int responseCode) {
 this(responseCode, null, null);
   }
 
   public CommandProcessorResponse(int responseCode, String errorMessage, 
String SQLState) {
+this(responseCode, errorMessage, SQLState, null);
+  }
+
+  public CommandProcessorResponse(int responseCode, String errorMessage, 
String SQLState, Schema schema) {
 this.responseCode = responseCode;
 this.errorMessage = errorMessage;
 this.SQLState = SQLState;
+this.resSchema = schema;
   }
 
   public int getResponseCode() { return responseCode; }
   public String getErrorMessage() { return errorMessage; }
   public String getSQLState() { return SQLState; }
+  public Schema getSchema() { return resSchema; }
 }
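
For context, here is a hedged sketch of how a processor such as SetProcessor
can use the new four-argument constructor to hand a one-column result schema
back to clients (the JDBC driver's ResultSetMetaData, as exercised by the
test above). Schema and FieldSchema are the Thrift-generated metastore
types; okWithSchema and the literal "string" type name are illustrative
assumptions, not code from this commit:

    import org.apache.hadoop.hive.metastore.api.FieldSchema;
    import org.apache.hadoop.hive.metastore.api.Schema;
    import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

    // Hedged sketch: attach a single string column to a successful response.
    public class ResponseWithSchemaSketch {

      static CommandProcessorResponse okWithSchema(String columnName) {
        Schema schema = new Schema();
        // FieldSchema(name, type, comment); "string" stands in for the
        // serde Constants.STRING_TYPE_NAME imported by SetProcessor below.
        schema.addToFieldSchemas(new FieldSchema(columnName, "string", ""));
        return new CommandProcessorResponse(0, null, null, schema);
      }
    }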

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1171253&r1=1171252&r2=1171253&view=diff
==
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java 
Thu Sep 15 20:05:46 2011
@@ -18,11 +18,17 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import static org.apache.hadoop.hive.serde.Constants.STRING_TYPE_NAME;
+import static org.apache.hadoop.hive.serde.Constants.SERIALIZATION_NULL_FORMAT;
+import static 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
+
 import java.util.Map;
 import java.util.Properties;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
+import org.apache.hadoop.hive.metastore.api.Schema;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import
