Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join2.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join2.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join2.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join2.q.xml Thu Oct 30 16:22:33 2014
@@ -465,6 +465,14 @@
                    </void> 
                   </object> 
                  </void> 
+                 <void property="reducerTraits">
+                  <void method="clear"/>
+                  <void method="add">
+                   <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                    <string>UNSET</string>
+                   </object>
+                  </void>
+                 </void>
                  <void property="valueCols"> 
                   <object class="java.util.ArrayList"> 
                    <void method="add"> 
@@ -741,6 +749,12 @@
                       </void> 
                      </object> 
                     </void> 
+                    <void property="reducerTraits">
+                     <void method="clear"/>
+                     <void method="add">
+                      <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                     </void>
+                    </void>
                     <void property="tag"> 
                      <int>1</int> 
                     </void> 
@@ -1944,6 +1958,12 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="reducerTraits">
+                 <void method="clear"/>
+                 <void method="add">
+                  <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                 </void>
+                </void>
                 <void property="tag"> 
                  <int>1</int> 
                 </void> 
@@ -2273,6 +2293,12 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="reducerTraits">
+                 <void method="clear"/>
+                 <void method="add">
+                  <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                 </void>
+                </void>
                 <void property="valueCols"> 
                  <object class="java.util.ArrayList"/> 
                 </void> 

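A note on the recurring reducerTraits hunks in these plan files: the compiled
plans are serialized with java.beans.XMLEncoder, which writes an enum value as
a valueOf() call on the enum class, hence the <object ... method="valueOf">
elements above. A minimal sketch of that behavior, using a stand-in enum in
place of org.apache.hadoop.hive.ql.plan.ReduceSinkDesc.ReducerTraits:

    import java.beans.XMLEncoder;
    import java.io.ByteArrayOutputStream;

    public class EnumEncodingDemo {
      // Stand-in for ReduceSinkDesc.ReducerTraits; the real constant set may differ.
      public enum ReducerTraits { UNSET, FIXED, UNIFORM }

      public static void main(String[] args) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (XMLEncoder encoder = new XMLEncoder(out)) {
          encoder.writeObject(ReducerTraits.UNSET);
        }
        // Prints an <object class="..." method="valueOf"><string>UNSET</string>
        // element like the ones added in the plans above.
        System.out.println(out);
      }
    }
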
Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join3.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join3.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join3.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join3.q.xml Thu Oct 30 16:22:33 2014
@@ -534,6 +534,14 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="reducerTraits">
+                 <void method="clear"/>
+                 <void method="add">
+                  <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                   <string>UNSET</string>
+                  </object>
+                 </void>
+                </void>
                 <void property="tag"> 
                  <int>1</int> 
                 </void> 
@@ -920,6 +928,12 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="reducerTraits">
+                 <void method="clear"/>
+                 <void method="add">
+                  <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                 </void>
+                </void>
                 <void property="tag"> 
                  <int>2</int> 
                 </void> 
@@ -1284,6 +1298,12 @@
                   </void> 
                  </object> 
                 </void> 
+                <void property="reducerTraits">
+                 <void method="clear"/>
+                 <void method="add">
+                  <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                 </void>
+                </void>
                 <void property="valueCols"> 
                  <object class="java.util.ArrayList"/> 
                 </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join4.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join4.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join4.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join4.q.xml Thu Oct 30 16:22:33 2014
@@ -336,6 +336,14 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                      <string>UNSET</string>
+                     </object>
+                    </void>
+                   </void>
                    <void property="valueCols"> 
                     <object class="java.util.ArrayList"> 
                      <void method="add"> 
@@ -911,6 +919,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>1</int> 
                    </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join5.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join5.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join5.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join5.q.xml Thu Oct 30 16:22:33 2014
@@ -336,6 +336,14 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                      <string>UNSET</string>
+                     </object>
+                    </void>
+                   </void>
                    <void property="valueCols"> 
                     <object class="java.util.ArrayList"> 
                      <void method="add"> 
@@ -911,6 +919,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>1</int> 
                    </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join6.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join6.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join6.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join6.q.xml Thu Oct 30 16:22:33 2014
@@ -336,6 +336,14 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                      <string>UNSET</string>
+                     </object>
+                    </void>
+                   </void>
                    <void property="valueCols"> 
                     <object class="java.util.ArrayList"> 
                      <void method="add"> 
@@ -911,6 +919,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>1</int> 
                    </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join7.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join7.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join7.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join7.q.xml Thu Oct 30 16:22:33 2014
@@ -401,6 +401,14 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                      <string>UNSET</string>
+                     </object>
+                    </void>
+                   </void>
                    <void property="valueCols"> 
                     <object class="java.util.ArrayList"> 
                      <void method="add"> 
@@ -976,6 +984,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>1</int> 
                    </void> 
@@ -1515,6 +1529,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>2</int> 
                    </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/join8.q.xml Thu Oct 30 16:22:33 2014
@@ -336,6 +336,14 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object id="ReduceSinkDesc$ReducerTraits0" class="org.apache.hadoop.hive.ql.plan.ReduceSinkDesc$ReducerTraits" method="valueOf">
+                      <string>UNSET</string>
+                     </object>
+                    </void>
+                   </void>
                    <void property="valueCols"> 
                     <object class="java.util.ArrayList"> 
                      <void method="add"> 
@@ -952,6 +960,12 @@
                      </void> 
                     </object> 
                    </void> 
+                   <void property="reducerTraits">
+                    <void method="clear"/>
+                    <void method="add">
+                     <object idref="ReduceSinkDesc$ReducerTraits0"/>
+                    </void>
+                   </void>
                    <void property="tag"> 
                     <int>1</int> 
                    </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/sample1.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/sample1.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/sample1.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/sample1.q.xml Thu Oct 30 16:22:33 2014
@@ -351,41 +351,35 @@
                <object class="java.util.HashMap"> 
                 <void method="put"> 
                  <string>_col3</string> 
-                 <object id="ExprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                  <void property="column"> 
+                 <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
                    <string>hr</string> 
                   </void> 
-                  <void property="isPartitionColOrVirtualCol"> 
-                   <boolean>true</boolean> 
-                  </void> 
-                  <void property="tabAlias"> 
-                   <string>s</string> 
-                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
+                  <void property="value"> 
+                   <string>11</string> 
+                  </void> 
                  </object> 
                 </void> 
                 <void method="put"> 
                  <string>_col2</string> 
-                 <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
-                  <void property="column"> 
+                 <object id="ExprNodeConstantDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
                    <string>ds</string> 
                   </void> 
-                  <void property="isPartitionColOrVirtualCol"> 
-                   <boolean>true</boolean> 
-                  </void> 
-                  <void property="tabAlias"> 
-                   <string>s</string> 
-                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
+                  <void property="value"> 
+                   <string>2008-04-08</string> 
+                  </void> 
                  </object> 
                 </void> 
                 <void method="put"> 
                  <string>_col1</string> 
-                 <object id="ExprNodeColumnDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                 <object id="ExprNodeColumnDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
                   <void property="column"> 
                    <string>value</string> 
                   </void> 
@@ -399,7 +393,7 @@
                 </void> 
                 <void method="put"> 
                  <string>_col0</string> 
-                 <object id="ExprNodeColumnDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
+                 <object id="ExprNodeColumnDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc"> 
                   <void property="column"> 
                    <string>key</string> 
                   </void> 
@@ -418,16 +412,16 @@
                 <void property="colList"> 
                  <object class="java.util.ArrayList"> 
                   <void method="add"> 
-                   <object idref="ExprNodeColumnDesc3"/> 
+                   <object idref="ExprNodeColumnDesc1"/> 
                   </void> 
                   <void method="add"> 
-                   <object idref="ExprNodeColumnDesc2"/> 
+                   <object idref="ExprNodeColumnDesc0"/> 
                   </void> 
                   <void method="add"> 
-                   <object idref="ExprNodeColumnDesc1"/> 
+                   <object idref="ExprNodeConstantDesc1"/> 
                   </void> 
                   <void method="add"> 
-                   <object idref="ExprNodeColumnDesc0"/> 
+                   <object idref="ExprNodeConstantDesc0"/> 
                   </void> 
                  </object> 
                 </void> 
@@ -727,38 +721,6 @@
                 </void> 
                </object> 
               </void> 
-              <void method="add"> 
-               <object id="ColumnInfo2" class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                <void property="internalName"> 
-                 <string>ds</string> 
-                </void> 
-                <void property="tabAlias"> 
-                 <string>s</string> 
-                </void> 
-                <void property="type"> 
-                 <object idref="PrimitiveTypeInfo0"/> 
-                </void> 
-                <void property="typeName"> 
-                 <string>string</string> 
-                </void> 
-               </object> 
-              </void> 
-              <void method="add"> 
-               <object id="ColumnInfo3" class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
-                <void property="internalName"> 
-                 <string>hr</string> 
-                </void> 
-                <void property="tabAlias"> 
-                 <string>s</string> 
-                </void> 
-                <void property="type"> 
-                 <object idref="PrimitiveTypeInfo0"/> 
-                </void> 
-                <void property="typeName"> 
-                 <string>string</string> 
-                </void> 
-               </object> 
-              </void> 
              </void> 
             </object> 
            </void> 
@@ -806,12 +768,6 @@
           <void method="add"> 
            <string>value</string> 
           </void> 
-          <void method="add"> 
-           <string>ds</string> 
-          </void> 
-          <void method="add"> 
-           <string>hr</string> 
-          </void> 
          </object> 
         </void> 
         <void property="schema"> 
@@ -824,10 +780,42 @@
             <object idref="ColumnInfo1"/> 
            </void> 
            <void method="add"> 
-            <object idref="ColumnInfo2"/> 
+            <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+             <void property="alias"> 
+              <string>ds</string> 
+             </void> 
+             <void property="internalName"> 
+              <string>ds</string> 
+             </void> 
+             <void property="tabAlias"> 
+              <string>s</string> 
+             </void> 
+             <void property="type"> 
+              <object idref="PrimitiveTypeInfo0"/> 
+             </void> 
+             <void property="typeName"> 
+              <string>string</string> 
+             </void> 
+            </object> 
            </void> 
            <void method="add"> 
-            <object idref="ColumnInfo3"/> 
+            <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 
+             <void property="alias"> 
+              <string>hr</string> 
+             </void> 
+             <void property="internalName"> 
+              <string>hr</string> 
+             </void> 
+             <void property="tabAlias"> 
+              <string>s</string> 
+             </void> 
+             <void property="type"> 
+              <object idref="PrimitiveTypeInfo0"/> 
+             </void> 
+             <void property="typeName"> 
+              <string>string</string> 
+             </void> 
+            </object> 
            </void> 
            <void method="add"> 
             <object class="org.apache.hadoop.hive.ql.exec.ColumnInfo"> 

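The sample1.q plan change above is constant propagation at work: the query
reads a single partition, so references to the partition columns ds and hr are
folded into ExprNodeConstantDesc nodes holding the partition values, with
foldedFromCol recording the column each constant replaced. A small sketch of
building such a node, assuming the constructor and bean setter implied by the
serialized properties above:

    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class FoldedConstantSketch {
      public static void main(String[] args) {
        // Folded stand-in for partition column `ds` when ds='2008-04-08'.
        ExprNodeConstantDesc folded = new ExprNodeConstantDesc(
            TypeInfoFactory.stringTypeInfo, "2008-04-08");
        // Recorded so the plan remembers which column the constant came from.
        folded.setFoldedFromCol("ds");
        System.out.println(folded.getExprString());
      }
    }
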
Modified: hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/udf1.q.xml Thu Oct 30 16:22:33 2014
@@ -535,6 +535,9 @@
                 <void method="put"> 
                  <string>_col8</string> 
                 <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;&apos; rlike &apos;.*&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -546,6 +549,9 @@
                 <void method="put"> 
                  <string>_col7</string> 
                 <object id="ExprNodeConstantDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;ab&apos; like &apos;a&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -557,6 +563,9 @@
                 <void method="put"> 
                  <string>_col6</string> 
                 <object id="ExprNodeConstantDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;ab&apos; like &apos;_a%&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -568,6 +577,9 @@
                 <void method="put"> 
                  <string>_col5</string> 
                 <object id="ExprNodeConstantDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;ab&apos; like &apos;\%\_&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -579,6 +591,9 @@
                 <void method="put"> 
                  <string>_col4</string> 
                 <object id="ExprNodeConstantDesc4" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;%_&apos; like &apos;\%\_&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -590,6 +605,9 @@
                 <void method="put"> 
                  <string>_col3</string> 
                 <object id="ExprNodeConstantDesc5" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;ab&apos; like &apos;%a_&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -601,6 +619,9 @@
                 <void method="put"> 
                  <string>_col2</string> 
                 <object id="ExprNodeConstantDesc6" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;ab&apos; like &apos;%a%&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -612,6 +633,9 @@
                 <void method="put"> 
                  <string>_col1</string> 
                 <object id="ExprNodeConstantDesc7" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;b&apos; like &apos;%a%&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -623,6 +647,9 @@
                 <void method="put"> 
                  <string>_col9</string> 
                 <object id="ExprNodeConstantDesc8" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;a&apos; rlike &apos;[ab]&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -634,6 +661,9 @@
                 <void method="put"> 
                  <string>_col13</string> 
                 <object id="ExprNodeConstantDesc9" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>regexp_replace(&apos;abc&apos;, &apos;b&apos;, &apos;c&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo1"/> 
                   </void> 
@@ -645,6 +675,9 @@
                 <void method="put"> 
                  <string>_col12</string> 
                 <object id="ExprNodeConstantDesc10" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;hadoop&apos; rlike &apos;o*&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -656,6 +689,9 @@
                 <void method="put"> 
                  <string>_col11</string> 
                 <object id="ExprNodeConstantDesc11" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;hadoop&apos; rlike &apos;[a-z]*&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -667,6 +703,9 @@
                 <void method="put"> 
                  <string>_col10</string> 
                 <object id="ExprNodeConstantDesc12" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;&apos; rlike &apos;[ab]&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 
@@ -678,6 +717,9 @@
                 <void method="put"> 
                  <string>_col16</string> 
                 <object id="ExprNodeConstantDesc13" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>regexp_replace(&apos;hadoop&apos;, &apos;(.)[a-z]*&apos;, &apos;$1ive&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo1"/> 
                   </void> 
@@ -689,6 +731,9 @@
                 <void method="put"> 
                  <string>_col15</string> 
                 <object id="ExprNodeConstantDesc14" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>regexp_replace(&apos;abbbb&apos;, &apos;bb&apos;, &apos;b&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo1"/> 
                   </void> 
@@ -700,6 +745,9 @@
                 <void method="put"> 
                  <string>_col14</string> 
                 <object id="ExprNodeConstantDesc15" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>regexp_replace(&apos;abc&apos;, &apos;z&apos;, &apos;a&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo1"/> 
                   </void> 
@@ -711,6 +759,9 @@
                 <void method="put"> 
                  <string>_col0</string> 
                 <object id="ExprNodeConstantDesc16" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+                  <void property="foldedFromCol"> 
+                   <string>(&apos;a&apos; like &apos;%a%&apos;)</string> 
+                  </void> 
                   <void property="typeInfo"> 
                    <object idref="PrimitiveTypeInfo0"/> 
                   </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/udf4.q.xml Thu Oct 30 16:22:33 2014
@@ -548,6 +548,9 @@
              <void method="put"> 
               <string>_col8</string> 
               <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>sqrt(0.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 
@@ -563,6 +566,9 @@
              <void method="put"> 
               <string>_col6</string> 
               <object id="ExprNodeConstantDesc1" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>sqrt(1.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 
@@ -574,6 +580,9 @@
              <void method="put"> 
               <string>_col5</string> 
               <object id="ExprNodeConstantDesc2" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>floor((- 1.5))</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -585,6 +594,9 @@
              <void method="put"> 
               <string>_col4</string> 
               <object id="ExprNodeConstantDesc3" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>floor(1.5)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -596,6 +608,9 @@
              <void method="put"> 
               <string>_col3</string> 
               <object id="ExprNodeConstantDesc4" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>floor(1.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -607,6 +622,9 @@
              <void method="put"> 
               <string>_col2</string> 
               <object id="ExprNodeConstantDesc5" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>round((- 1.5))</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 
@@ -618,6 +636,9 @@
              <void method="put"> 
               <string>_col1</string> 
               <object id="ExprNodeConstantDesc6" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>round(1.5)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 
@@ -629,6 +650,9 @@
              <void method="put"> 
               <string>_col9</string> 
               <object id="ExprNodeConstantDesc7" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>ceil(1.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -672,6 +696,9 @@
              <void method="put"> 
               <string>_col12</string> 
               <object id="ExprNodeConstantDesc8" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>ceil(1.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -683,6 +710,9 @@
              <void method="put"> 
               <string>_col11</string> 
               <object id="ExprNodeConstantDesc9" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>ceil((- 1.5))</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -694,6 +724,9 @@
              <void method="put"> 
               <string>_col10</string> 
               <object id="ExprNodeConstantDesc10" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>ceil(1.5)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo1"/> 
                </void> 
@@ -705,6 +738,9 @@
              <void method="put"> 
               <string>_col17</string> 
               <object id="ExprNodeConstantDesc11" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>(1 + (- 2))</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo2"/> 
                </void> 
@@ -716,6 +752,9 @@
              <void method="put"> 
               <string>_col16</string> 
               <object id="ExprNodeConstantDesc12" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>(1 + 2)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo2"/> 
                </void> 
@@ -727,6 +766,9 @@
              <void method="put"> 
               <string>_col15</string> 
               <object id="ExprNodeConstantDesc13" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>(- 3)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo2"/> 
                </void> 
@@ -749,6 +791,9 @@
              <void method="put"> 
               <string>_col0</string> 
               <object id="ExprNodeConstantDesc15" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>round(1.0)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 
@@ -760,6 +805,9 @@
              <void method="put"> 
               <string>_col18</string> 
               <object id="ExprNodeConstantDesc16" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>(~ 1)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo2"/> 
                </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/udf6.q.xml Thu Oct 30 16:22:33 2014
@@ -338,6 +338,9 @@
              <void method="put"> 
               <string>_col0</string> 
               <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>concat(&apos;a&apos;, &apos;b&apos;)</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/udf_case.q.xml Thu Oct 30 16:22:33 2014
@@ -351,6 +351,9 @@
              <void method="put"> 
               <string>_col0</string> 
               <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 

Modified: hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml (original)
+++ hive/branches/spark/ql/src/test/results/compiler/plan/udf_when.q.xml Thu Oct 30 16:22:33 2014
@@ -351,6 +351,9 @@
              <void method="put"> 
               <string>_col0</string> 
               <object id="ExprNodeConstantDesc0" class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc"> 
+               <void property="foldedFromCol"> 
+                <string>CASE WHEN ((1 = 1)) THEN (2) WHEN ((3 = 5)) THEN (4) ELSE (5) END</string> 
+               </void> 
                <void property="typeInfo"> 
                 <object idref="PrimitiveTypeInfo0"/> 
                </void> 

Modified: hive/branches/spark/serde/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/pom.xml?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/pom.xml (original)
+++ hive/branches/spark/serde/pom.xml Thu Oct 30 16:22:33 2014
@@ -19,7 +19,7 @@
   <parent>
     <groupId>org.apache.hive</groupId>
     <artifactId>hive</artifactId>
-    <version>0.14.0-SNAPSHOT</version>
+    <version>0.15.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java Thu Oct 30 16:22:33 2014
@@ -24,6 +24,8 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
 
+import javax.annotation.Nullable;
+
 /**
  * Abstract class for implementing SerDe. The abstract class has been created, so that
  * new methods can be added in the underlying interface, SerDe, and only implementations
@@ -31,6 +33,8 @@ import org.apache.hadoop.io.Writable;
  */
 public abstract class AbstractSerDe implements SerDe {
 
+  protected String configErrors;
+
   /**
    * Initialize the SerDe. By default, this will use one set of properties, either the
    * table properties or the partition properties. If a SerDe needs access to both sets,
@@ -54,13 +58,14 @@ public abstract class AbstractSerDe impl
    * Initialize the HiveSerializer.
    *
    * @param conf
-   *          System properties
+   *          System properties. Can be null in compile time
    * @param tbl
    *          table properties
    * @throws SerDeException
    */
   @Deprecated
-  public abstract void initialize(Configuration conf, Properties tbl) throws SerDeException;
+  public abstract void initialize(@Nullable Configuration conf, Properties tbl)
+      throws SerDeException;
 
   /**
    * Returns the Writable class that would be returned by the serialize method.
@@ -101,4 +106,13 @@ public abstract class AbstractSerDe impl
    * structure of the Object returned from deserialize(...).
    */
   public abstract ObjectInspector getObjectInspector() throws SerDeException;
+
+  /**
+   * Get the error messages during the Serde configuration
+   *
+   * @return The error messages in the configuration which are empty if no error occurred
+   */
+  public String getConfigurationErrors() {
+    return configErrors == null ? "" : configErrors;
+  }
 }

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java Thu Oct 30 16:22:33 2014
@@ -42,6 +42,11 @@ import org.apache.hadoop.io.Writable;
  * MetadataTypedColumnsetSerDe.
  *
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.SERIALIZATION_FORMAT,
+    serdeConstants.SERIALIZATION_NULL_FORMAT,
+    serdeConstants.SERIALIZATION_LIB,
+    serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST })
 public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/OpenCSVSerde.java Thu Oct 30 16:22:33 2014
@@ -49,6 +49,9 @@ import au.com.bytecode.opencsv.CSVWriter
  * quote("), and escape characters(\) are the same as the opencsv library.
  *
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS,
+    OpenCSVSerde.SEPARATORCHAR, OpenCSVSerde.QUOTECHAR, OpenCSVSerde.ESCAPECHAR})
 public final class OpenCSVSerde extends AbstractSerDe {
 
  public static final Log LOG = LogFactory.getLog(OpenCSVSerde.class.getName());
@@ -69,7 +72,7 @@ public final class OpenCSVSerde extends 
  public void initialize(final Configuration conf, final Properties tbl) throws SerDeException {
 
    final List<String> columnNames = Arrays.asList(tbl.getProperty(serdeConstants.LIST_COLUMNS)
-            .split(","));
+        .split(","));
 
     numCols = columnNames.size();
 

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java Thu Oct 30 16:22:33 2014
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveJavaObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -70,10 +69,16 @@ import com.google.common.collect.Lists;
  * writableStringObjectInspector. We should switch to that when we have a UTF-8
  * based Regex library.
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+    RegexSerDe.INPUT_REGEX, RegexSerDe.INPUT_REGEX_CASE_SENSITIVE })
 public class RegexSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(RegexSerDe.class.getName());
 
+  public static final String INPUT_REGEX = "input.regex";
+  public static final String INPUT_REGEX_CASE_SENSITIVE = "input.regex.case.insensitive";
+
   int numColumns;
   String inputRegex;
 
@@ -95,11 +100,11 @@ public class RegexSerDe extends Abstract
     // We can get the table definition from tbl.
 
     // Read the configuration parameters
-    inputRegex = tbl.getProperty("input.regex");
+    inputRegex = tbl.getProperty(INPUT_REGEX);
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
    String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     boolean inputRegexIgnoreCase = "true".equalsIgnoreCase(tbl
-        .getProperty("input.regex.case.insensitive"));
+        .getProperty(INPUT_REGEX_CASE_SENSITIVE));
 
     // output format string is not supported anymore, warn user of deprecation
     if (null != tbl.getProperty("output.format.string")) {

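The @SerDeSpec annotations added across these SerDes declare which table
properties feed into a SerDe's schema. Assuming the annotation has runtime
retention (Hive reads it reflectively), the declared properties can be
enumerated like this:

    import org.apache.hadoop.hive.serde2.RegexSerDe;
    import org.apache.hadoop.hive.serde2.SerDeSpec;

    public class SerDeSpecSketch {
      public static void main(String[] args) {
        // List the schema-affecting properties RegexSerDe declares above.
        SerDeSpec spec = RegexSerDe.class.getAnnotation(SerDeSpec.class);
        if (spec != null) {
          for (String prop : spec.schemaProps()) {
            System.out.println(prop);
          }
        }
      }
    }
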
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java Thu Oct 30 16:22:33 2014
@@ -497,7 +497,8 @@ public final class SerDeUtils {
    * @return the overlayed properties
    */
  public static Properties createOverlayedProperties(Properties tblProps, Properties partProps) {
-    Properties props = new Properties(tblProps);
+    Properties props = new Properties();
+    props.putAll(tblProps);
     if (partProps != null) {
       props.putAll(partProps);
     }
@@ -506,7 +507,8 @@ public final class SerDeUtils {
 
   /**
    * Initializes a SerDe.
-   * @param serde
+   * @param deserializer
+   * @param conf
    * @param tblProps
    * @param partProps
    * @throws SerDeException
@@ -516,6 +518,28 @@ public final class SerDeUtils {
                                                 throws SerDeException {
     if (deserializer instanceof AbstractSerDe) {
       ((AbstractSerDe) deserializer).initialize(conf, tblProps, partProps);
+      String msg = ((AbstractSerDe) deserializer).getConfigurationErrors();
+      if (msg != null && !msg.isEmpty()) {
+        throw new SerDeException(msg);
+      }
+    } else {
      deserializer.initialize(conf, createOverlayedProperties(tblProps, partProps));
+    }
+  }
+
+  /**
+   * Initializes a SerDe.
+   * @param deserializer
+   * @param conf
+   * @param tblProps
+   * @param partProps
+   * @throws SerDeException
+   */
+  public static void initializeSerDeWithoutErrorCheck(Deserializer deserializer,
+                                                      Configuration conf, Properties tblProps,
+                                                      Properties partProps) throws SerDeException {
+    if (deserializer instanceof AbstractSerDe) {
+      ((AbstractSerDe) deserializer).initialize(conf, tblProps, partProps);
     } else {
      deserializer.initialize(conf, createOverlayedProperties(tblProps, partProps));
     }

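The createOverlayedProperties fix above is subtle: new Properties(tblProps)
makes the table properties mere defaults, which answer getProperty() but are
invisible to keySet(), entrySet(), and putAll(), so overlaying and enumerating
them silently drops entries. Copying with putAll() turns them into real
entries. A minimal demonstration of the difference in plain java.util:

    import java.util.Properties;

    public class OverlayDemo {
      public static void main(String[] args) {
        Properties tbl = new Properties();
        tbl.setProperty("columns", "key,value");

        // Old approach: tbl becomes the defaults of props.
        Properties viaDefaults = new Properties(tbl);
        System.out.println(viaDefaults.getProperty("columns")); // key,value
        System.out.println(viaDefaults.keySet());               // [] (!)

        // New approach: copy the entries outright.
        Properties viaPutAll = new Properties();
        viaPutAll.putAll(tbl);
        System.out.println(viaPutAll.keySet());                 // [columns]
      }
    }
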
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroDeserializer.java Thu Oct 30 16:22:33 2014
@@ -40,7 +40,6 @@ import org.apache.avro.io.BinaryDecoder;
 import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DecoderFactory;
 import org.apache.avro.io.EncoderFactory;
-import org.apache.avro.util.Utf8;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveChar;
@@ -201,7 +200,7 @@ class AvroDeserializer {
     // Avro requires NULLable types to be defined as unions of some type T
     // and NULL.  This is annoying and we're going to hide it from the user.
     if(AvroSerdeUtils.isNullableType(recordSchema)) {
-      return deserializeNullableUnion(datum, fileSchema, recordSchema, columnType);
+      return deserializeNullableUnion(datum, fileSchema, recordSchema);
     }
 
     switch(columnType.getCategory()) {
@@ -297,8 +296,8 @@ class AvroDeserializer {
    * Extract either a null or the correct type from a Nullable type.  This is
    * horrible in that we rebuild the TypeInfo every time.
    */
-  private Object deserializeNullableUnion(Object datum, Schema fileSchema, Schema recordSchema,
-                                          TypeInfo columnType) throws AvroSerdeException {
+  private Object deserializeNullableUnion(Object datum, Schema fileSchema, Schema recordSchema)
+                                            throws AvroSerdeException {
    int tag = GenericData.get().resolveUnion(recordSchema, datum); // Determine index of value
     Schema schema = recordSchema.getTypes().get(tag);
     if (schema.getType().equals(Schema.Type.NULL)) {
@@ -307,8 +306,14 @@ class AvroDeserializer {
 
     Schema currentFileSchema = null;
     if (fileSchema != null) {
-       currentFileSchema =
-           fileSchema.getType() == Type.UNION ? fileSchema.getTypes().get(tag) : fileSchema;
+      if (fileSchema.getType() == Type.UNION) {
+        // The fileSchema may have the null value in a different position, so
+        // we need to get the correct tag
+        tag = GenericData.get().resolveUnion(fileSchema, datum);
+        currentFileSchema = fileSchema.getTypes().get(tag);
+      } else {
+        currentFileSchema = fileSchema;
+      }
     }
    return worker(datum, currentFileSchema, schema, SchemaToTypeInfo.generateTypeInfo(schema));
 
@@ -370,10 +375,10 @@ class AvroDeserializer {
     // Avro only allows maps with Strings for keys, so we only have to worry
     // about deserializing the values
     Map<String, Object> map = new HashMap<String, Object>();
-    Map<Utf8, Object> mapDatum = (Map)datum;
+    Map<CharSequence, Object> mapDatum = (Map)datum;
     Schema valueSchema = mapSchema.getValueType();
     TypeInfo valueTypeInfo = columnType.getMapValueTypeInfo();
-    for (Utf8 key : mapDatum.keySet()) {
+    for (CharSequence key : mapDatum.keySet()) {
       Object value = mapDatum.get(key);
      map.put(key.toString(), worker(value, fileSchema == null ? null : fileSchema.getValueType(),
           valueSchema, valueTypeInfo));

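The deserializeNullableUnion change above handles union schemas whose branches
appear in a different order in the file schema than in the record schema: a
branch index (tag) resolved against one union is not valid against the other,
so the reader now re-resolves the datum against the file schema before
descending. A small sketch of why, using Avro's resolveUnion directly:

    import java.util.Arrays;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;

    public class UnionTagSketch {
      public static void main(String[] args) {
        Schema nullFirst = Schema.createUnion(Arrays.asList(
            Schema.create(Schema.Type.NULL), Schema.create(Schema.Type.STRING)));
        Schema stringFirst = Schema.createUnion(Arrays.asList(
            Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.NULL)));

        // The same datum resolves to a different branch index in each union.
        System.out.println(GenericData.get().resolveUnion(nullFirst, "x"));   // 1
        System.out.println(GenericData.get().resolveUnion(stringFirst, "x")); // 0
      }
    }
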
Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java Thu Oct 30 16:22:33 2014
@@ -26,8 +26,10 @@ import org.apache.avro.Schema;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
@@ -37,9 +39,18 @@ import org.apache.hadoop.io.Writable;
 /**
  * Read or write Avro data from Hive.
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+    AvroSerDe.LIST_COLUMN_COMMENTS, AvroSerDe.TABLE_NAME, AvroSerDe.TABLE_COMMENT,
+    AvroSerdeUtils.SCHEMA_LITERAL, AvroSerdeUtils.SCHEMA_URL,
+    AvroSerdeUtils.SCHEMA_NAMESPACE, AvroSerdeUtils.SCHEMA_NAME, AvroSerdeUtils.SCHEMA_DOC})
 public class AvroSerDe extends AbstractSerDe {
   private static final Log LOG = LogFactory.getLog(AvroSerDe.class);
 
+  public static final String TABLE_NAME = "name";
+  public static final String TABLE_COMMENT = "comment";
+  public static final String LIST_COLUMN_COMMENTS = "columns.comments";
+
   public static final String DECIMAL_TYPE_NAME = "decimal";
   public static final String CHAR_TYPE_NAME = "char";
   public static final String VARCHAR_TYPE_NAME = "varchar";
@@ -59,8 +70,6 @@ public class AvroSerDe extends AbstractS
   private AvroSerializer avroSerializer = null;
 
   private boolean badSchema = false;
-  private static String TABLE_NAME = "name";
-  private static String TABLE_COMMENT = "comment";
 
   @Override
   public void initialize(Configuration configuration, Properties tableProperties,
@@ -81,15 +90,15 @@ public class AvroSerDe extends AbstractS
     columnNames = null;
     columnTypes = null;
 
-    final String columnNameProperty = properties.getProperty("columns");
-    final String columnTypeProperty = properties.getProperty("columns.types");
-    final String columnCommentProperty = properties.getProperty("columns.comments");
+    final String columnNameProperty = properties.getProperty(serdeConstants.LIST_COLUMNS);
+    final String columnTypeProperty = properties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+    final String columnCommentProperty = properties.getProperty(LIST_COLUMN_COMMENTS);
 
     if (properties.getProperty(AvroSerdeUtils.SCHEMA_LITERAL) != null
         || properties.getProperty(AvroSerdeUtils.SCHEMA_URL) != null
         || columnNameProperty == null || columnNameProperty.isEmpty()
         || columnTypeProperty == null || columnTypeProperty.isEmpty()) {
-      schema = AvroSerdeUtils.determineSchemaOrReturnErrorSchema(properties);
+      schema = determineSchemaOrReturnErrorSchema(properties);
     } else {
       // Get column names and sort order
       columnNames = Arrays.asList(columnNameProperty.split(","));
@@ -135,6 +144,32 @@ public class AvroSerDe extends AbstractS
     this.oi = aoig.getObjectInspector();
   }
 
+  /**
+   * Attempt to determine the schema via the usual means, but do not throw
+   * an exception if we fail.  Instead, signal failure via a special
+   * schema.  This is used because Hive calls init on the serde during
+   * any call, including calls to update the serde properties, meaning
+   * if the serde is in a bad state, there is no way to update that state.
+   */
+  public Schema determineSchemaOrReturnErrorSchema(Properties props) {
+    try {
+      configErrors = "";
+      return AvroSerdeUtils.determineSchemaOrThrowException(props);
+    } catch(AvroSerdeException he) {
+      LOG.warn("Encountered AvroSerdeException determining schema. Returning " +
+              "signal schema to indicate problem", he);
+      configErrors = new String("Encountered AvroSerdeException determining schema. Returning " +
+              "signal schema to indicate problem: " + he.getMessage());
+      return schema = SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
+    } catch (Exception e) {
+      LOG.warn("Encountered exception determining schema. Returning signal " +
+              "schema to indicate problem", e);
+      configErrors = new String("Encountered exception determining schema. Returning signal " +
+              "schema to indicate problem: " + e.getMessage());
+      return SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
+    }
+  }
+
   @Override
   public Class<? extends Writable> getSerializedClass() {
     return AvroGenericRecordWritable.class;
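
The method moved here from AvroSerdeUtils so the failure text can be captured on the serde instance (configErrors) rather than only logged. Callers can still detect failure by comparing against the sentinel schema. A hedged usage sketch, assuming the code sits in the org.apache.hadoop.hive.serde2.avro package; the malformed literal is made up:

    import java.util.Properties;
    import org.apache.avro.Schema;

    public class BadSchemaSketch {
      public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty(AvroSerdeUtils.SCHEMA_LITERAL, "{ not valid json"); // deliberately broken

        Schema s = new AvroSerDe().determineSchemaOrReturnErrorSchema(props);
        if (s == SchemaResolutionProblem.SIGNAL_BAD_SCHEMA) {
          // Resolution failed; the cause is recorded in configErrors on the serde.
          System.out.println("schema could not be resolved");
        }
      }
    }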

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java Thu Oct 30 16:22:33 2014
@@ -91,26 +91,6 @@ public class AvroSerdeUtils {
     }
   }
 
-  /**
-   * Attempt to determine the schema via the usual means, but do not throw
-   * an exception if we fail.  Instead, signal failure via a special
-   * schema.  This is used because Hive calls init on the serde during
-   * any call, including calls to update the serde properties, meaning
-   * if the serde is in a bad state, there is no way to update that state.
-   */
-  public static Schema determineSchemaOrReturnErrorSchema(Properties props) {
-    try {
-      return determineSchemaOrThrowException(props);
-    } catch(AvroSerdeException he) {
-      LOG.warn("Encountered AvroSerdeException determining schema. Returning " +
-              "signal schema to indicate problem", he);
-      return SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
-    } catch (Exception e) {
-      LOG.warn("Encountered exception determining schema. Returning signal " +
-              "schema to indicate problem", e);
-      return SchemaResolutionProblem.SIGNAL_BAD_SCHEMA;
-    }
-  }
   // Protected for testing and so we can pass in a conf for testing.
   protected static Schema getSchemaFromFS(String schemaFSUrl,
                          Configuration conf) throws IOException, URISyntaxException {

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java Thu Oct 30 16:22:33 2014
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.serde2.Byt
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
@@ -78,7 +79,6 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.UnionTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.FloatWritable;
@@ -110,6 +110,9 @@ import org.apache.hadoop.io.Writable;
  * fields in the same top-level field will have the same sort order.
  *
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+    serdeConstants.SERIALIZATION_SORT_ORDER})
 public class BinarySortableSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(BinarySortableSerDe.class.getName());
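
The @SerDeSpec annotations added throughout this commit (here and on ColumnarSerDe, LazyBinaryColumnarSerDe, DynamicSerDe, LazySimpleSerDe, LazyBinarySerDe and AvroSerDe) declare which table properties affect a serde's schema. A hedged sketch of how a consumer might enumerate them reflectively; this assumes SerDeSpec is retained at runtime, which the patch does not show:

    import org.apache.hadoop.hive.serde2.SerDeSpec;
    import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;

    public class SerDeSpecSketch {
      public static void main(String[] args) {
        SerDeSpec spec = BinarySortableSerDe.class.getAnnotation(SerDeSpec.class);
        if (spec != null) {
          for (String prop : spec.schemaProps()) {
            System.out.println("schema-affecting property: " + prop);
          }
        }
      }
    }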

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java Thu Oct 30 16:22:33 2014
@@ -27,9 +27,11 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -50,6 +52,14 @@ import org.apache.hadoop.io.Writable;
  * (2) ColumnarSerDe initialize ColumnarStruct's field directly. But under the
  * field level, it works like LazySimpleSerDe<br>
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+    serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+    serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+    serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+    serdeConstants.ESCAPE_CHAR,
+    serdeConstants.SERIALIZATION_ENCODING,
+    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
 public class ColumnarSerDe extends ColumnarSerDeBase {
 
   @Override

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/columnar/LazyBinaryColumnarSerDe.java Thu Oct 30 16:22:33 2014
@@ -22,8 +22,10 @@ import java.util.List;
 import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
 import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryFactory;
@@ -44,6 +46,7 @@ import org.apache.hadoop.io.Writable;
  * format and which is deserialized in a lazy, i.e. on-demand fashion.
  *
  */
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
 public class LazyBinaryColumnarSerDe extends ColumnarSerDeBase {
 
   private List<String> columnNames;

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/dynamic_type/DynamicSerDe.java Thu Oct 30 16:22:33 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde.serd
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
@@ -50,6 +51,9 @@ import org.apache.thrift.transport.TIOSt
  * DynamicSerDe.
  *
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.SERIALIZATION_DDL, serdeConstants.SERIALIZATION_FORMAT,
+    DynamicSerDe.META_TABLE_NAME})
 public class DynamicSerDe extends AbstractSerDe {
 
   public static final Log LOG = LogFactory.getLog(DynamicSerDe.class.getName());

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/DateWritable.java Thu Oct 30 16:22:33 2014
@@ -128,7 +128,10 @@ public class DateWritable implements Wri
   public static long daysToMillis(int d) {
     // Convert from day offset to ms in UTC, then apply local timezone offset.
     long millisUtc = d * MILLIS_PER_DAY;
-    return millisUtc - LOCAL_TIMEZONE.get().getOffset(millisUtc);
+    long tmp = millisUtc - LOCAL_TIMEZONE.get().getOffset(millisUtc);
+    // Between millisUtc and tmp, the time zone offset may have changed due to DST.
+    // Look up the offset again.
+    return millisUtc - LOCAL_TIMEZONE.get().getOffset(tmp);
   }
 
   public static int dateToDays(Date d) {
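
The daysToMillis fix covers days where the zone offset at the initial UTC guess differs from the offset in force at the local instant being produced, i.e. the day sits on the far side of a DST transition. A standalone sketch of the two-step lookup; the zone and day number are illustrative, chosen because America/Sao_Paulo historically switched to DST at local midnight:

    import java.util.TimeZone;

    public class DstOffsetSketch {
      private static final long MILLIS_PER_DAY = 86400000L;

      public static void main(String[] args) {
        TimeZone tz = TimeZone.getTimeZone("America/Sao_Paulo");
        long millisUtc = 16362L * MILLIS_PER_DAY; // 2014-10-19, a spring-forward day there (assumed)

        long firstGuess = millisUtc - tz.getOffset(millisUtc);
        // If the offset changed between millisUtc and firstGuess, the second
        // lookup returns the offset actually in force at the local instant.
        long corrected = millisUtc - tz.getOffset(firstGuess);
        System.out.println(firstGuess == corrected ? "offsets agree" : "DST boundary crossed");
      }
    }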

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/io/TimestampWritable.java Thu Oct 30 16:22:33 2014
@@ -26,10 +26,7 @@ import java.sql.Timestamp;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryUtils.VInt;
@@ -55,7 +52,6 @@ import org.apache.hadoop.io.WritableUtil
  *
  */
 public class TimestampWritable implements WritableComparable<TimestampWritable> {
-  static final private Log LOG = LogFactory.getLog(TimestampWritable.class);
 
   static final public byte[] nullBytes = {0x0, 0x0, 0x0, 0x0};
 
@@ -125,6 +121,12 @@ public class TimestampWritable implement
     clearTimestamp();
   }
 
+  public void setTime(long time) {
+    timestamp.setTime(time);
+    bytesEmpty = true;
+    timestampEmpty = false;
+  }
+
   public void set(Timestamp t) {
     if (t == null) {
       timestamp.setTime(0);
@@ -301,7 +303,7 @@ public class TimestampWritable implement
       seconds = getSeconds();
       nanos = getNanos();
     }
-    return seconds + ((double) nanos) / 1000000000;
+    return seconds + nanos / 1000000000;
   }
 
 
@@ -453,7 +455,7 @@ public class TimestampWritable implement
 
   /**
    * Writes a Timestamp's serialized value to byte array b at the given offset
-   * @param timestamp to convert to bytes
+   * @param t to convert to bytes
    * @param b destination byte array
    * @param offset destination offset in the byte array
    */
@@ -538,7 +540,7 @@ public class TimestampWritable implement
 
     // We must ensure the exactness of the double's fractional portion.
     // 0.6 as the fraction part will be converted to 0.59999... and
-    // significantly reduce the savings from binary serializtion
+    // significantly reduce the savings from binary serialization
     BigDecimal bd = new BigDecimal(String.valueOf(f));
     bd = bd.subtract(new BigDecimal(seconds)).multiply(new BigDecimal(1000000000));
     int nanos = bd.intValue();
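
Two small notes on the TimestampWritable hunks: the new setTime(long) mutates the internal Timestamp in place and marks the cached byte form stale, saving an allocation for callers that only hold epoch millis; and dropping the cast in getDouble() is safe presumably because seconds and nanos are declared as doubles in that method (the declarations are outside the hunk), so the division stays floating-point. A brief usage sketch with an arbitrary epoch value:

    import java.sql.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritable;

    public class SetTimeSketch {
      public static void main(String[] args) {
        TimestampWritable tw = new TimestampWritable(new Timestamp(0));
        tw.setTime(1414684953000L);         // arbitrary epoch millis
        // getDouble() returns seconds plus the fractional nanosecond part.
        System.out.println(tw.getDouble()); // 1.414684953E9
      }
    }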

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyMap.java Thu Oct 30 16:22:33 2014
@@ -23,6 +23,8 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.io.Text;
@@ -36,6 +38,7 @@ import org.apache.hadoop.io.Text;
  */
 public class LazyMap extends LazyNonPrimitive<LazyMapObjectInspector> {
 
+  public static final Log LOG = LogFactory.getLog(LazyMap.class);
   /**
    * Whether the data is already parsed or not.
    */
@@ -170,15 +173,19 @@ public class LazyMap extends LazyNonPrim
         valueLength[mapSize] = elementByteEnd - (keyEnd[mapSize] + 1);
         LazyPrimitive<?, ?> lazyKey = uncheckedGetKey(mapSize);
         if (lazyKey == null) {
-          continue;
-        }
-        Object key = lazyKey.getObject();
-        if(!keySet.contains(key)) {
-          mapSize++;
-          keySet.add(key);
-        } else {
+          LOG.warn("skipped empty entry or entry with empty key in the representation of column with MAP type.");
+          //reset keyInited[mapSize] flag, since it may be set to true in the case of previous empty entry
           keyInited[mapSize] = false;
+        } else {
+          Object key = lazyKey.getObject();
+          if(!keySet.contains(key)) {
+            mapSize++;
+            keySet.add(key);
+          } else {
+            keyInited[mapSize] = false;
+          }
         }
+
         // reset keyValueSeparatorPosition
         keyValueSeparatorPosition = -1;
         elementByteBegin = elementByteEnd + 1;
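
The reshaped loop above now warns and skips entries with a missing or empty key (previously a bare continue) and resets keyInited in both the empty-key and duplicate-key branches, so a stale flag from an earlier entry cannot leak into the next slot. A self-contained sketch of the intended parse behavior, using made-up data and LazySimpleSerDe-style default delimiters (\002 between entries, \003 between key and value):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MapParseSketch {
      public static void main(String[] args) {
        String row = "k1\003v1\002\002k2\003v2"; // the middle entry is empty
        Map<String, String> parsed = new LinkedHashMap<String, String>();
        for (String entry : row.split("\002", -1)) {
          int sep = entry.indexOf('\003');
          if (sep <= 0) {
            continue; // empty entry or empty key: warn and skip, as LazyMap now does
          }
          String key = entry.substring(0, sep);
          if (!parsed.containsKey(key)) { // duplicate keys keep the first value seen
            parsed.put(key, entry.substring(sep + 1));
          }
        }
        System.out.println(parsed); // {k1=v1, k2=v2}
      }
    }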

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java Thu Oct 30 16:22:33 2014
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.serde2.lazy;
 
 import java.io.IOException;
-import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
@@ -31,10 +30,10 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractEncodingAwareSerDe;
-import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -64,6 +63,14 @@ import org.apache.hadoop.io.Writable;
  * Also LazySimpleSerDe outputs typed columns instead of treating all columns as
  * String like MetadataTypedColumnsetSerDe.
  */
+@SerDeSpec(schemaProps = {
+    serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES,
+    serdeConstants.FIELD_DELIM, serdeConstants.COLLECTION_DELIM, serdeConstants.MAPKEY_DELIM,
+    serdeConstants.SERIALIZATION_FORMAT, serdeConstants.SERIALIZATION_NULL_FORMAT,
+    serdeConstants.SERIALIZATION_LAST_COLUMN_TAKES_REST,
+    serdeConstants.ESCAPE_CHAR,
+    serdeConstants.SERIALIZATION_ENCODING,
+    LazySimpleSerDe.SERIALIZATION_EXTEND_NESTING_LEVELS})
 public class LazySimpleSerDe extends AbstractEncodingAwareSerDe {
 
   public static final Log LOG = LogFactory.getLog(LazySimpleSerDe.class.getName());

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java Thu Oct 30 16:22:33 2014
@@ -27,14 +27,12 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Decimal128;
-import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
-import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -76,6 +74,7 @@ import org.apache.hadoop.io.Writable;
  * deserialized until required. Binary means a field is serialized in binary
  * compact format.
  */
+@SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
 public class LazyBinarySerDe extends AbstractSerDe {
   public static final Log LOG = LogFactory.getLog(LazyBinarySerDe.class.getName());
 

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseCharTypeInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseCharTypeInfo.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseCharTypeInfo.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/BaseCharTypeInfo.java Thu Oct 30 16:22:33 2014
@@ -27,6 +27,10 @@ public abstract class BaseCharTypeInfo e
   public BaseCharTypeInfo() {
   }
 
+  public BaseCharTypeInfo(String typeName) {
+    super(typeName);
+  }
+
   public BaseCharTypeInfo(String typeName, int length) {
     super(typeName);
     this.length = length;
@@ -53,4 +57,9 @@ public abstract class BaseCharTypeInfo e
     return sb.toString();
   }
 
+  @Override
+  public void setTypeName(String typeName) {
+    // type name should already be set by subclass
+    return;
+  }
 }

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/CharTypeInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/CharTypeInfo.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/CharTypeInfo.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/CharTypeInfo.java Thu Oct 30 16:22:33 2014
@@ -25,6 +25,7 @@ public class CharTypeInfo  extends BaseC
 
   // no-arg constructor to make Kryo happy.
   public CharTypeInfo() {
+    super(serdeConstants.CHAR_TYPE_NAME);
   }
 
   public CharTypeInfo(int length) {

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/DecimalTypeInfo.java Thu Oct 30 16:22:33 2014
@@ -28,6 +28,7 @@ public class DecimalTypeInfo extends Pri
 
   // no-arg constructor to make Kryo happy.
   public DecimalTypeInfo() {
+    super(serdeConstants.DECIMAL_TYPE_NAME);
   }
 
   public DecimalTypeInfo(int precision, int scale) {
@@ -43,6 +44,12 @@ public class DecimalTypeInfo extends Pri
   }
 
   @Override
+  public void setTypeName(String typeName) {
+    // No need to set type name, it should always be decimal
+    return;
+  }
+
+  @Override
   public boolean equals(Object other) {
     if (other == null || !(other instanceof DecimalTypeInfo)) {
       return false;
@@ -101,4 +108,20 @@ public class DecimalTypeInfo extends Pri
     return this.precision() - this.scale() >= dti.precision() - dti.scale();
   }
 
+  public int getPrecision() {
+    return precision;
+  }
+
+  public void setPrecision(int precision) {
+    this.precision = precision;
+  }
+
+  public int getScale() {
+    return scale;
+  }
+
+  public void setScale(int scale) {
+    this.scale = scale;
+  }
+
 }
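
These typeinfo changes are driven by Kryo-style deserialization, which builds objects through the no-arg constructor and property setters: the no-arg constructors now pre-set the type name, setTypeName becomes a no-op so a stray call cannot clobber it, and precision/scale gain bean-style accessors. A minimal sketch of that construction path:

    import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;

    public class DecimalTypeInfoSketch {
      public static void main(String[] args) {
        DecimalTypeInfo dti = new DecimalTypeInfo(); // type name pre-set to "decimal"
        dti.setPrecision(10);                        // setters added by this patch
        dti.setScale(2);
        dti.setTypeName("varchar");                  // deliberately ignored by the override
        System.out.println(dti.getPrecision() + "," + dti.getScale()); // 10,2
      }
    }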

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/StructTypeInfo.java Thu Oct 30 16:22:33 2014
@@ -102,7 +102,7 @@ public final class StructTypeInfo extend
   public TypeInfo getStructFieldTypeInfo(String field) {
     String fieldLowerCase = field.toLowerCase();
     for (int i = 0; i < allStructFieldNames.size(); i++) {
-      if (fieldLowerCase.equals(allStructFieldNames.get(i))) {
+      if (fieldLowerCase.equalsIgnoreCase(allStructFieldNames.get(i))) {
         return allStructFieldTypeInfos.get(i);
       }
     }
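
The one-line fix above addresses a lookup that lower-cased only the probe: when the stored field name kept upper-case characters, equals() never matched. A tiny sketch of the failure mode with made-up names:

    public class FieldLookupSketch {
      public static void main(String[] args) {
        String stored = "Col1";              // field name as registered
        String probe = "COL1".toLowerCase(); // "col1", as the old code computed
        System.out.println(probe.equals(stored));           // false: the old miss
        System.out.println(probe.equalsIgnoreCase(stored)); // true: patched behavior
      }
    }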

Modified: hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java (original)
+++ hive/branches/spark/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/VarcharTypeInfo.java Thu Oct 30 16:22:33 2014
@@ -25,6 +25,7 @@ public class VarcharTypeInfo extends Bas
 
   // no-arg constructor to make Kryo happy.
   public VarcharTypeInfo() {
+    super(serdeConstants.VARCHAR_TYPE_NAME);
   }
 
   public VarcharTypeInfo(int length) {

Modified: hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java?rev=1635536&r1=1635535&r2=1635536&view=diff
==============================================================================
--- hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java (original)
+++ hive/branches/spark/serde/src/test/org/apache/hadoop/hive/serde2/avro/TestAvroDeserializer.java Thu Oct 30 16:22:33 2014
@@ -475,6 +475,67 @@ public class TestAvroDeserializer {
     assertTrue(theMap2.containsKey("mu"));
     assertEquals(null, theMap2.get("mu"));
   }
+  
+  @Test
+  public void canDeserializeMapsWithJavaLangStringKeys() throws IOException, SerDeException {
+    // Ensures maps can be deserialized when avro.java.string=String.
+    // See http://stackoverflow.com/a/19868919/312944 for why that might be used.
+    String schemaString = "{\n" +
+        "  \"namespace\": \"testing\",\n" +
+        "  \"name\": \"oneMap\",\n" +
+        "  \"type\": \"record\",\n" +
+        "  \"fields\": [\n" +
+        "    {\n" +
+        "      \"name\":\"aMap\",\n" +
+        "      \"type\":{\"type\":\"map\",\n" +
+        "      \"avro.java.string\":\"String\",\n" +
+        "      \"values\":\"long\"}\n" +
+        "\t}\n" +
+        "  ]\n" +
+        "}";
+    Schema s = AvroSerdeUtils.getSchemaFor(schemaString);
+    GenericData.Record record = new GenericData.Record(s);
+
+    Map<String, Long> m = new Hashtable<String, Long>();
+    m.put("one", 1l);
+    m.put("two", 2l);
+    m.put("three", 3l);
+
+    record.put("aMap", m);
+    assertTrue(GENERIC_DATA.validate(s, record));
+    System.out.println("record = " + record);
+
+    AvroGenericRecordWritable garw = Utils.serializeAndDeserializeRecord(record);
+
+    AvroObjectInspectorGenerator aoig = new AvroObjectInspectorGenerator(s);
+
+    AvroDeserializer de = new AvroDeserializer();
+
+    ArrayList<Object> row = (ArrayList<Object>)de.deserialize(aoig.getColumnNames(),
+            aoig.getColumnTypes(), garw, s);
+    assertEquals(1, row.size());
+    Object theMapObject = row.get(0);
+    assertTrue(theMapObject instanceof Map);
+    Map theMap = (Map)theMapObject;
+
+    // Verify the raw object that's been created
+    assertEquals(1l, theMap.get("one"));
+    assertEquals(2l, theMap.get("two"));
+    assertEquals(3l, theMap.get("three"));
+
+    // Verify that the provided object inspector can pull out these same values
+    StandardStructObjectInspector oi =
+            (StandardStructObjectInspector)aoig.getObjectInspector();
+
+    List<Object> z = oi.getStructFieldsDataAsList(row);
+    assertEquals(1, z.size());
+    StructField fieldRef = oi.getStructFieldRef("amap");
+
+    Map theMap2 = (Map)oi.getStructFieldData(row, fieldRef);
+    assertEquals(1l, theMap2.get("one"));
+    assertEquals(2l, theMap2.get("two"));
+    assertEquals(3l, theMap2.get("three"));
+  }
 
   private void verifyNullableType(GenericData.Record record, Schema s, String fieldName,
                                   String expected) throws SerDeException, IOException {

