Hi Neelesh,

 

I've just upgraded our cluster from hadoop-0.20/hive-0.10 to
hadoop-1.03 (mapr2.1)/hive-0.10 and started running into this issue
immediately. Strangely, it doesn't appear to happen all of the time, or to be
local to any node or set of nodes.

 

The only workaround I've found so far is to copy the UDF jar into the
hadoop/lib directory on each task tracker node. Obviously this is not ideal,
and I'm still looking for an actual fix.

 

Chris

 

From: neelesh gadhia [mailto:ngad...@yahoo.com] 
Sent: 14 February 2013 17:04
To: user@hive.apache.org; nitinpawar...@gmail.com
Subject: Re: Nullpointer Exception when using UDF

 

Nitin,

 

Below is the code I got/used from
https://issues.apache.org/jira/browse/HIVE-2361.


 

I have attached the file that has all the UDFs (of which I am using
GenericUDFMax and GenericUDFSum), which fail with the same errors at the same point.

 

I also tried another UDF (GenericUDFMax), and it fails at exactly the same
point.

 

The code for GenericUDFSum is shown below.

 

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.nexr.platform.analysis.udf;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.UDFType;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.LongWritable;
import org.apache.hive.pdk.HivePdkUnitTest;
import org.apache.hive.pdk.HivePdkUnitTests;


@Description(name = "sum",
    value = "_FUNC_(hash_key, order_by_col1, order_by_col2 ...) " +
            "- Returns the summed value of group",
    extended = "Example:\n"
    + "  > SELECT _FUNC_(HASH(p1, p2), order_by_col1, order_by_col2, ... ) FROM (\n"
    + "  >         SELECT ~ FROM table DISTRIBUTE BY HASH(p1,p2) SORT BY p1,p2,order_by_col1, order_by_col2 DESC, ... \n"
    + "  > );")

@HivePdkUnitTests(
    setup = "", cleanup = "",
    cases = {
      @HivePdkUnitTest(
        query = "SELECT t.empno, t.deptno, t.sal, nexr_sum(hash(t.deptno),t.sal) as sal_sum"
            +"  FROM ("
            +"      select a.empno, a.deptno, a.sal from emp a"
            +"      distribute by hash(a.deptno)"
            +"      sort BY a.deptno, a.empno"
            +"  ) t;",
        result ="7782\t10\t2450\t2450\n"
            +"7839\t10\t5000\t7450\n"
            +"7934\t10\t1300\t8750\n"
            +"7369\t20\t800\t800\n"
            +"7566\t20\t2975\t3775\n"
            +"7788\t20\t3000\t6775\n"
            +"7876\t20\t1100\t7875\n"
            +"7902\t20\t3000\t10875\n"
            +"7499\t30\t1600\t1600\n"
            +"7521\t30\t1250\t2850\n"
            +"7654\t30\t1250\t4100\n"
            +"7698\t30\t2850\t6950\n"
            +"7844\t30\t1500\t8450\n"
            +"7900\t30\t950\t9400")
    }
  )
    
@UDFType(deterministic = false, stateful = true)
public class GenericUDFSum extends GenericUDF {
    private final LongWritable longResult = new LongWritable();
    private final DoubleWritable doubleResult = new DoubleWritable();
    private ObjectInspector hashOI, valueOI, prevHashStandardOI, resultOI;
    private Object prevHash;
    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 2) {
            throw new UDFArgumentException("Exactly two argument is expected.");
        }

        for(int i=0;i<arguments.length;i++){
            if (arguments[i].getCategory() != ObjectInspector.Category.PRIMITIVE) {
                throw new UDFArgumentTypeException(i,
                        "Only primitive type arguments are accepted but "
                        + arguments[i].getTypeName() + " is passed.");
            }
        }

        String t = arguments[1].getTypeName();
        if (t.equals(Constants.TINYINT_TYPE_NAME)||
                t.equals(Constants.SMALLINT_TYPE_NAME)||
                t.equals(Constants.INT_TYPE_NAME)||
                t.equals(Constants.BIGINT_TYPE_NAME)) {
            resultOI = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        } else if (t.equals(Constants.FLOAT_TYPE_NAME)||
                t.equals(Constants.DOUBLE_TYPE_NAME)||
                t.equals(Constants.STRING_TYPE_NAME)) {
            resultOI = PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        } else {
            throw new UDFArgumentTypeException(1,
                    "Only numeric or string type arguments are accepted but "
                    + arguments[1].getTypeName() + " is passed.");
        }

        longResult.set(0);
        doubleResult.set(0);
        hashOI = arguments[0];
        valueOI = arguments[1];
 
        prevHashStandardOI = ObjectInspectorUtils.getStandardObjectInspector(hashOI, ObjectInspectorCopyOption.JAVA);
        return resultOI;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object hash = arguments[0].get();
        Object value = arguments[1].get();
        if (prevHash == null || ObjectInspectorUtils.compare(prevHash, prevHashStandardOI, hash, hashOI) != 0) {
            longResult.set(0);
            doubleResult.set(0);
        }

        prevHash = ObjectInspectorUtils.copyToStandardObject(hash, hashOI, ObjectInspectorCopyOption.JAVA);

        Converter converter = ObjectInspectorConverters.getConverter(valueOI, resultOI);
        if(resultOI.getTypeName()==Constants.DOUBLE_TYPE_NAME){
            DoubleWritable valueW = (DoubleWritable) converter.convert(value);
            doubleResult.set(doubleResult.get()+valueW.get());
            return doubleResult;
        }
        LongWritable valueW = (LongWritable)converter.convert(value);
        longResult.set(longResult.get()+valueW.get());
        return longResult;
    }

    @Override
    public String getDisplayString(String[] children) {
        return "sum(" + StringUtils.join(children, ',') + ")";
    }
}

 

 

 

  _____  

From: Nitin Pawar <nitinpawar...@gmail.com>
To: user@hive.apache.org; neelesh gadhia <ngad...@yahoo.com>
Sent: Thursday, February 14, 2013 8:54 AM
Subject: Re: Nullpointer Exception when using UDF

 

Neelesh,

 

Will it be possible for you to share your code? 

 

It looks like your UDF is not handling NULL input values.
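
For illustration only, here is a hypothetical, minimal GenericUDF sketch (not
the HIVE-2361 code; the class name is made up) showing the usual pattern:
check each DeferredObject's value for null in evaluate() before touching it.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.IntWritable;

// Hypothetical example class, not part of the attached jar.
public class NullSafeStrLen extends GenericUDF {
    private final IntWritable result = new IntWritable();
    private StringObjectInspector stringOI;

    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 1 || !(arguments[0] instanceof StringObjectInspector)) {
            throw new UDFArgumentException("Exactly one string argument is expected.");
        }
        stringOI = (StringObjectInspector) arguments[0];
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object value = arguments[0].get();
        if (value == null) {
            return null; // propagate NULL instead of dereferencing it and hitting an NPE
        }
        result.set(stringOI.getPrimitiveJavaObject(value).length());
        return result;
    }

    @Override
    public String getDisplayString(String[] children) {
        return "nullsafe_strlen(" + children[0] + ")";
    }
}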

 

Thanks,

Nitin 

 

On Thu, Feb 14, 2013 at 10:22 PM, neelesh gadhia <ngad...@yahoo.com> wrote:

Hi Dean,

 

Thanks for your response. I reviewed the stack trace. As you mentioned, the
error shows up at
org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator.initialize(ExprNodeGenericFuncEvaluator.java:137).

 

But this is probably a Java class that comes with hadoop 1.1.1 and is
untouched. Do you think there is a bug in this Java class for hadoop 1.1.1?

 

Or could the way I created the UDF, using the file downloaded from
https://issues.apache.org/jira/browse/HIVE-2361, be causing the issue?

 

 

I read a few forums indicating that the class may not be on the classpath for
hadoop, although I confirmed that is not the case.
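
As a quick sanity check, a hypothetical snippet like the one below (not from
this thread; the class name is made up) can confirm whether the UDF class is
loadable from a given set of jars and show which jar it was actually loaded
from:

// Hypothetical helper, not part of the attached jar. Run with GenUDF.jar and
// the Hive/Hadoop jars on the classpath; a ClassNotFoundException here points
// at a packaging/classpath problem rather than at the UDF logic itself.
public class ClasspathCheck {
    public static void main(String[] args) throws Exception {
        Class<?> c = Class.forName("com.nexr.platform.analysis.udf.GenericUDFSum");
        System.out.println("Loaded " + c.getName() + " from "
                + c.getProtectionDomain().getCodeSource().getLocation());
    }
}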

 

Any further pointers or advice would be appreciated.

 

thanks,

Neelesh

 

  _____  

From: Dean Wampler <dean.wamp...@thinkbiganalytics.com>
To: user@hive.apache.org; neelesh gadhia <ngad...@yahoo.com>
Sent: Thursday, February 14, 2013 6:41 AM
Subject: Re:

 

According to your stack trace, you have a NullPointerException on line 137 of
your UDF.

On Thu, Feb 14, 2013 at 2:28 AM, neelesh gadhia <ngad...@yahoo.com> wrote:

Hello,

I am a newbie to using UDFs on Hive, but I implemented these GenericUDFs
(https://issues.apache.org/jira/browse/HIVE-2361) on hive 0.9.0 and hadoop
1.1.1 and was able to add the jar to hive.

 

hive> select * from emp;
OK
1    10    1000
2    10    1200
3    12    1500
4    12    300
5    12    1800
6    20    5000
7    20    7000
8    20    10000
Time taken: 0.191 seconds

 

hive> add jar /usr/local/Cellar/hive/0.9.0/libexec/lib/GenUDF.jar;

Added /usr/local/Cellar/hive/0.9.0/libexec/lib/GenUDF.jar to class path
Added resource: /usr/local/Cellar/hive/0.9.0/libexec/lib/GenUDF.jar


hive> create temporary function nexr_sum as
'com.nexr.platform.analysis.udf.GenericUDFSum';
OK
Time taken: 0.012 seconds

 

and kicked off the sample SQL shown below.

 

SELECT t.empno, t.deptno, t.sal, nexr_sum(hash(t.deptno),t.sal) as sal_sum
FROM (
select a.empno, a.deptno, a.sal from emp a
distribute by hash(a.deptno)
sort BY a.deptno, a.empno
) t;

 

The SQL failed with errors. Any pointers or advice towards resolving this
would be much appreciated.

 

2013-02-13 23:30:18,925 INFO org.apache.hadoop.mapred.JobTracker: Adding
task (REDUCE) 'attempt_201302132324_0002_r_000000_3' to tip
task_201302132324_0002_r_000000, for tracker
'tracker_192.168.0.151:localhost/127.0.0.1:50099'
2013-02-13 23:30:18,925 INFO org.apache.hadoop.mapred.JobTracker: Removing
task 'attempt_201302132324_0002_r_000000_2'
2013-02-13 23:30:26,484 INFO org.apache.hadoop.mapred.TaskInProgress: Error from attempt_201302132324_0002_r_000000_3: java.lang.RuntimeException: Error in configuring object
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:486)
at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:421)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1136)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
... 9 more
Caused by: java.lang.RuntimeException: Reduce operator initialization failed
at org.apache.hadoop.hive.ql.exec.ExecReducer.configure(ExecReducer.java:157)
... 14 more
Caused by: java.lang.NullPointerException
at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator.initialize(ExprNodeGenericFuncEvaluator.java:137)
at org.apache.hadoop.hive.ql.exec.Operator.initEvaluators(Operator.java:896)
at org.apache.hadoop.hive.ql.exec.Operator.initEvaluatorsAndReturnStruct(Operator.java:922)
at org.apache.hadoop.hive.ql.exec.SelectOperator.initializeOp(SelectOperator.java:60)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:357)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:433)
at org.apache.hadoop.hive.ql.exec.Operator.initializeChildren(Operator.java:389)
at org.apache.hadoop.hive.ql.exec.ExtractOperator.initializeOp(ExtractOperator.java:40)
at org.apache.hadoop.hive.ql.exec.Operator.initialize(Operator.java:357)
at org.apache.hadoop.hive.ql.exec.ExecReducer.configure(ExecReducer.java:150)
... 14 more

2013-02-13 23:30:29,819 INFO org.apache.hadoop.mapred.TaskInProgress:
TaskInProgress task_201302132324_0002_r_000000 has failed 4 times.
2013-02-13 23:30:29,820 INFO org.apache.hadoop.mapred.JobInProgress:
TaskTracker at '192.168.0.151' turned 'flaky'
.... 12 more lines..

 

I tried a different function, "GenericUDFMax", and got the same error.

 

Any pointers/advice on what could be wrong?





 

-- 
Dean Wampler, Ph.D.

thinkbiganalytics.com

+1-312-339-1330

 

 





 

-- 
Nitin Pawar

 
