Error while executing program with Hive JDBC

2012-04-26 Thread Bhavesh Shah
Hello all,
I have written this small program, but I am getting an error.

Program:
-
import java.io.FileWriter;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;


public class SampleHiveProgram
{
String lyear=;
String lquarter=;
String driverName = org.apache.hadoop.hive.jdbc.HiveDriver;
public static void main(String[] args)
{
SampleHiveProgram s=new SampleHiveProgram();
s.startHiveThriftServer();
s.quarterTable();
}

public void startHiveThriftServer()
{
try
{
String cmd =
/home/hadoop/sqoop-1.3.0-cdh3u1/bin/StartHiveThriftServer.sh; // this is
the command to execute in the Unix shell
// create a process for the shell
ProcessBuilder pb = new ProcessBuilder(bash, -c, cmd);
pb.redirectErrorStream(true); // use this to capture messages
sent to stderr
Process shell = pb.start();
InputStream shellIn = shell.getInputStream(); // this captures
the output from the command
// wait for the shell to finish and get the return code
// at this point you can process the output issued by the
command
// for instance, this reads the output and writes it to
System.out:
int c;
while ((c = shellIn.read()) != -1)
{
System.out.write(c);
}
int shellExitStatus = shell.waitFor();
// close the stream
shellIn.close();
}
catch(Exception e)
{
e.printStackTrace();
System.exit(1);
}
}

public void quarterTable()
{
try
{
String start=2010-01-01;
String end=2011-01-01;
System.out.println(in quarter table...);
//create connection with database
Class.forName(driverName);
Connection con =
DriverManager.getConnection(jdbc:hive://localhost:1/default, , );
String sql=null;
Statement stmt = con.createStatement();
ResultSet res=null;

sql=drop table TmpQuarterTable;
System.out.println(Dropping the Quarter Table...);
res = stmt.executeQuery(sql);

//Creating Quarter Table
sql=create table TmpQuarterTable (year string, quarter string,
quarterstart string, quarterend string, quartername string) +
 ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES
TERMINATED BY '\012' STORED AS TEXTFILE ;
System.out.println(Creating the Quarter Table...);
res = stmt.executeQuery(sql);

//create the file
FileWriter fw=new FileWriter(/home/hadoop/Quarter.txt);

//convert string date to calendar date
DateFormat formatter =new SimpleDateFormat(-MM-dd);
Date sdate=(Date)formatter.parse(start);
Date edate=(Date)formatter.parse(end);

Calendar c1=Calendar.getInstance();
Calendar c2=Calendar.getInstance();

c1.setTime(sdate);
c2.setTime(edate);

int q=0;
String QuarterEndDate=null;
int resultMonthCount=0;
int resultYear =0;
int resultMonth =0;

Calendar c3=Calendar.getInstance();
c3.setTime(c1.getTime());
while(c3.compareTo(c2)=0)
{
  if(c3.get(Calendar.MONTH)=0 
c3.get(Calendar.MONTH)=2)
  {
  q=1;

QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-04-01;
  }
  else if(c3.get(Calendar.MONTH)=3 
c3.get(Calendar.MONTH)=5)
  {
  q=2;

QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-07-01;
  }
  else if(c3.get(Calendar.MONTH)=6 
c3.get(Calendar.MONTH)=8)
  {
  q=3;

QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-10-01;
  }
  else if(c3.get(Calendar.MONTH)=9 
c3.get(Calendar.MONTH)=11)
  {
  q=4;

QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR)+1)+-01-01;
  }
  //Got the QuarterEndDate (-MM-DD)
  //split the QuarterEndDate into qdate and create
quarter_end_date
  String[] qdate=QuarterEndDate.split(-);
  Calendar quarter_end_date=Calendar.getInstance();

quarter_end_date.set(Integer.parseInt(qdate[0]),Integer.parseInt(qdate[1]),Integer.parseInt(qdate[2]));
  System.out.println(quarter_end_date :

Re: Error while executing program with Hive JDBC

2012-04-26 Thread Aniket Mokashi
put libthrift and libfb303 jars on classpath.

Thanks,
Aniket

On Wed, Apr 25, 2012 at 11:14 PM, Bhavesh Shah bhavesh25s...@gmail.comwrote:

 Hello all,
 I have written this small program, but I am getting an error.

 Program:
 -
 import java.io.FileWriter;
 import java.io.InputStream;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;


 public class SampleHiveProgram
 {
String lyear=;
String lquarter=;
String driverName = org.apache.hadoop.hive.jdbc.HiveDriver;
public static void main(String[] args)
{
SampleHiveProgram s=new SampleHiveProgram();
s.startHiveThriftServer();
s.quarterTable();
}

public void startHiveThriftServer()
{
try
{
String cmd =
 /home/hadoop/sqoop-1.3.0-cdh3u1/bin/StartHiveThriftServer.sh; // this is
 the command to execute in the Unix shell
// create a process for the shell
ProcessBuilder pb = new ProcessBuilder(bash, -c, cmd);
pb.redirectErrorStream(true); // use this to capture messages
 sent to stderr
Process shell = pb.start();
InputStream shellIn = shell.getInputStream(); // this captures
 the output from the command
// wait for the shell to finish and get the return code
// at this point you can process the output issued by the
 command
// for instance, this reads the output and writes it to
 System.out:
int c;
while ((c = shellIn.read()) != -1)
{
System.out.write(c);
}
int shellExitStatus = shell.waitFor();
// close the stream
shellIn.close();
}
catch(Exception e)
{
e.printStackTrace();
System.exit(1);
}
}

public void quarterTable()
{
try
{
String start=2010-01-01;
String end=2011-01-01;
System.out.println(in quarter table...);
//create connection with database
Class.forName(driverName);
Connection con =
 DriverManager.getConnection(jdbc:hive://localhost:1/default, , );
String sql=null;
Statement stmt = con.createStatement();
ResultSet res=null;

sql=drop table TmpQuarterTable;
System.out.println(Dropping the Quarter Table...);
res = stmt.executeQuery(sql);

//Creating Quarter Table
sql=create table TmpQuarterTable (year string, quarter string,
 quarterstart string, quarterend string, quartername string) +
 ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES
 TERMINATED BY '\012' STORED AS TEXTFILE ;
System.out.println(Creating the Quarter Table...);
res = stmt.executeQuery(sql);

//create the file
FileWriter fw=new FileWriter(/home/hadoop/Quarter.txt);

//convert string date to calendar date
DateFormat formatter =new SimpleDateFormat(-MM-dd);
Date sdate=(Date)formatter.parse(start);
Date edate=(Date)formatter.parse(end);

Calendar c1=Calendar.getInstance();
Calendar c2=Calendar.getInstance();

c1.setTime(sdate);
c2.setTime(edate);

int q=0;
String QuarterEndDate=null;
int resultMonthCount=0;
int resultYear =0;
int resultMonth =0;

Calendar c3=Calendar.getInstance();
c3.setTime(c1.getTime());
while(c3.compareTo(c2)=0)
{
  if(c3.get(Calendar.MONTH)=0 
 c3.get(Calendar.MONTH)=2)
  {
  q=1;

 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-04-01;
  }
  else if(c3.get(Calendar.MONTH)=3 
 c3.get(Calendar.MONTH)=5)
  {
  q=2;

 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-07-01;
  }
  else if(c3.get(Calendar.MONTH)=6 
 c3.get(Calendar.MONTH)=8)
  {
  q=3;

 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-10-01;
  }
  else if(c3.get(Calendar.MONTH)=9 
 c3.get(Calendar.MONTH)=11)
  {
  q=4;

 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR)+1)+-01-01;
  }
  //Got the QuarterEndDate (-MM-DD)
  //split the QuarterEndDate into qdate and create
 quarter_end_date
  String[] qdate=QuarterEndDate.split(-);
  Calendar quarter_end_date=Calendar.getInstance();


 

Re: Error while executing program with Hive JDBC

2012-04-26 Thread alo alt
You have to use fb303 and libthrift jars, and putting them into the classpath 
or export them.

- Alex 

--
Alexander Lorenz
http://mapredit.blogspot.com

On Apr 26, 2012, at 8:14 AM, Bhavesh Shah wrote:

 Hello all,
 I have written this small program, but I am getting an error.
 
 Program:
 -
 import java.io.FileWriter;
 import java.io.InputStream;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.Statement;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Calendar;
 import java.util.Date;
 
 
 public class SampleHiveProgram 
 {
 String lyear=;
 String lquarter=;
 String driverName = org.apache.hadoop.hive.jdbc.HiveDriver;
 public static void main(String[] args) 
 {
 SampleHiveProgram s=new SampleHiveProgram();
 s.startHiveThriftServer();
 s.quarterTable();
 }
 
 public void startHiveThriftServer()
 {
 try
 {
 String cmd = 
 /home/hadoop/sqoop-1.3.0-cdh3u1/bin/StartHiveThriftServer.sh; // this is 
 the command to execute in the Unix shell
 // create a process for the shell
 ProcessBuilder pb = new ProcessBuilder(bash, -c, cmd);
 pb.redirectErrorStream(true); // use this to capture messages 
 sent to stderr
 Process shell = pb.start();
 InputStream shellIn = shell.getInputStream(); // this captures 
 the output from the command
 // wait for the shell to finish and get the return code
 // at this point you can process the output issued by the command
 // for instance, this reads the output and writes it to 
 System.out:
 int c;
 while ((c = shellIn.read()) != -1) 
 {
 System.out.write(c);
 }
 int shellExitStatus = shell.waitFor(); 
 // close the stream
 shellIn.close();
 }
 catch(Exception e)
 {
 e.printStackTrace();
 System.exit(1);
 }
 }
 
 public void quarterTable()
 {
 try
 {
 String start=2010-01-01;
 String end=2011-01-01;
 System.out.println(in quarter table...);
 //create connection with database
 Class.forName(driverName);
 Connection con = 
 DriverManager.getConnection(jdbc:hive://localhost:1/default, , );
 String sql=null;
 Statement stmt = con.createStatement();
 ResultSet res=null;
 
 sql=drop table TmpQuarterTable;
 System.out.println(Dropping the Quarter Table...);
 res = stmt.executeQuery(sql);
 
 //Creating Quarter Table
 sql=create table TmpQuarterTable (year string, quarter string, 
 quarterstart string, quarterend string, quartername string) +
  ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES 
 TERMINATED BY '\012' STORED AS TEXTFILE ;
 System.out.println(Creating the Quarter Table...);
 res = stmt.executeQuery(sql);
 
 //create the file
 FileWriter fw=new FileWriter(/home/hadoop/Quarter.txt);
 
 //convert string date to calendar date
 DateFormat formatter =new SimpleDateFormat(-MM-dd);
 Date sdate=(Date)formatter.parse(start);
 Date edate=(Date)formatter.parse(end);
 
 Calendar c1=Calendar.getInstance();
 Calendar c2=Calendar.getInstance();
 
 c1.setTime(sdate);
 c2.setTime(edate);
 
 int q=0;
 String QuarterEndDate=null;
 int resultMonthCount=0; 
 int resultYear =0;
 int resultMonth =0;
 
 Calendar c3=Calendar.getInstance();
 c3.setTime(c1.getTime());  
 while(c3.compareTo(c2)=0)
 {
   if(c3.get(Calendar.MONTH)=0  
 c3.get(Calendar.MONTH)=2)
   {
   q=1;
   
 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-04-01;
   }
   else if(c3.get(Calendar.MONTH)=3  
 c3.get(Calendar.MONTH)=5)
   {
   q=2;
   
 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-07-01;
   }
   else if(c3.get(Calendar.MONTH)=6  
 c3.get(Calendar.MONTH)=8)
   {
   q=3;
   
 QuarterEndDate=Integer.toString(c3.get(Calendar.YEAR))+-10-01;
   }
   else if(c3.get(Calendar.MONTH)=9  
 c3.get(Calendar.MONTH)=11)
   {
  

[jira] [Commented] (HIVE-494) Select columns by index instead of name

2012-04-26 Thread Carl Steinbach (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-494?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13262449#comment-13262449
 ] 

Carl Steinbach commented on HIVE-494:
-

bq. It seemed not a standard SQL and also could be confusional especially with 
join/unions.

I'm a little worried about this too, mainly because it appears to extend SQL 
syntax and (speaking personally) I don't think I fully understand the long term 
impact of a change like this. If it turns out that this syntax is broken, 
deprecating it is going to be painful for all of the people who start using it.

bq. Could it be worth to be implemented?

I think we should pass on this unless it turns out to be part of standard SQL, 
or we can point to some other DB like MySQL that already implements it. On a 
related note, HIVE-1947 covers implementing similar syntax for the ORDER BY 
clause, and it turns out that this *is* part of standard SQL. It's also 
interesting to note that ordinal column references in the WHERE clause aren't 
supported since it would result in ambiguous statements like this:

{noformat}
SELECT a, b from src
WHERE 1=1;
{noformat}


 Select columns by index instead of name
 ---

 Key: HIVE-494
 URL: https://issues.apache.org/jira/browse/HIVE-494
 Project: Hive
  Issue Type: Wish
  Components: Clients, Query Processor
Reporter: Adam Kramer
Assignee: Navis
Priority: Minor
  Labels: SQL
 Attachments: HIVE-494.D1641.1.patch


 SELECT mytable[0], mytable[2] FROM some_table_name mytable;
 ...should return the first and third columns, respectively, from mytable 
 regardless of their column names.
 The need for names specifically is kind of silly when they just get 
 translated into numbers anyway.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




Error related Hadoop while running Amazon EMR Job

2012-04-26 Thread Bhavesh Shah
Hello all,

I launched my instance on Amazon EMR. And I run the job yesterday on it and
I got the following error in log fies (Log Files of JobTracker)

DataStreamer Exception: org.apache.hadoop.ipc.RemoteException:
java.io.IOException: File /mnt/var/lib/hadoop/tmp/mapred/system/
jobtracker.info could only be replicated to 0 nodes, instead of 1

And due to this my job got terminated. Is anything wrong with the
configuration, or with how I am running the jobs?

Can someone suggest me what could be the reason while SHUTTING DOWN the Job
Flow in AMazon EMR?

Many Thanks.


-- 
Regards,
Bhavesh Shah


[jira] [Commented] (HIVE-2424) Don't expose thrift, commons and json classes in the hive exec jar

2012-04-26 Thread Travis Crawford (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13262743#comment-13262743
 ] 

Travis Crawford commented on HIVE-2424:
---

I think the root of the issue is hive-exec contains Hive classes that are 
available in no other jar, plus the repackaged 3rd-party jars. This works great 
for Hive itself, but presents integration issues if users need a different 
3rd-party dependency for some reason. In an ideal world everyone would use the 
same library versions, but since that's not always possible users may find 
themselves in a situation where they need to change a library version.

hive-exec.jar contains all of the following Hive sub projects:

* hive-common
* hive-ql
* hive-serde
* hive-shims

What are your thoughts on publishing these four jars as stand-alone artifacts, 
in addition to hive-exec. Since its strictly additive no existing behavior 
would be affected. It would allow users writing tools on top of Hive to depend 
on just the Hive classes, freeing them to construct their classpath as 
appropriate for their site.

 Don't expose thrift, commons and json classes in the hive exec jar 
 ---

 Key: HIVE-2424
 URL: https://issues.apache.org/jira/browse/HIVE-2424
 Project: Hive
  Issue Type: Improvement
  Components: Build Infrastructure
Reporter: Eli Collins

 The hive exec jar includes exploded thrift, json, and commons lang classes. 
 These may conflict with the user's classpath. This could be fixed by jar 
 jaring or using shade. A mechanism that allowed a user to substitute 
 alternative versions w/o recompiling might be a useful intermediate step 
 (though will require the user substitute alternative versions that work w/ 
 Hive).

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Work started] (HIVE-581) improve group by syntax

2012-04-26 Thread Weizhe Shi (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-581?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Work on HIVE-581 started by Weizhe Shi.

 improve group by syntax
 ---

 Key: HIVE-581
 URL: https://issues.apache.org/jira/browse/HIVE-581
 Project: Hive
  Issue Type: Improvement
  Components: Clients, Query Processor
Reporter: Larry Ogrodnek
Assignee: Weizhe Shi

 It would be nice if group by allowed either column aliases or column position 
 (like mysql).
 It can be burdensome to have to repeat UDFs both in the select and in the 
 group by.
 e.g. instead of:
 select f1(col1), f2(col2), f3(col3), count(1) group by f1(col1), f2(col2), 
 f3(col3);
 it would allow:
 select f1(col1), f2(col2), f3(col3), count(1) group by 1, 2, 3;

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-581) improve group by syntax

2012-04-26 Thread Weizhe Shi (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-581?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Weizhe Shi updated HIVE-581:


  Labels: features  (was: )
Release Note: Allow the syntax of position alias in groupby expression.
  Status: Patch Available  (was: In Progress)

 improve group by syntax
 ---

 Key: HIVE-581
 URL: https://issues.apache.org/jira/browse/HIVE-581
 Project: Hive
  Issue Type: Improvement
  Components: Clients, Query Processor
Reporter: Larry Ogrodnek
Assignee: Weizhe Shi
  Labels: features

 It would be nice if group by allowed either column aliases or column position 
 (like mysql).
 It can be burdensome to have to repeat UDFs both in the select and in the 
 group by.
 e.g. instead of:
 select f1(col1), f2(col2), f3(col3), count(1) group by f1(col1), f2(col2), 
 f3(col3);
 it would allow:
 select f1(col1), f2(col2), f3(col3), count(1) group by 1, 2, 3;

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-581) improve group by syntax

2012-04-26 Thread Carl Steinbach (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-581?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13262782#comment-13262782
 ] 

Carl Steinbach commented on HIVE-581:
-

@Weizhe: Glad to see someone wants to work on this. Also wanted to suggest 
that it may be convenient to address HIVE-1947 in the same patch.

 improve group by syntax
 ---

 Key: HIVE-581
 URL: https://issues.apache.org/jira/browse/HIVE-581
 Project: Hive
  Issue Type: Improvement
  Components: Clients, Query Processor
Reporter: Larry Ogrodnek
Assignee: Weizhe Shi
  Labels: features

 It would be nice if group by allowed either column aliases or column position 
 (like mysql).
 It can be burdensome to have to repeat UDFs both in the select and in the 
 group by.
 e.g. instead of:
 select f1(col1), f2(col2), f3(col3), count(1) group by f1(col1), f2(col2), 
 f3(col3);
 it would allow:
 select f1(col1), f2(col2), f3(col3), count(1) group by 1, 2, 3;

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




Hive server concurrent connection

2012-04-26 Thread Benyi Wang
I'm a little confused by the statement "In fact, it's impossible for HiveServer
to support concurrent connections using the current Thrift API" on the Hive wiki
page
https://cwiki.apache.org/confluence/display/Hive/HiveServer2+Thrift+API.

I started a hive server on hostA using cdh3u3

hadoop-hive.noarch  0.7.1+42.36-2
 installed

Then I logged on two nodes: hostB, and hostC, then start hive client

$ hive -h hostA -p 1

It seems that both of two hive clients work normally.

Am I wrong? or the issue in the wiki page has been resolved?


[jira] [Created] (HIVE-2981) store the query that populated the partition in the hive metastore

2012-04-26 Thread Namit Jain (JIRA)
Namit Jain created HIVE-2981:


 Summary: store the query that populated the partition in the hive 
metastore
 Key: HIVE-2981
 URL: https://issues.apache.org/jira/browse/HIVE-2981
 Project: Hive
  Issue Type: Improvement
Reporter: Namit Jain


This would be very useful for debugging in general.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2424) Don't expose thrift, commons and json classes in the hive exec jar

2012-04-26 Thread Travis Crawford (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13262898#comment-13262898
 ] 

Travis Crawford commented on HIVE-2424:
---

To make the discussion a bit more concrete, I went ahead and experimented with 
publishing subproject jars in addition to the existing fat jar.

Check out the following branch where I created a new exec subproject that 
produces hive-exec.jar, and where ql produces a hive-ql.jar like other sub 
projects.

https://github.com/traviscrawford/hive/compare/HIVE-2424_thin_jars

What do y'all think about this approach? It would let existing stuff continue 
working as-is, while making all Hive classes available for those that for 
whatever reason need to do something fancy.

Thoughts?

 Don't expose thrift, commons and json classes in the hive exec jar 
 ---

 Key: HIVE-2424
 URL: https://issues.apache.org/jira/browse/HIVE-2424
 Project: Hive
  Issue Type: Improvement
  Components: Build Infrastructure
Reporter: Eli Collins

 The hive exec jar includes exploded thrift, json, and commons lang classes. 
 These may conflict with the user's classpath. This could be fixed by jar 
 jaring or using shade. A mechanism that allowed a user to substitute 
 alternative versions w/o recompiling might be a useful intermediate step 
 (though will require the user substitute alternative versions that work w/ 
 Hive).

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




Hive-trunk-h0.21 - Build # 1398 - Still Failing

2012-04-26 Thread Apache Jenkins Server
Changes for Build #1397
[kevinwilfong] HIVE-2918. Hive Dynamic Partition Insert - move task not 
considering 'hive.exec.max.dynamic.partitions' from CLI. (cwsteinbach via 
kevinwilfong)


Changes for Build #1398



1 tests failed.
FAILED:  
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_dyn_part_max

Error Message:
Unexpected exception See build/ql/tmp/hive.log, or try ant test ... 
-Dtest.silent=false to get more logs.

Stack Trace:
junit.framework.AssertionFailedError: Unexpected exception
See build/ql/tmp/hive.log, or try ant test ... -Dtest.silent=false to get 
more logs.
at junit.framework.Assert.fail(Assert.java:50)
at 
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testNegativeCliDriver_dyn_part_max(TestNegativeCliDriver.java:5308)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:168)
at junit.framework.TestCase.runBare(TestCase.java:134)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:124)
at junit.framework.TestSuite.runTest(TestSuite.java:243)
at junit.framework.TestSuite.run(TestSuite.java:238)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:422)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:931)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:785)




The Apache Jenkins build system has built Hive-trunk-h0.21 (build #1398)

Status: Still Failing

Check console output at https://builds.apache.org/job/Hive-trunk-h0.21/1398/ to 
view the results.

[jira] [Created] (HIVE-2982) When using Inputsplits that don't extend FileInputSplits, hive attempts to create a Path with an empty string which will always crash.

2012-04-26 Thread Tristan Buckner (JIRA)
Tristan Buckner created HIVE-2982:
-

 Summary: When using Inputsplits that don't extend FileInputSplits, 
hive attempts to create a Path with an empty string which will always crash.
 Key: HIVE-2982
 URL: https://issues.apache.org/jira/browse/HIVE-2982
 Project: Hive
  Issue Type: Bug
  Components: StorageHandler
Affects Versions: 0.7.1
Reporter: Tristan Buckner
Priority: Minor


The method 
org.apache.hadoop.hive.ql.io.HiveInputFormat$HiveInputSplit.getPath() attempts 
to create a Path with an empty string:

public Path getPath() {
  if (inputSplit instanceof FileSplit) {
return ((FileSplit) inputSplit).getPath();
  }
return new Path();
  }

Path doesn't take null or an empty string so this means crash if not FileSplit.

I'm not sure if some other random string would be appropriate or fix this. 

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2424) Don't expose thrift, commons and json classes in the hive exec jar

2012-04-26 Thread Carl Steinbach (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13263210#comment-13263210
 ] 

Carl Steinbach commented on HIVE-2424:
--

The downside of the thin jar approach is that the distribution now contains one 
more (redundant) JAR file, and developers have to maintain two new build files. 
There will also inevitably be questions from users about the differences 
between the hive-exec and hive-ql JARs. Using JarJar or Shade avoids these 
problems. Is there some reason why it doesn't satisfy your use case?

 Don't expose thrift, commons and json classes in the hive exec jar 
 ---

 Key: HIVE-2424
 URL: https://issues.apache.org/jira/browse/HIVE-2424
 Project: Hive
  Issue Type: Improvement
  Components: Build Infrastructure
Reporter: Eli Collins

 The hive exec jar includes exploded thrift, json, and commons lang classes. 
 These may conflict with the user's classpath. This could be fixed by jar 
 jaring or using shade. A mechanism that allowed a user to substitute 
 alternative versions w/o recompiling might be a useful intermediate step 
 (though will require the user substitute alternative versions that work w/ 
 Hive).

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2424) Don't expose thrift, commons and json classes in the hive exec jar

2012-04-26 Thread Travis Crawford (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanelfocusedCommentId=13263224#comment-13263224
 ] 

Travis Crawford commented on HIVE-2424:
---

Possibly jarjar/shade will solve the problem - I'm not familiar with how they 
work and will take a look.

Your point about user confusion over the new jars is definitely valid. It's a 
trade-off though, since there's existing confusion about how to work around 
hive-exec. Likely most users will just install a release and not be bothered by 
these additional jars. Developers integrating with Hive would likely benefit 
from being able to construct a custom classpath.

The posted approach does add an extra sub project & build files, but 
maintenance overhead is pretty small since mostly they inherit 
{{build-common.xml}} functionality. Perhaps I could offset the additional 
overhead by simplifying the maven-related tasks in {{build.xml}}? There's some 
repetitiveness that could be simplified with macros & other minor restructuring.

I'll take a look at jarjar and see if that easily solves this issue.

 Don't expose thrift, commons and json classes in the hive exec jar 
 ---

 Key: HIVE-2424
 URL: https://issues.apache.org/jira/browse/HIVE-2424
 Project: Hive
  Issue Type: Improvement
  Components: Build Infrastructure
Reporter: Eli Collins

 The hive exec jar includes exploded thrift, json, and commons lang classes. 
 These may conflict with the user's classpath. This could be fixed by jar 
 jaring or using shade. A mechanism that allowed a user to substitute 
 alternative versions w/o recompiling might be a useful intermediate step 
 (though will require the user substitute alternative versions that work w/ 
 Hive).

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2424) Don't expose thrift, commons and json classes in the hive exec jar

2012-04-26 Thread Travis Crawford (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2424?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263238#comment-13263238
 ] 

Travis Crawford commented on HIVE-2424:
---

Looking at JarJar it aims to solve exactly the issue being discussed here:

You can avoid problems where your library depends on a specific version of a 
library, which may conflict with the dependencies of another library.

However, it does have some consequences:

Jar Jar Links includes an Ant task that extends the built-in jar task. The 
normal zipfileset element is used to embed jar files. A new rule element is 
added which uses wildcards patterns to rename the embedded class files. 
Bytecode transformation (via ASM) is used to change references to the renamed 
classes, and special handling is provided for moving resource files and 
transforming string literals.

So the basic idea is avoiding class version conflicts through renaming. This is 
potentially problematic for classes outside the jarjar that are not aware of 
the class renames.

I can investigate this further but my initial reaction to the jarjar approach 
is that it sounds good if you're creating an entirely self-contained jar, but 
not as an element of a multi-jar classpath. For example, this approach seems 
totally appropriate for a fully self-contained JDBC jar that users can give to 
their favorite GUI app, where UDFs are installed server-side. For a Hadoop job 
that may depend on the bundled contents (UDF that makes a thrift call) this 
approach seems problematic because thrift would be renamed and therefore 
unavailable outside the jarjar, forcing users to resubmit the bundled jars 
under their original name.

Thoughts?

 Don't expose thrift, commons and json classes in the hive exec jar 
 ---

 Key: HIVE-2424
 URL: https://issues.apache.org/jira/browse/HIVE-2424
 Project: Hive
  Issue Type: Improvement
  Components: Build Infrastructure
Reporter: Eli Collins

 The hive exec jar includes exploded thrift, json, and commons lang classes. 
 These may conflict with the user's classpath. This could be fixed by jar 
 jaring or using shade. A mechanism that allowed a user to substitute 
 alternative versions w/o recompiling might be a useful intermediate step 
 (though will require the user substitute alternative versions that work w/ 
 Hive).

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Created] (HIVE-2983) Hive ant targets for publishing maven artifacts can be simplified

2012-04-26 Thread Travis Crawford (JIRA)
Travis Crawford created HIVE-2983:
-

 Summary: Hive ant targets for publishing maven artifacts can be 
simplified
 Key: HIVE-2983
 URL: https://issues.apache.org/jira/browse/HIVE-2983
 Project: Hive
  Issue Type: Improvement
Reporter: Travis Crawford
Assignee: Travis Crawford
Priority: Minor


Hive has a few ant tasks related to publishing maven artifacts. As not all sub 
projects publish artifacts the {{iterate}} macro that simplifies other tasks 
cannot be used in this context.

Hive already uses the {{for}} task from ant-contrib, which works great here. 
{{build.xml}} can be simplified by using the for task when preparing maven 
artifacts.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2983) Hive ant targets for publishing maven artifacts can be simplified

2012-04-26 Thread Phabricator (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2983?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Phabricator updated HIVE-2983:
--

Attachment: HIVE-2983.D2961.1.patch

travis requested code review of HIVE-2983 [jira] Hive ant targets for 
publishing maven artifacts can be simplified.
Reviewers: JIRA

  Simplify some maven-related ant tasks by using for loops.

  Hive has a few ant tasks related to publishing maven artifacts. As not all 
sub projects publish artifacts the iterate macro that simplifies other tasks 
cannot be used in this context.

  Hive already uses the for task from ant-contrib, which works great here. 
build.xml can be simplified by using the for task when preparing maven 
artifacts.

TEST PLAN
  EMPTY

REVISION DETAIL
  https://reviews.facebook.net/D2961

AFFECTED FILES
  build.xml

MANAGE HERALD DIFFERENTIAL RULES
  https://reviews.facebook.net/herald/view/differential/

WHY DID I GET THIS EMAIL?
  https://reviews.facebook.net/herald/transcript/6741/

Tip: use the X-Herald-Rules header to filter Herald messages in your client.


 Hive ant targets for publishing maven artifacts can be simplified
 -

 Key: HIVE-2983
 URL: https://issues.apache.org/jira/browse/HIVE-2983
 Project: Hive
  Issue Type: Improvement
Reporter: Travis Crawford
Assignee: Travis Crawford
Priority: Minor
 Attachments: HIVE-2983.D2961.1.patch


 Hive has a few ant tasks related to publishing maven artifacts. As not all 
 sub projects publish artifacts the {{iterate}} macro that simplifies other 
 tasks cannot be used in this context.
 Hive already uses the {{for}} task from ant-contrib, which works great here. 
 {{build.xml}} can be simplified by using the for task when preparing maven 
 artifacts.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2983) Hive ant targets for publishing maven artifacts can be simplified

2012-04-26 Thread Travis Crawford (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2983?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Travis Crawford updated HIVE-2983:
--

Status: Patch Available  (was: Open)

 Hive ant targets for publishing maven artifacts can be simplified
 -

 Key: HIVE-2983
 URL: https://issues.apache.org/jira/browse/HIVE-2983
 Project: Hive
  Issue Type: Improvement
Reporter: Travis Crawford
Assignee: Travis Crawford
Priority: Minor
 Attachments: HIVE-2983.D2961.1.patch


 Hive has a few ant tasks related to publishing maven artifacts. As not all 
 sub projects publish artifacts the {{iterate}} macro that simplifies other 
 tasks cannot be used in this context.
 Hive already uses the {{for}} task from ant-contrib, which works great here. 
 {{build.xml}} can be simplified by using the for task when preparing maven 
 artifacts.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2757) hive can't find hadoop executor scripts without HADOOP_HOME set

2012-04-26 Thread Ashutosh Chauhan (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2757?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263267#comment-13263267
 ] 

Ashutosh Chauhan commented on HIVE-2757:


@Roman,
Your patch is definitely an improvement over status quo. HADOOP_HOME is 
deprecated and we have to accomodate for HADOOP_PREFIX for Yarn. Instead of 
doing those changes in bin/hive, it definitely makes sense to do it here. Also, 
patch doesn't change the existing behavior. So, I am +1 on the approach. 
Can you post the full patch so that I can see if there are other changes you 
have in mind.

 hive can't find hadoop executor scripts without HADOOP_HOME set
 ---

 Key: HIVE-2757
 URL: https://issues.apache.org/jira/browse/HIVE-2757
 Project: Hive
  Issue Type: Bug
  Components: CLI
Affects Versions: 0.8.0
Reporter: Roman Shaposhnik
 Attachments: HIVE-2757.patch.txt


 The trouble is that in Hadoop 0.23 HADOOP_HOME has been deprecated. I think 
 it would be really nice if bin/hive can be modified to capture the output of 
 `which hadoop` and pass that as a property into the JVM.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2918) Hive Dynamic Partition Insert - move task not considering 'hive.exec.max.dynamic.partitions' from CLI

2012-04-26 Thread Ashutosh Chauhan (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2918?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263271#comment-13263271
 ] 

Ashutosh Chauhan commented on HIVE-2918:


Looks like this has broken the trunk. See, 
https://builds.apache.org/job/Hive-trunk-h0.21/1397/

 Hive Dynamic Partition Insert - move task not considering 
 'hive.exec.max.dynamic.partitions' from CLI
 -

 Key: HIVE-2918
 URL: https://issues.apache.org/jira/browse/HIVE-2918
 Project: Hive
  Issue Type: Bug
Affects Versions: 0.7.1, 0.8.0, 0.8.1
 Environment: Cent OS 64 bit
Reporter: Bejoy KS
Assignee: Carl Steinbach
 Attachments: HIVE-2918.D2703.1.patch


 Dynamic Partition insert showing an error with the number of partitions 
 created even after the default value of 'hive.exec.max.dynamic.partitions' is 
 bumped high to 2000.
 Error Message:
 Failed with exception Number of dynamic partitions created is 1413, which is 
 more than 1000. To solve this try to set hive.exec.max.dynamic.partitions to 
 at least 1413.
 These are the following properties set on hive CLI
 hive set hive.exec.dynamic.partition=true;
 hive set hive.exec.dynamic.partition.mode=nonstrict;
 hive set hive.exec.max.dynamic.partitions=2000;
 hive set hive.exec.max.dynamic.partitions.pernode=2000;
 This is the query with console error log
 hive 
  INSERT OVERWRITE TABLE partn_dyn Partition (pobox)
  SELECT country,state,pobox FROM non_partn_dyn;
 Total MapReduce jobs = 2
 Launching Job 1 out of 2
 Number of reduce tasks is set to 0 since there's no reduce operator
 Starting Job = job_201204021529_0002, Tracking URL = 
 http://0.0.0.0:50030/jobdetails.jsp?jobid=job_201204021529_0002
 Kill Command = /usr/lib/hadoop/bin/hadoop job  
 -Dmapred.job.tracker=0.0.0.0:8021 -kill job_201204021529_0002
 2012-04-02 16:05:28,619 Stage-1 map = 0%,  reduce = 0%
 2012-04-02 16:05:39,701 Stage-1 map = 100%,  reduce = 0%
 2012-04-02 16:05:50,800 Stage-1 map = 100%,  reduce = 100%
 Ended Job = job_201204021529_0002
 Ended Job = 248865587, job is filtered out (removed at runtime).
 Moving data to: 
 hdfs://0.0.0.0/tmp/hive-cloudera/hive_2012-04-02_16-05-24_919_5976014408587784412/-ext-1
 Loading data to table default.partn_dyn partition (pobox=null)
 Failed with exception Number of dynamic partitions created is 1413, which is 
 more than 1000. To solve this try to set hive.exec.max.dynamic.partitions to 
 at least 1413.
 FAILED: Execution Error, return code 1 from 
 org.apache.hadoop.hive.ql.exec.MoveTask
 I checked the job.xml of the first map only job, there the value 
 hive.exec.max.dynamic.partitions=2000 is reflected but the move task is 
 taking the default value from hive-site.xml . If I change the value in 
 hive-site.xml then the job completes successfully. Bottom line,the property 
 'hive.exec.max.dynamic.partitions'set on CLI is not being considered by move 
 task

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2804) Task log retrieval fails on secure cluster

2012-04-26 Thread Zhenxiao Luo (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2804?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263272#comment-13263272
 ] 

Zhenxiao Luo commented on HIVE-2804:


By creating an ThrowNullPointer UDF, and running it in MiniMRCluster, could 
reproduce a MapReduce job failing.

When running on apache trunk(which is using MR1), get the following error 
message:

[junit] Begin query: throw_npe.q
[junit] Error during job, obtaining debugging information...
[junit] Examining task ID: task_20120426165633901_0001_m_02 (and more) from 
job job_20120426165633901_0001
[junit] 
[junit] Task with the most failures(4): 
[junit] -
[junit] Task ID:
[junit] task_20120426165633901_0001_m_00
[junit] 
[junit] URL:
[junit] 
http://localhost:50030/taskdetails.jsp?jobid=job_20120426165633901_0001&tipid=task_20120426165633901_0001_m_00
[junit] -
[junit] 
[junit] Exception: Client Execution failed with error code = 9

And in the output file:

PREHOOK: Output: 
hdfs://rotor:35306/home/cloudera/Code/hive/build/ql/scratchdir/hive_2012-04-26_16-57-11_129_4964809859660696177/-mr-1
Ended Job = job_20120426165633901_0001 with errors
FAILED: Execution Error, return code 2 from 
org.apache.hadoop.hive.ql.exec.MapRedTask

So, on apache trunk, TaskLog retrieval is working OK.

Since our CDH4 branch could not run MiniMRCluster yet, will try on an MR2 
cluster, and also try to run a local cluster in pseudo-distributed mode.

 Task log retrieval fails on secure cluster
 --

 Key: HIVE-2804
 URL: https://issues.apache.org/jira/browse/HIVE-2804
 Project: Hive
  Issue Type: Bug
  Components: Diagnosability, Query Processor, Security
Reporter: Carl Steinbach
Assignee: Zhenxiao Luo



--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2721) ability to select a view qualified by the database / schema name

2012-04-26 Thread Namit Jain (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2721?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Namit Jain updated HIVE-2721:
-

  Resolution: Fixed
Hadoop Flags: Reviewed
  Status: Resolved  (was: Patch Available)

Committed. Thanks Martin

 ability to select a view qualified by the database / schema name
 

 Key: HIVE-2721
 URL: https://issues.apache.org/jira/browse/HIVE-2721
 Project: Hive
  Issue Type: Bug
  Components: Database/Schema, Query Processor
Affects Versions: 0.7.0, 0.7.1, 0.8.0
Reporter: Robert Morton
Assignee: Martin Traverso
Priority: Blocker
 Attachments: HIVE-2721-1.patch, HIVE-2721.patch


 HIVE-1517 added support for selecting tables from different databases (aka 
 schemas) by qualifying the tables with the database name. The feature work 
 did not however extend this support to views. Note that this point came up in 
 the earlier JIRA, but was not addressed. See the following two comments:
 https://issues.apache.org/jira/browse/HIVE-1517?focusedCommentId=12996641&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-12996641
 https://issues.apache.org/jira/browse/HIVE-1517?focusedCommentId=12996679&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-12996679

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2904) ant gen-test failed

2012-04-26 Thread Namit Jain (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2904?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263281#comment-13263281
 ] 

Namit Jain commented on HIVE-2904:
--

+1

 ant gen-test failed
 ---

 Key: HIVE-2904
 URL: https://issues.apache.org/jira/browse/HIVE-2904
 Project: Hive
  Issue Type: Bug
Affects Versions: 0.8.1
Reporter: Sho Shimauchi
Assignee: tamtam180
  Labels: patch
 Attachments: HIVE-2904.1.patch, HIVE-2904.D2487.1.patch, 
 HIVE-2904.D2487.2.patch, HIVE-2904.D2487.3.patch, HIVE-2904.D2925.1.patch, 
 HIVE-2904.D2925.1.patch


 When I ran the commands introduced in Getting Started page, ant gen-test 
 failed with the following error.
 {quote}
 $ ant gen-test

 Buildfile: /Users/sho/src/apache/hive/ql/build.xml
 test-conditions:
  [echo] Project: ql
 test-init:
  [echo] Project: ql
 [mkdir] Created dir: /Users/sho/src/apache/hive/build/ql/test/data
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/clientpositive
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/clientnegative
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/positive
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/negative
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/data/warehouse
 [mkdir] Created dir: /Users/sho/src/apache/hive/build/ql/test/data/metadb
 gen-test:
  [echo] ql
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/ql/parse/TestParse.java
  from template TestParse.vm
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/ql/parse/TestParseNegative.java
  from template TestParseNegative.vm
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/cli/TestCliDriver.java
  from template TestCliDriver.vm
 BUILD FAILED
 /Users/sho/src/apache/hive/ql/build.xml:116: Problem: failed to create task 
 or type if
 Cause: The name is undefined.
 Action: Check the spelling.
 Action: Check that any custom tasks/types have been declared.
 Action: Check that any presetdef/macrodef declarations have taken place.
 {quote}
 Getting Started: 
 https://cwiki.apache.org/confluence/display/Hive/GettingStarted+EclipseSetup

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2904) ant gen-test failed

2012-04-26 Thread Namit Jain (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2904?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Namit Jain updated HIVE-2904:
-

  Resolution: Fixed
Hadoop Flags: Reviewed
  Status: Resolved  (was: Patch Available)

Committed. Thanks Carl

 ant gen-test failed
 ---

 Key: HIVE-2904
 URL: https://issues.apache.org/jira/browse/HIVE-2904
 Project: Hive
  Issue Type: Bug
Affects Versions: 0.8.1
Reporter: Sho Shimauchi
Assignee: tamtam180
  Labels: patch
 Attachments: HIVE-2904.1.patch, HIVE-2904.D2487.1.patch, 
 HIVE-2904.D2487.2.patch, HIVE-2904.D2487.3.patch, HIVE-2904.D2925.1.patch, 
 HIVE-2904.D2925.1.patch


 When I ran the commands introduced in Getting Started page, ant gen-test 
 failed with the following error.
 {quote}
 $ ant gen-test

 Buildfile: /Users/sho/src/apache/hive/ql/build.xml
 test-conditions:
  [echo] Project: ql
 test-init:
  [echo] Project: ql
 [mkdir] Created dir: /Users/sho/src/apache/hive/build/ql/test/data
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/clientpositive
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/clientnegative
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/positive
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/logs/negative
 [mkdir] Created dir: 
 /Users/sho/src/apache/hive/build/ql/test/data/warehouse
 [mkdir] Created dir: /Users/sho/src/apache/hive/build/ql/test/data/metadb
 gen-test:
  [echo] ql
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/ql/parse/TestParse.java
  from template TestParse.vm
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/ql/parse/TestParseNegative.java
  from template TestParseNegative.vm
  [qtestgen] Template Path:/Users/sho/src/apache/hive/ql/src/test/templates
  [qtestgen] 2012/03/25 15:27:10 org.apache.velocity.runtime.log.JdkLogChute 
 log
  [qtestgen] ???: FileResourceLoader : adding path 
 '/Users/sho/src/apache/hive/ql/src/test/templates'
  [qtestgen] Generated 
 /Users/sho/src/apache/hive/build/ql/test/src/org/apache/hadoop/hive/cli/TestCliDriver.java
  from template TestCliDriver.vm
 BUILD FAILED
 /Users/sho/src/apache/hive/ql/build.xml:116: Problem: failed to create task 
 or type if
 Cause: The name is undefined.
 Action: Check the spelling.
 Action: Check that any custom tasks/types have been declared.
 Action: Check that any presetdef/macrodef declarations have taken place.
 {quote}
 Getting Started: 
 https://cwiki.apache.org/confluence/display/Hive/GettingStarted+EclipseSetup

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Created] (HIVE-2984) Fix test failure in TestNegativeCliDriver.dyn_part_max caused by HIVE-2918

2012-04-26 Thread Carl Steinbach (JIRA)
Carl Steinbach created HIVE-2984:


 Summary: Fix test failure in TestNegativeCliDriver.dyn_part_max 
caused by HIVE-2918
 Key: HIVE-2984
 URL: https://issues.apache.org/jira/browse/HIVE-2984
 Project: Hive
  Issue Type: Bug
Reporter: Carl Steinbach




--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2984) Fix test failure in TestNegativeCliDriver.dyn_part_max caused by HIVE-2918

2012-04-26 Thread Carl Steinbach (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2984?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263290#comment-13263290
 ] 

Carl Steinbach commented on HIVE-2984:
--

https://builds.apache.org/job/Hive-trunk-h0.21/1397/


 Fix test failure in TestNegativeCliDriver.dyn_part_max caused by HIVE-2918
 --

 Key: HIVE-2984
 URL: https://issues.apache.org/jira/browse/HIVE-2984
 Project: Hive
  Issue Type: Bug
Reporter: Carl Steinbach



--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Created] (HIVE-2985) Create a new test framework

2012-04-26 Thread Namit Jain (JIRA)
Namit Jain created HIVE-2985:


 Summary: Create a new test framework
 Key: HIVE-2985
 URL: https://issues.apache.org/jira/browse/HIVE-2985
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain


The high level idea is to replicate the deployment framework from Facebook.

This will let us get the changes tested thoroughly in our environment before 
they are committed.
Also, it makes it easier for contributors outside Facebook to test/debug their 
changes in this environment
and make sure they are not breaking anything.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Created] (HIVE-2986) Create the hooks

2012-04-26 Thread Namit Jain (JIRA)
Namit Jain created HIVE-2986:


 Summary: Create the hooks
 Key: HIVE-2986
 URL: https://issues.apache.org/jira/browse/HIVE-2986
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain


This is needed for https://issues.apache.org/jira/browse/HIVE-2985

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Assigned] (HIVE-2985) Create a new test framework

2012-04-26 Thread Namit Jain (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2985?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Namit Jain reassigned HIVE-2985:


Assignee: Namit Jain

 Create a new test framework
 ---

 Key: HIVE-2985
 URL: https://issues.apache.org/jira/browse/HIVE-2985
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain
Assignee: Namit Jain

 The high level idea is to replicate the deployment framework from Facebook.
 This will let us get the changes tested thoroughly in our environment before 
 they are committed.
 Also, it makes it easier for contributors outside Facebook to test/debug 
 their changes in this environment
 and make sure they are not breaking anything.

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2986) Create the hooks

2012-04-26 Thread Namit Jain (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2986?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263299#comment-13263299
 ] 

Namit Jain commented on HIVE-2986:
--

https://reviews.facebook.net/differential/diff/9471/

 Create the hooks
 

 Key: HIVE-2986
 URL: https://issues.apache.org/jira/browse/HIVE-2986
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain

 This is needed for https://issues.apache.org/jira/browse/HIVE-2985

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2986) Create the hooks

2012-04-26 Thread Namit Jain (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2986?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Namit Jain updated HIVE-2986:
-

Attachment: hive.1.2986.patch

 Create the hooks
 

 Key: HIVE-2986
 URL: https://issues.apache.org/jira/browse/HIVE-2986
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain
 Attachments: hive.1.2986.patch


 This is needed for https://issues.apache.org/jira/browse/HIVE-2985

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Updated] (HIVE-2986) Create the hooks

2012-04-26 Thread Namit Jain (JIRA)

 [ 
https://issues.apache.org/jira/browse/HIVE-2986?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Namit Jain updated HIVE-2986:
-

Status: Patch Available  (was: Open)

 Create the hooks
 

 Key: HIVE-2986
 URL: https://issues.apache.org/jira/browse/HIVE-2986
 Project: Hive
  Issue Type: New Feature
Reporter: Namit Jain
 Attachments: hive.1.2986.patch


 This is needed for https://issues.apache.org/jira/browse/HIVE-2985

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




[jira] [Commented] (HIVE-2721) ability to select a view qualified by the database / schema name

2012-04-26 Thread Hudson (JIRA)

[ 
https://issues.apache.org/jira/browse/HIVE-2721?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13263303#comment-13263303
 ] 

Hudson commented on HIVE-2721:
--

Integrated in Hive-trunk-h0.21 #1399 (See 
[https://builds.apache.org/job/Hive-trunk-h0.21/1399/])
HIVE-2721 ability to select a view qualified by the database / schema name
(Martin Traverso via namit) (Revision 1330991)

 Result = FAILURE
namit : http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN&view=rev&rev=1330991
Files : 
* 
/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
* /hive/trunk/ql/src/test/queries/clientpositive/view.q
* /hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
* /hive/trunk/ql/src/test/results/clientpositive/create_or_replace_view.q.out
* /hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
* /hive/trunk/ql/src/test/results/clientpositive/create_view_partitioned.q.out
* 
/hive/trunk/ql/src/test/results/clientpositive/insert2_overwrite_partitions.q.out
* /hive/trunk/ql/src/test/results/clientpositive/view.q.out


 ability to select a view qualified by the database / schema name
 

 Key: HIVE-2721
 URL: https://issues.apache.org/jira/browse/HIVE-2721
 Project: Hive
  Issue Type: Bug
  Components: Database/Schema, Query Processor
Affects Versions: 0.7.0, 0.7.1, 0.8.0
Reporter: Robert Morton
Assignee: Martin Traverso
Priority: Blocker
 Attachments: HIVE-2721-1.patch, HIVE-2721.patch


 HIVE-1517 added support for selecting tables from different databases (aka 
 schemas) by qualifying the tables with the database name. The feature work 
 did not however extend this support to views. Note that this point came up in 
 the earlier JIRA, but was not addressed. See the following two comments:
 https://issues.apache.org/jira/browse/HIVE-1517?focusedCommentId=12996641&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-12996641
 https://issues.apache.org/jira/browse/HIVE-1517?focusedCommentId=12996679&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-12996679

--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators: 
https://issues.apache.org/jira/secure/ContactAdministrators!default.jspa
For more information on JIRA, see: http://www.atlassian.com/software/jira




Hive-trunk-h0.21 - Build # 1399 - Still Failing

2012-04-26 Thread Apache Jenkins Server
Changes for Build #1397
[kevinwilfong] HIVE-2918. Hive Dynamic Partition Insert - move task not 
considering 'hive.exec.max.dynamic.partitions' from CLI. (cwsteinbach via 
kevinwilfong)


Changes for Build #1398

Changes for Build #1399
[namit] HIVE-2721 ability to select a view qualified by the database / schema 
name
(Martin Traverso via namit)




7 tests failed.
REGRESSION:  
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_or_replace_view

Error Message:
Unexpected exception See build/ql/tmp/hive.log, or try ant test ... 
-Dtest.silent=false to get more logs.

Stack Trace:
junit.framework.AssertionFailedError: Unexpected exception
See build/ql/tmp/hive.log, or try ant test ... -Dtest.silent=false to get 
more logs.
at junit.framework.Assert.fail(Assert.java:50)
at 
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_or_replace_view(TestCliDriver.java:6331)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:168)
at junit.framework.TestCase.runBare(TestCase.java:134)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:124)
at junit.framework.TestSuite.runTest(TestSuite.java:243)
at junit.framework.TestSuite.run(TestSuite.java:238)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:422)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:931)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:785)


REGRESSION:  org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_view

Error Message:
Unexpected exception See build/ql/tmp/hive.log, or try ant test ... 
-Dtest.silent=false to get more logs.

Stack Trace:
junit.framework.AssertionFailedError: Unexpected exception
See build/ql/tmp/hive.log, or try ant test ... -Dtest.silent=false to get 
more logs.
at junit.framework.Assert.fail(Assert.java:50)
at 
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_view(TestCliDriver.java:6535)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:168)
at junit.framework.TestCase.runBare(TestCase.java:134)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:124)
at junit.framework.TestSuite.runTest(TestSuite.java:243)
at junit.framework.TestSuite.run(TestSuite.java:238)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:422)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:931)
at 
org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:785)


REGRESSION:  
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_view_partitioned

Error Message:
Unexpected exception See build/ql/tmp/hive.log, or try ant test ... 
-Dtest.silent=false to get more logs.

Stack Trace:
junit.framework.AssertionFailedError: Unexpected exception
See build/ql/tmp/hive.log, or try ant test ... -Dtest.silent=false to get 
more logs.
at junit.framework.Assert.fail(Assert.java:50)
at 
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver_create_view_partitioned(TestCliDriver.java:6586)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:168)
at junit.framework.TestCase.runBare(TestCase.java:134)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:124)
at 

Trying to write an elastic search storage handler and running into trouble with MapOperator

2012-04-26 Thread Tristan Buckner
When I try to select from a table being backed by an elastic search
index I get this error (Plus some logging I added to try and see what
was going on):

2012-04-26 18:57:15,502 INFO
org.apache.hadoop.hive.ql.exec.MapOperator: Path to aliases is:
{hdfs://localhost:9000/user/hive/warehouse/elastic_search_profile_2=[elastic_search_profile_2]}
2012-04-26 18:57:15,670 INFO
org.apache.hadoop.hive.ql.exec.MapOperator: Adding alias
elastic_search_profile_2 to work list for file
hdfs://localhost:9000/user/hive/warehouse/elastic_search_profile_2
2012-04-26 18:57:15,671 ERROR
org.apache.hadoop.hive.ql.exec.MapOperator: Configuration does not
have any alias for path: /dev/null
2012-04-26 18:57:15,675 INFO
org.apache.hadoop.mapred.TaskLogsTruncater: Initializing logs'
truncater with mapRetainSize=-1 and reduceRetainSize=-1
2012-04-26 18:57:15,742 WARN org.apache.hadoop.mapred.Child: Error running child
java.lang.RuntimeException: Error in configuring object
at 
org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
at 
org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
at 
org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:432)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)
at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:396)
at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1059)
at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at 
org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
... 9 more
Caused by: java.lang.RuntimeException: Error in configuring object
at 
org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
at 
org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
at 
org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
at org.apache.hadoop.mapred.MapRunner.configure(MapRunner.java:34)
... 14 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at 
org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
... 17 more
Caused by: java.lang.RuntimeException: Map operator initialization failed
at 
org.apache.hadoop.hive.ql.exec.ExecMapper.configure(ExecMapper.java:121)
... 22 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException:
org.apache.hadoop.hive.ql.metadata.HiveException: Configuration and
input path are inconsistent
at 
org.apache.hadoop.hive.ql.exec.MapOperator.setChildren(MapOperator.java:404)
at 
org.apache.hadoop.hive.ql.exec.ExecMapper.configure(ExecMapper.java:90)
... 22 more
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException:
Configuration and input path are inconsistent
at 
org.apache.hadoop.hive.ql.exec.MapOperator.setChildren(MapOperator.java:398)
... 23 more
2012-04-26 18:57:15,761 INFO org.apache.hadoop.mapred.Task: Runnning
cleanup for the task

I'm not sure what this alias map is and how it gets populated.  Any
pointers would be greatly appreciated.

Thanks,
Tristan


Re: ant test fails

2012-04-26 Thread Srinivasan Rajivelu
I am getting this error,

test-init:
 [echo] Project: service
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/data
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/logs/clientpositive
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/logs/clientnegative
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/logs/positive
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/logs/negative
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/data/warehouse
[mkdir] Created dir:
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build/service/test/data/metadb

test:
 [echo] Project: service

BUILD FAILED
/Users/srinivasanrajivelu/Desktop/hive-latest/src/build.xml:310: Keepgoing
execution: 1 of 12 iterations failed.

Total time: 304 minutes 16 seconds

Still test cases not running.


Could anyone of you have solutions for this problem...

On Mon, Apr 23, 2012 at 1:09 AM, Namit Jain nj...@fb.com wrote:

 When you run

 ant test -Dtestcase=TestCliDriver


 Which test fails ?

 Also, can you try running the tests in non-debug mode.
 I.e

 Do

 ant package (Instead of ant -Djavac.debug=on package)


 -namit


 On 4/22/12 9:59 PM, Srinivasan Rajivelu srinivasanrajiv...@gmail.com
 wrote:

 Hi All,
After running ant package, if I run ant test fails,
 
 I tried all,
 
 ant package
 ant clean package
 ant -Djavac.debug=on package
 
 I cannot run neither all test cases or positive test cases.
 
 ant test
 
 ant test -Dtestcase=TestCliDriver
 
 
 Both fails,
 
 
 And hive version that I am using is hive 0.8.1. and Operating System is
 MacOSX Lion.
 
 Error when I run ant test,
 
 [junit] 2012-04-15 13:16:25.951 java[39626:1803] Unable to load realm info
 from SCDynamicStore
 [junit] org.apache.thrift.transport.TTransportException: Could not
 create ServerSocket on address 0.0.0.0/0.0.0.0:1.
 [junit]  at
 org.apache.thrift.transport.TServerSocket.init(TServerSocket.java:93)
 [junit]  at
 org.apache.thrift.transport.TServerSocket.init(TServerSocket.java:75)
 [junit]  at
 org.apache.hadoop.hive.metastore.TServerSocketKeepAlive.init(TServerSock
 etKeepAlive.java:34)
 [junit]  at
 org.apache.hadoop.hive.metastore.HiveMetaStore.startMetaStore(HiveMetaStor
 e.java:2994)
 [junit]  at
 org.apache.hadoop.hive.metastore.HiveMetaStore.startMetaStore(HiveMetaStor
 e.java:2970)
 [junit]  at
 org.apache.hadoop.hive.metastore.MetaStoreUtils$1.run(MetaStoreUtils.java:
 740)
 [junit]  at java.lang.Thread.run(Thread.java:680)
 [junit] Tests run: 3, Failures: 0, Errors: 2, Time elapsed: 1.235 sec
 
 And please tell me how much time it will take to run all test cases. And
 this is blocking my work. Please help me with this issue.
 
 --
 Thanks & Regards,
 Srinivasan
 Some People dream of success..
  While others wake up and work hard at it.. I am the latter.




-- 
Thanks & Regards,
Srinivasan
Some People dream of success..
 While others wake up and work hard at it.. I am the latter.