See <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/549/>

------------------------------------------
[...truncated 25848 lines...]
    [junit] 2011-02-10 12:13:35,202 null map = 100%,  reduce = 0%
    [junit] Ended Job = job_local_0001
    [junit] POSTHOOK: query: select key, value from testhivedrivertable
    [junit] POSTHOOK: type: QUERY
    [junit] POSTHOOK: Input: default@testhivedrivertable
    [junit] POSTHOOK: Output: file:/tmp/hudson/hive_2011-02-10_12-13-32_620_1910688274407044875/-mr-10000
    [junit] OK
    [junit] Hive history file=<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/service/tmp/hive_job_log_hudson_201102101213_237005758.txt>
    [junit] PREHOOK: query: drop table testhivedrivertable
    [junit] PREHOOK: type: DROPTABLE
    [junit] PREHOOK: Input: default@testhivedrivertable
    [junit] PREHOOK: Output: default@testhivedrivertable
    [junit] POSTHOOK: query: drop table testhivedrivertable
    [junit] POSTHOOK: type: DROPTABLE
    [junit] POSTHOOK: Input: default@testhivedrivertable
    [junit] POSTHOOK: Output: default@testhivedrivertable
    [junit] OK
    [junit] PREHOOK: query: create table testhivedrivertable (key int, value string)
    [junit] PREHOOK: type: CREATETABLE
    [junit] POSTHOOK: query: create table testhivedrivertable (key int, value string)
    [junit] POSTHOOK: type: CREATETABLE
    [junit] POSTHOOK: Output: default@testhivedrivertable
    [junit] OK
    [junit] PREHOOK: query: load data local inpath '<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/data/files/kv1.txt>' into table testhivedrivertable
    [junit] PREHOOK: type: LOAD
    [junit] Copying data from <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/data/files/kv1.txt>
    [junit] Loading data to table testhivedrivertable
    [junit] POSTHOOK: query: load data local inpath '<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/data/files/kv1.txt>' into table testhivedrivertable
    [junit] POSTHOOK: type: LOAD
    [junit] POSTHOOK: Output: default@testhivedrivertable
    [junit] OK
    [junit] PREHOOK: query: select key, value from testhivedrivertable where key > 10
    [junit] PREHOOK: type: QUERY
    [junit] PREHOOK: Input: default@testhivedrivertable
    [junit] PREHOOK: Output: file:/tmp/hudson/hive_2011-02-10_12-13-36_024_1757063799814296093/-mr-10000
    [junit] Total MapReduce jobs = 1
    [junit] Launching Job 1 out of 1
    [junit] Number of reduce tasks is set to 0 since there's no reduce operator
    [junit] Job running in-process (local Hadoop)
    [junit] 2011-02-10 12:13:38,779 null map = 100%,  reduce = 0%
    [junit] Ended Job = job_local_0001
    [junit] POSTHOOK: query: select key, value from testhivedrivertable where key > 10
    [junit] POSTHOOK: type: QUERY
    [junit] POSTHOOK: Input: default@testhivedrivertable
    [junit] POSTHOOK: Output: file:/tmp/hudson/hive_2011-02-10_12-13-36_024_1757063799814296093/-mr-10000
    [junit] OK
    [junit] PREHOOK: query: select count(1) as c from testhivedrivertable
    [junit] PREHOOK: type: QUERY
    [junit] PREHOOK: Input: default@testhivedrivertable
    [junit] PREHOOK: Output: file:/tmp/hudson/hive_2011-02-10_12-13-38_949_8120952092387632695/-mr-10000
    [junit] Total MapReduce jobs = 1
    [junit] Launching Job 1 out of 1
    [junit] Number of reduce tasks determined at compile time: 1
    [junit] In order to change the average load for a reducer (in bytes):
    [junit]   set hive.exec.reducers.bytes.per.reducer=<number>
    [junit] In order to limit the maximum number of reducers:
    [junit]   set hive.exec.reducers.max=<number>
    [junit] In order to set a constant number of reducers:
    [junit]   set mapred.reduce.tasks=<number>
    [junit] Job running in-process (local Hadoop)
    [junit] 2011-02-10 12:13:41,659 null map = 100%,  reduce = 100%
    [junit] Ended Job = job_local_0001
    [junit] POSTHOOK: query: select count(1) as c from testhivedrivertable
    [junit] POSTHOOK: type: QUERY
    [junit] POSTHOOK: Input: default@testhivedrivertable
    [junit] POSTHOOK: Output: file:/tmp/hudson/hive_2011-02-10_12-13-38_949_8120952092387632695/-mr-10000
    [junit] OK
    [junit] ------------- ---------------- ---------------
    [junit] 
    [junit] Testcase: testExecute took 10.386 sec
    [junit] Testcase: testNonHiveCommand took 0.958 sec
    [junit] Testcase: testMetastore took 0.283 sec
    [junit] Testcase: testGetClusterStatus took 0.103 sec
    [junit] Testcase: testFetch took 8.768 sec
    [junit] Testcase: testDynamicSerde took 7.218 sec

test-conditions:

gen-test:

create-dirs:

compile-ant-tasks:

create-dirs:

init:

compile:
     [echo] Compiling: anttasks
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/ant/build.xml>:40: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds

deploy-ant-tasks:

create-dirs:

init:

compile:
     [echo] Compiling: anttasks
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/ant/build.xml>:40: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds

jar:

init:

compile:

ivy-init-dirs:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/ivy/lib/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:

ivy-retrieve-hadoop-source:
:: loading settings :: file = <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/ivy/ivysettings.xml>
[ivy:retrieve] :: resolving dependencies :: org.apache.hadoop.hive#shims;work...@vesta.apache.org
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  found hadoop#core;0.20.0 in hadoop-source
[ivy:retrieve]  found hadoop#core;0.20.3-CDH3-SNAPSHOT in hadoop-source
[ivy:retrieve] :: resolution report :: resolve 2708ms :: artifacts dl 0ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   2   |   0   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------
[ivy:retrieve] :: retrieving :: org.apache.hadoop.hive#shims
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)

install-hadoopcore-internal:

build_shims:
     [echo] Compiling shims against hadoop 0.20.0 (<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/hadoopcore/hadoop-0.20.0>)
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>:53: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds

ivy-init-dirs:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/ivy/lib/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:

ivy-retrieve-hadoop-source:
:: loading settings :: file = <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/ivy/ivysettings.xml>
[ivy:retrieve] :: resolving dependencies :: org.apache.hadoop.hive#shims;work...@vesta.apache.org
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  found hadoop#core;0.20.0 in hadoop-source
[ivy:retrieve]  found hadoop#core;0.20.3-CDH3-SNAPSHOT in hadoop-source
[ivy:retrieve] :: resolution report :: resolve 2049ms :: artifacts dl 1ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   2   |   0   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------
[ivy:retrieve] :: retrieving :: org.apache.hadoop.hive#shims
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)

install-hadoopcore-internal:

build_shims:
     [echo] Compiling shims against hadoop 0.20.3-CDH3-SNAPSHOT (<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/hadoopcore/hadoop-0.20.3-CDH3-SNAPSHOT>)
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>:53: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>:75: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds

compile-test:

ivy-init-dirs:

ivy-download:
      [get] Getting: http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/ivy/lib/ivy-2.1.0.jar>
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:

ivy-retrieve-hadoop-source:
:: loading settings :: file = <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/ivy/ivysettings.xml>
[ivy:retrieve] :: resolving dependencies :: org.apache.hadoop.hive#shims;work...@vesta.apache.org
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  found hadoop#core;0.20.0 in hadoop-source
[ivy:retrieve]  found hadoop#core;0.20.3-CDH3-SNAPSHOT in hadoop-source
[ivy:retrieve] :: resolution report :: resolve 1977ms :: artifacts dl 1ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      default     |   2   |   0   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------
[ivy:retrieve] :: retrieving :: org.apache.hadoop.hive#shims
[ivy:retrieve]  confs: [default]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)

install-hadoopcore-internal:

compile_secure_test:
     [echo] Compiling shim tests against hadoop 0.20.3-CDH3-SNAPSHOT (<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/hadoopcore/hadoop-0.20.3-CDH3-SNAPSHOT>)
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>:96: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds
    [javac] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>:109: warning: 'includeantruntime' was not set, defaulting to build.sysclasspath=last; set to false for repeatable builds

test-jar:
   [delete] Deleting: <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/shims/test/test-udfs.jar>
      [jar] Building MANIFEST-only jar: <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build/shims/test/test-udfs.jar>

test-init:

test:
    [junit] Running org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge
    [junit] java.lang.NoSuchMethodError: org.apache.hadoop.security.UserGroupInformation.getCurrentUser()Lorg/apache/hadoop/security/UserGroupInformation;
    [junit]     at org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge20S$Server.<init>(HadoopThriftAuthBridge20S.java:330)
    [junit]     at org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge$MyHadoopThriftAuthBridge20S$Server.<init>(TestHadoop20SAuthBridge.java:53)
    [junit]     at org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge$MyHadoopThriftAuthBridge20S.createServer(TestHadoop20SAuthBridge.java:48)
    [junit]     at org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge$MyHadoopThriftAuthBridge20S.createServer(TestHadoop20SAuthBridge.java:43)
    [junit]     at org.apache.hadoop.hive.metastore.HiveMetaStore.startMetaStore(HiveMetaStore.java:3113)
    [junit]     at org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge$1.run(TestHadoop20SAuthBridge.java:90)
    [junit]     at java.lang.Thread.run(Thread.java:619)
    [junit] Running org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hive.thrift.TestHadoop20SAuthBridge FAILED (crashed)
      [for] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/shims/build.xml>: The following error occurred while executing this line:
      [for] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build.xml>:213: The following error occurred while executing this line:
      [for] <https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build-common.xml>:454: Tests failed!

BUILD FAILED
<https://hudson.apache.org/hudson/job/Hive-trunk-h0.20/ws/hive/build.xml>:208: Keepgoing execution: 2 of 11 iterations failed.

Total time: 225 minutes 33 seconds
Archiving artifacts
Recording test results
