See <https://builds.apache.org/job/Pig-trunk/1114/changes>
Changes:
[gdfm] PIG-1387: Syntactical Sugar for PIG-1385 (azaroth)
Syntactic sugar to convert to tuples "()", bags "{}" and maps "[]".
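For reference, a minimal Pig Latin sketch of the shorthand described above, assuming it desugars to the TOTUPLE/TOBAG/TOMAP builtins added by PIG-1385; the relation, input file, and field names below are hypothetical and only illustrate the syntax:

  -- hypothetical input; schema chosen only to illustrate the sugar
  A = LOAD 'input.txt' AS (name:chararray, age:int);
  -- "(...)" builds a tuple, "{...}" a bag, and "[...]" a map from the listed fields
  B = FOREACH A GENERATE (name, age), {name, age}, [name, age];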
------------------------------------------
[...truncated 38864 lines...]
[junit] at java.security.AccessController.doPrivileged(Native Method)
[junit] at javax.security.auth.Subject.doAs(Subject.java:396)
[junit] at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
[junit]
[junit] org.apache.hadoop.ipc.RemoteException: java.io.IOException: Could not complete write to file /tmp/TestStore-output--9198829266639495053.txt_cleanupOnFailure_succeeded1 by DFSClient_1839538274
[junit] at org.apache.hadoop.hdfs.server.namenode.NameNode.complete(NameNode.java:449)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:508)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:959)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:955)
[junit] at java.security.AccessController.doPrivileged(Native Method)
[junit] at javax.security.auth.Subject.doAs(Subject.java:396)
[junit] at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
[junit]
[junit] at org.apache.hadoop.ipc.Client.call(Client.java:740)
[junit] at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:220)
[junit] at $Proxy0.complete(Unknown Source)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
[junit] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
[junit] at $Proxy0.complete(Unknown Source)
[junit] at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.closeInternal(DFSClient.java:3264)
[junit] at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.close(DFSClient.java:3188)
[junit] at org.apache.hadoop.hdfs.DFSClient$LeaseChecker.close(DFSClient.java:1043)
[junit] at org.apache.hadoop.hdfs.DFSClient.close(DFSClient.java:237)
[junit] at org.apache.hadoop.hdfs.DistributedFileSystem.close(DistributedFileSystem.java:269)
[junit] at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:83)
[junit] at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
[junit] at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
[junit] at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
[junit] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[junit] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
[junit] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
[junit] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
[junit] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
[junit] at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
[junit] at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
[junit] 11/10/25 22:45:43 WARN hdfs.StateChange: DIR* NameSystem.completeFile: failed to complete /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded because dir.getFileBlocks() is null and pendingFile is null
[junit] 11/10/25 22:45:43 INFO ipc.Server: IPC Server handler 6 on 48809, call complete(/tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded, DFSClient_1839538274) from 127.0.0.1:36824: error: java.io.IOException: Could not complete write to file /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded by DFSClient_1839538274
[junit] java.io.IOException: Could not complete write to file /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded by DFSClient_1839538274
[junit] at org.apache.hadoop.hdfs.server.namenode.NameNode.complete(NameNode.java:449)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:508)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:959)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:955)
[junit] at java.security.AccessController.doPrivileged(Native Method)
[junit] at javax.security.auth.Subject.doAs(Subject.java:396)
[junit] at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
[junit] 11/10/25 22:45:43 ERROR hdfs.DFSClient: Exception closing file /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded : org.apache.hadoop.ipc.RemoteException: java.io.IOException: Could not complete write to file /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded by DFSClient_1839538274
[junit] at org.apache.hadoop.hdfs.server.namenode.NameNode.complete(NameNode.java:449)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:508)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:959)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:955)
[junit] at java.security.AccessController.doPrivileged(Native Method)
[junit] at javax.security.auth.Subject.doAs(Subject.java:396)
[junit] at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
[junit]
[junit] org.apache.hadoop.ipc.RemoteException: java.io.IOException: Could not complete write to file /tmp/TestStore-output-5739028886607412230.txt_cleanupOnFailure_succeeded by DFSClient_1839538274
[junit] at org.apache.hadoop.hdfs.server.namenode.NameNode.complete(NameNode.java:449)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:508)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:959)
[junit] at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:955)
[junit] at java.security.AccessController.doPrivileged(Native Method)
[junit] Shutting down the Mini HDFS Cluster
[junit] at javax.security.auth.Subject.doAs(Subject.java:396)
[junit] at org.apache.hadoop.ipc.Server$Handler.run(Server.java:953)
[junit]
[junit] at org.apache.hadoop.ipc.Client.call(Client.java:740)
[junit] Shutting down DataNode 3
[junit] at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:220)
[junit] at $Proxy0.complete(Unknown Source)
[junit] at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:82)
[junit] at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:59)
[junit] at $Proxy0.complete(Unknown Source)
[junit] at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.closeInternal(DFSClient.java:3264)
[junit] at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.close(DFSClient.java:3188)
[junit] at org.apache.hadoop.hdfs.DFSClient$LeaseChecker.close(DFSClient.java:1043)
[junit] at org.apache.hadoop.hdfs.DFSClient.close(DFSClient.java:237)
[junit] at org.apache.hadoop.hdfs.DistributedFileSystem.close(DistributedFileSystem.java:269)
[junit] at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsClusters(MiniGenericCluster.java:83)
[junit] at org.apache.pig.test.MiniGenericCluster.shutdownMiniDfsAndMrClusters(MiniGenericCluster.java:77)
[junit] at org.apache.pig.test.MiniGenericCluster.shutDown(MiniGenericCluster.java:68)
[junit] at org.apache.pig.test.TestStore.oneTimeTearDown(TestStore.java:128)
[junit] at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
[junit] at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
[junit] at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
[junit] at java.lang.reflect.Method.invoke(Method.java:597)
[junit] at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
[junit] at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
[junit] at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
[junit] at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:37)
[junit] at org.junit.runners.ParentRunner.run(ParentRunner.java:220)
[junit] at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
[junit] at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
[junit] 2011-10-25 22:45:43.591:INFO::Stopped SelectChannelConnector@localhost:0
[junit] 11/10/25 22:45:43 INFO ipc.Server: Stopping server on 39081
[junit] 11/10/25 22:45:43 INFO ipc.Server: IPC Server handler 0 on 39081: exiting
[junit] 11/10/25 22:45:43 INFO ipc.Server: IPC Server handler 2 on 39081: exiting
[junit] 11/10/25 22:45:43 INFO ipc.Server: Stopping IPC Server listener on 39081
[junit] 11/10/25 22:45:43 INFO ipc.Server: IPC Server handler 1 on 39081: exiting
[junit] 11/10/25 22:45:43 INFO ipc.Server: Stopping IPC Server Responder
[junit] 11/10/25 22:45:43 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
[junit] 11/10/25 22:45:43 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:49732, storageID=DS-1942333873-67.195.138.20-49732-1319582389785, infoPort=52301, ipcPort=39081):DataXceiveServer: java.nio.channels.AsynchronousCloseException
[junit] at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
[junit] at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
[junit] at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
[junit] at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:130)
[junit] at java.lang.Thread.run(Thread.java:662)
[junit]
[junit] 11/10/25 22:45:43 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
[junit] 11/10/25 22:45:43 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:39304 to delete blk_5712939127206739837_1121 blk_-8182242911326505577_1123 blk_-2463948357799343838_1122 blk_-4832026471964570125_1126 blk_1527691758239730467_1127
[junit] 11/10/25 22:45:43 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:48673 to delete blk_-5478801490798109751_1124 blk_-8182242911326505577_1123 blk_-4832026471964570125_1126
[junit] 11/10/25 22:45:43 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:49732, storageID=DS-1942333873-67.195.138.20-49732-1319582389785, infoPort=52301, ipcPort=39081):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data7/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data8/current'}>
[junit] 11/10/25 22:45:43 INFO ipc.Server: Stopping server on 39081
[junit] 11/10/25 22:45:43 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] 11/10/25 22:45:44 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] Shutting down DataNode 2
[junit] 2011-10-25 22:45:44.696:INFO::Stopped SelectChannelConnector@localhost:0
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping server on 43253
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 0 on 43253: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 1 on 43253: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 2 on 43253: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping IPC Server listener on 43253
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping IPC Server Responder
[junit] 11/10/25 22:45:44 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:48673, storageID=DS-1784236486-67.195.138.20-48673-1319582389501, infoPort=44511, ipcPort=43253):DataXceiveServer: java.nio.channels.AsynchronousCloseException
[junit] at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
[junit] at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
[junit] at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
[junit] at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:130)
[junit] at java.lang.Thread.run(Thread.java:662)
[junit]
[junit] 11/10/25 22:45:44 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] 11/10/25 22:45:44 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
[junit] 11/10/25 22:45:44 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:48673, storageID=DS-1784236486-67.195.138.20-48673-1319582389501, infoPort=44511, ipcPort=43253):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data5/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data6/current'}>
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping server on 43253
[junit] 11/10/25 22:45:44 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] Shutting down DataNode 1
[junit] 2011-10-25 22:45:44.799:INFO::Stopped SelectChannelConnector@localhost:0
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping server on 41262
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 0 on 41262: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 1 on 41262: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: IPC Server handler 2 on 41262: exiting
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping IPC Server listener on 41262
[junit] 11/10/25 22:45:44 INFO ipc.Server: Stopping IPC Server Responder
[junit] 11/10/25 22:45:44 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
[junit] 11/10/25 22:45:44 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:39304, storageID=DS-1385698992-67.195.138.20-39304-1319582389246, infoPort=42870, ipcPort=41262):DataXceiveServer: java.nio.channels.AsynchronousCloseException
[junit] at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
[junit] at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
[junit] at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
[junit] at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:130)
[junit] at java.lang.Thread.run(Thread.java:662)
[junit]
[junit] 11/10/25 22:45:45 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
[junit] 11/10/25 22:45:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] 11/10/25 22:45:45 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:39304, storageID=DS-1385698992-67.195.138.20-39304-1319582389246, infoPort=42870, ipcPort=41262):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data3/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data4/current'}>
[junit] 11/10/25 22:45:45 INFO ipc.Server: Stopping server on 41262
[junit] 11/10/25 22:45:45 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] Shutting down DataNode 0
[junit] 2011-10-25 22:45:45.902:INFO::Stopped SelectChannelConnector@localhost:0
[junit] 11/10/25 22:45:46 INFO ipc.Server: Stopping server on 46626
[junit] 11/10/25 22:45:46 INFO ipc.Server: IPC Server handler 0 on 46626: exiting
[junit] 11/10/25 22:45:46 INFO ipc.Server: Stopping IPC Server listener on 46626
[junit] 11/10/25 22:45:46 INFO ipc.Server: Stopping IPC Server Responder
[junit] 11/10/25 22:45:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 1
[junit] 11/10/25 22:45:46 INFO ipc.Server: IPC Server handler 1 on 46626: exiting
[junit] 11/10/25 22:45:46 WARN datanode.DataNode: DatanodeRegistration(127.0.0.1:37432, storageID=DS-996349449-67.195.138.20-37432-1319582388946, infoPort=54275, ipcPort=46626):DataXceiveServer: java.nio.channels.AsynchronousCloseException
[junit] at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:185)
[junit] at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:159)
[junit] at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:84)
[junit] at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:130)
[junit] at java.lang.Thread.run(Thread.java:662)
[junit]
[junit] 11/10/25 22:45:46 INFO ipc.Server: IPC Server handler 2 on 46626: exiting
[junit] 11/10/25 22:45:46 INFO datanode.DataBlockScanner: Exiting DataBlockScanner thread.
[junit] 11/10/25 22:45:46 INFO datanode.DataNode: DatanodeRegistration(127.0.0.1:37432, storageID=DS-996349449-67.195.138.20-37432-1319582388946, infoPort=54275, ipcPort=46626):Finishing DataNode in: FSDataset{dirpath='<https://builds.apache.org/job/Pig-trunk/ws/trunk/build/test/data/dfs/data/data1/current,/home/jenkins/jenkins-slave/workspace/Pig-trunk/trunk/build/test/data/dfs/data/data2/current'}>
[junit] 11/10/25 22:45:46 INFO ipc.Server: Stopping server on 46626
[junit] 11/10/25 22:45:46 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] 11/10/25 22:45:46 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:49732 to delete blk_5712939127206739837_1121 blk_-5478801490798109751_1124 blk_-8182242911326505577_1123 blk_-2463948357799343838_1122 blk_-4832026471964570125_1126 blk_1527691758239730467_1127
[junit] 11/10/25 22:45:46 INFO hdfs.StateChange: BLOCK* ask 127.0.0.1:37432 to delete blk_5712939127206739837_1121 blk_-5478801490798109751_1124 blk_-2463948357799343838_1122 blk_1527691758239730467_1127
[junit] 11/10/25 22:45:47 INFO datanode.DataNode: Waiting for threadgroup to exit, active threads is 0
[junit] 2011-10-25 22:45:47.004:INFO::Stopped SelectChannelConnector@localhost:0
[junit] 11/10/25 22:45:47 WARN namenode.FSNamesystem: ReplicationMonitor thread received InterruptedException.java.lang.InterruptedException: sleep interrupted
[junit] 11/10/25 22:45:47 INFO namenode.FSNamesystem: Number of transactions: 694 Total time for transactions(ms): 10Number of transactions batched in Syncs: 113 Number of syncs: 485 SyncTimes(ms): 6345 283
[junit] 11/10/25 22:45:47 INFO namenode.DecommissionManager: Interrupted Monitor
[junit] java.lang.InterruptedException: sleep interrupted
[junit] at java.lang.Thread.sleep(Native Method)
[junit] at org.apache.hadoop.hdfs.server.namenode.DecommissionManager$Monitor.run(DecommissionManager.java:65)
[junit] at java.lang.Thread.run(Thread.java:662)
[junit] 11/10/25 22:45:47 INFO ipc.Server: Stopping server on 48809
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 1 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 0 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 2 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 3 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 4 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 5 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 6 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 7 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 8 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: IPC Server handler 9 on 48809: exiting
[junit] 11/10/25 22:45:47 INFO ipc.Server: Stopping IPC Server listener on 48809
[junit] 11/10/25 22:45:47 INFO ipc.Server: Stopping IPC Server Responder
[junit] Tests run: 17, Failures: 0, Errors: 0, Time elapsed: 355.283 sec
[junit] Running org.apache.pig.test.TestStringUDFs
[junit] 11/10/25 22:45:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.NullPointerException
[junit] 11/10/25 22:45:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
[junit] 11/10/25 22:45:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -1
[junit] 11/10/25 22:45:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -8
[junit] 11/10/25 22:45:47 WARN builtin.SUBSTRING: No logger object provided to UDF: org.apache.pig.builtin.SUBSTRING. java.lang.StringIndexOutOfBoundsException: String index out of range: -2
[junit] 11/10/25 22:45:47 WARN builtin.INDEXOF: No logger object provided to UDF: org.apache.pig.builtin.INDEXOF. Failed to process input; error - null
[junit] 11/10/25 22:45:47 WARN builtin.LAST_INDEX_OF: No logger object provided to UDF: org.apache.pig.builtin.LAST_INDEX_OF. Failed to process input; error - null
[junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 0.095 sec
[delete] Deleting directory /tmp/pig_junit_tmp511233859
BUILD FAILED
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:675: The following error occurred while executing this line:
<https://builds.apache.org/job/Pig-trunk/ws/trunk/build.xml>:731: Tests failed!
Total time: 21 minutes 39 seconds
Build step 'Execute shell' marked build as failure
[FINDBUGS] Skipping publisher since build result is FAILURE
Recording test results
Publishing Javadoc
Archiving artifacts
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure