[ https://issues.apache.org/jira/browse/HDFS-15628?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Kihwal Lee updated HDFS-15628:
------------------------------
    Fix Version/s: 3.2.3

> HttpFS server throws NPE if a file is a symlink
> -----------------------------------------------
>
>                 Key: HDFS-15628
>                 URL: https://issues.apache.org/jira/browse/HDFS-15628
>             Project: Hadoop HDFS
>          Issue Type: Bug
>          Components: fs, httpfs
>            Reporter: Ahmed Hussein
>            Assignee: Ahmed Hussein
>            Priority: Major
>             Fix For: 3.3.1, 3.4.0, 3.2.3
>
>         Attachments: HDFS-15628.001.patch, HDFS-15628.002.patch
>
>
> If a directory containing a symlink is listed, the client ({{WebHdfsFileSystem}})
> blows up with an NPE. If {{type}} is {{SYMLINK}}, there must be a {{symlink}}
> field whose value is the link target string. HttpFS returns a response without
> the {{symlink}} field. {{WebHdfsFileSystem}} assumes the field is present for a
> symlink and blindly tries to parse it, causing the NPE (a standalone sketch of
> the failure mode follows the stack trace below).
> This is not an issue if the destination cluster does not have symlinks enabled.
>
> {code:java}
> java.io.IOException: localhost:55901: Response decoding failure: java.lang.NullPointerException
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$FsPathResponseRunner.getResponse(WebHdfsFileSystem.java:967)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:816)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:638)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:676)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:422)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1899)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:672)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.listStatus(WebHdfsFileSystem.java:1731)
> 	at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.testListSymLinkStatus(BaseTestHttpFSWith.java:388)
> 	at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.operation(BaseTestHttpFSWith.java:1230)
> 	at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.testOperation(BaseTestHttpFSWith.java:1363)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 	at java.lang.reflect.Method.invoke(Method.java:498)
> 	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
> 	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
> 	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
> 	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
> 	at org.apache.hadoop.test.TestHdfsHelper$HdfsStatement.evaluate(TestHdfsHelper.java:95)
> 	at org.apache.hadoop.test.TestDirHelper$1.evaluate(TestDirHelper.java:106)
> 	at org.apache.hadoop.test.TestExceptionHelper$1.evaluate(TestExceptionHelper.java:42)
> 	at org.apache.hadoop.test.TestJettyHelper$1.evaluate(TestJettyHelper.java:74)
> 	at org.apache.hadoop.test.TestDirHelper$1.evaluate(TestDirHelper.java:106)
> 	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
> 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
> 	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
> 	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
> 	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
> 	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
> 	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
> 	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
> 	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
> 	at org.junit.runners.Suite.runChild(Suite.java:128)
> 	at org.junit.runners.Suite.runChild(Suite.java:27)
> 	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
> 	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
> 	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
> 	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
> 	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
> 	at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
> 	at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
> 	at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:69)
> 	at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
> 	at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:220)
> 	at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:53)
> Caused by: java.lang.NullPointerException
> 	at org.apache.hadoop.hdfs.DFSUtilClient.string2Bytes(DFSUtilClient.java:116)
> 	at org.apache.hadoop.hdfs.web.JsonUtilClient.toFileStatus(JsonUtilClient.java:131)
> 	at org.apache.hadoop.hdfs.web.JsonUtilClient.toHdfsFileStatusArray(JsonUtilClient.java:215)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$15.decodeResponse(WebHdfsFileSystem.java:1723)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$15.decodeResponse(WebHdfsFileSystem.java:1719)
> 	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$FsPathResponseRunner.getResponse(WebHdfsFileSystem.java:962)
> 	... 46 more
> {code}
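For readers unfamiliar with the decoding path in the trace, the following is a minimal standalone Java sketch of the failure mode described above; it is illustrative only and is neither the Hadoop source nor the attached patches. The map mimics one LISTSTATUS {{FileStatus}} entry as HttpFS returns it (field names follow the WebHDFS REST JSON), and the local {{string2Bytes()}} stands in for {{DFSUtilClient.string2Bytes()}}, which is where the null surfaces in the stack trace.

{code:java}
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * Illustrative sketch only: mirrors, but is not, the logic of
 * JsonUtilClient.toFileStatus() and DFSUtilClient.string2Bytes().
 */
public class MissingSymlinkFieldSketch {

  // Stand-in for DFSUtilClient.string2Bytes(String): throws NPE on a null argument.
  static byte[] string2Bytes(String str) {
    return str.getBytes(StandardCharsets.UTF_8);
  }

  public static void main(String[] args) {
    // One FileStatus entry as HttpFS returns it for a symlink: the type says
    // SYMLINK, but the "symlink" (link target) field is missing.
    Map<String, Object> entry = new HashMap<>();
    entry.put("pathSuffix", "link-to-file");
    entry.put("type", "SYMLINK");
    // entry.put("symlink", "/target/file");  // what WebHdfsFileSystem expects

    if ("SYMLINK".equals(entry.get("type"))) {
      // The client assumes the field is present and converts it blindly.
      String symlink = (String) entry.get("symlink");   // null
      byte[] target = string2Bytes(symlink);            // NullPointerException
      System.out.println("link target has " + target.length + " bytes");
    }
  }
}
{code}

Running the sketch throws the same kind of NullPointerException. Whether the proper fix is for HttpFS to emit the {{symlink}} field or for the client to tolerate its absence (for example, a null check before the byte conversion) is left to the attached patches; the sketch only shows where the null enters.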