[ https://issues.apache.org/jira/browse/HADOOP-18219?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Shilun Fan updated HADOOP-18219:
--------------------------------
    Component/s: test

> Fix shadedclient test failure
> -----------------------------
>
>                 Key: HADOOP-18219
>                 URL: https://issues.apache.org/jira/browse/HADOOP-18219
>             Project: Hadoop Common
>          Issue Type: Bug
>          Components: test
>    Affects Versions: 3.4.0
>         Environment: Debian 10
>            Reporter: Gautham Banasandra
>            Assignee: Akira Ajisaka
>            Priority: Blocker
>              Labels: pull-request-available
>             Fix For: 3.4.0
>
>          Time Spent: 2h 10m
>  Remaining Estimate: 0h
>
> Two of the shaded client tests have been failing on Debian 10 ever since commit
> https://github.com/apache/hadoop/commit/63187083cc3b9bb1c1e90e692e271958561f9cc8.
> The failures are as follows:
> 1st test failure -
> {code}
> [INFO] Running org.apache.hadoop.example.ITUseMiniCluster
> [ERROR] Tests run: 2, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 18.315 s <<< FAILURE! - in org.apache.hadoop.example.ITUseMiniCluster
> [ERROR] useWebHDFS(org.apache.hadoop.example.ITUseMiniCluster)  Time elapsed: 12.048 s  <<< ERROR!
> org.apache.hadoop.yarn.exceptions.YarnRuntimeException: org.apache.hadoop.yarn.webapp.WebAppException: Error starting http server
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.startResourceManager(MiniYARNCluster.java:384)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.access$300(MiniYARNCluster.java:129)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster$ResourceManagerWrapper.serviceStart(MiniYARNCluster.java:500)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:123)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.serviceStart(MiniYARNCluster.java:333)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.example.ITUseMiniCluster.clusterUp(ITUseMiniCluster.java:84)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>       at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>       at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>       at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>       at org.junit.internal.runners.statements.RunBefores.invokeMethod(RunBefores.java:33)
>       at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
>       at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>       at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:384)
>       at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:345)
>       at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:126)
>       at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
> Caused by: org.apache.hadoop.yarn.webapp.WebAppException: Error starting http server
>       at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:479)
>       at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.startWepApp(ResourceManager.java:1443)
>       at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.serviceStart(ResourceManager.java:1552)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.startResourceManager(MiniYARNCluster.java:376)
>       ... 37 more
> Caused by: java.io.IOException: Unable to initialize WebAppContext
>       at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:1380)
>       at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:475)
>       ... 41 more
> Caused by: org.apache.hadoop.shaded.com.google.inject.ProvisionException: Unable to provision, see the following errors:
> 1) Error injecting constructor, javax.xml.bind.JAXBException
>  - with linked exception:
> [java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory]
>   at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:54)
>   at org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebApp.setup(RMWebApp.java:57)
>   while locating org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver
> 1 error
>       at org.apache.hadoop.shaded.com.google.inject.internal.InternalProvisionException.toProvisionException(InternalProvisionException.java:226)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl$1.get(InjectorImpl.java:1097)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl.getInstance(InjectorImpl.java:1131)
>       at org.apache.hadoop.shaded.com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory$GuiceInstantiatedComponentProvider.getInstance(GuiceComponentProviderFactory.java:345)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory$ManagedSingleton.<init>(IoCProviderFactory.java:202)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory.wrap(IoCProviderFactory.java:123)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory._getComponentProvider(IoCProviderFactory.java:116)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderFactory.getComponentProvider(ProviderFactory.java:153)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderServices.getComponent(ProviderServices.java:278)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderServices.getProviders(ProviderServices.java:151)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.factory.ContextResolverFactory.init(ContextResolverFactory.java:83)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl._initiate(WebApplicationImpl.java:1332)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl.access$700(WebApplicationImpl.java:180)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:799)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:795)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl.initiate(WebApplicationImpl.java:795)
>       at org.apache.hadoop.shaded.com.sun.jersey.guice.spi.container.servlet.GuiceContainer.initiate(GuiceContainer.java:121)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer$InternalWebComponent.initiate(ServletContainer.java:339)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.WebComponent.load(WebComponent.java:605)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.WebComponent.init(WebComponent.java:207)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:394)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:744)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.FilterDefinition.init(FilterDefinition.java:110)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.ManagedFilterPipeline.initPipeline(ManagedFilterPipeline.java:98)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.GuiceFilter.init(GuiceFilter.java:232)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.FilterHolder.initialize(FilterHolder.java:140)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:731)
>       at java.base/java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948)
>       at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
>       at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:755)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:379)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.startWebapp(WebAppContext.java:1449)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1414)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:910)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:288)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:524)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:117)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:110)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.StatisticsHandler.doStart(StatisticsHandler.java:253)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.Server.start(Server.java:423)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:110)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.Server.doStart(Server.java:387)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:1343)
>       ... 42 more
> Caused by: javax.xml.bind.JAXBException
>  - with linked exception:
> [java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory]
>       at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:241)
>       at javax.xml.bind.ContextFinder.find(ContextFinder.java:477)
>       at javax.xml.bind.JAXBContext.newInstance(JAXBContext.java:656)
>       at org.apache.hadoop.shaded.com.sun.jersey.api.json.JSONJAXBContext.<init>(JSONJAXBContext.java:255)
>       at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:122)
>       at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver$$FastClassByGuice$$6a7be7f6.newInstance(<generated>)
>       at org.apache.hadoop.shaded.com.google.inject.internal.DefaultConstructionProxyFactory$FastClassProxy.newInstance(DefaultConstructionProxyFactory.java:89)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorInjector.provision(ConstructorInjector.java:114)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorInjector.construct(ConstructorInjector.java:91)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorBindingImpl$Factory.get(ConstructorBindingImpl.java:306)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ProviderToInternalFactoryAdapter.get(ProviderToInternalFactoryAdapter.java:40)
>       at org.apache.hadoop.shaded.com.google.inject.internal.SingletonScope$1.get(SingletonScope.java:168)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InternalFactoryToProviderAdapter.get(InternalFactoryToProviderAdapter.java:39)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl$1.get(InjectorImpl.java:1094)
>       ... 95 more
> Caused by: java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory
>       at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:581)
>       at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:178)
>       at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppClassLoader.loadClass(WebAppClassLoader.java:538)
>       at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>       at javax.xml.bind.ContextFinder.safeLoadClass(ContextFinder.java:594)
>       at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:239)
>       ... 108 more
> {code}
> 2nd test failure -
> {code}
> [ERROR] useHdfsFileSystem(org.apache.hadoop.example.ITUseMiniCluster)  Time elapsed: 6.202 s  <<< ERROR!
> org.apache.hadoop.yarn.exceptions.YarnRuntimeException: org.apache.hadoop.yarn.webapp.WebAppException: Error starting http server
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.startResourceManager(MiniYARNCluster.java:384)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.access$300(MiniYARNCluster.java:129)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster$ResourceManagerWrapper.serviceStart(MiniYARNCluster.java:500)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.service.CompositeService.serviceStart(CompositeService.java:123)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.serviceStart(MiniYARNCluster.java:333)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.example.ITUseMiniCluster.clusterUp(ITUseMiniCluster.java:84)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>       at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>       at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.base/java.lang.reflect.Method.invoke(Method.java:566)
>       at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>       at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>       at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>       at org.junit.internal.runners.statements.RunBefores.invokeMethod(RunBefores.java:33)
>       at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
>       at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>       at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>       at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>       at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:384)
>       at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:345)
>       at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:126)
>       at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:418)
> Caused by: org.apache.hadoop.yarn.webapp.WebAppException: Error starting http server
>       at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:479)
>       at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.startWepApp(ResourceManager.java:1443)
>       at org.apache.hadoop.yarn.server.resourcemanager.ResourceManager.serviceStart(ResourceManager.java:1552)
>       at org.apache.hadoop.service.AbstractService.start(AbstractService.java:195)
>       at org.apache.hadoop.yarn.server.MiniYARNCluster.startResourceManager(MiniYARNCluster.java:376)
>       ... 37 more
> Caused by: java.io.IOException: Unable to initialize WebAppContext
>       at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:1380)
>       at org.apache.hadoop.yarn.webapp.WebApps$Builder.start(WebApps.java:475)
>       ... 41 more
> Caused by: org.apache.hadoop.shaded.com.google.inject.ProvisionException: Unable to provision, see the following errors:
> 1) Error injecting constructor, javax.xml.bind.JAXBException
>  - with linked exception:
> [java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory]
>   at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:54)
>   at org.apache.hadoop.yarn.server.resourcemanager.webapp.RMWebApp.setup(RMWebApp.java:57)
>   while locating org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver
> 1 error
>       at org.apache.hadoop.shaded.com.google.inject.internal.InternalProvisionException.toProvisionException(InternalProvisionException.java:226)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl$1.get(InjectorImpl.java:1097)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl.getInstance(InjectorImpl.java:1131)
>       at org.apache.hadoop.shaded.com.sun.jersey.guice.spi.container.GuiceComponentProviderFactory$GuiceInstantiatedComponentProvider.getInstance(GuiceComponentProviderFactory.java:345)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory$ManagedSingleton.<init>(IoCProviderFactory.java:202)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory.wrap(IoCProviderFactory.java:123)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ioc.IoCProviderFactory._getComponentProvider(IoCProviderFactory.java:116)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderFactory.getComponentProvider(ProviderFactory.java:153)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderServices.getComponent(ProviderServices.java:278)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.component.ProviderServices.getProviders(ProviderServices.java:151)
>       at org.apache.hadoop.shaded.com.sun.jersey.core.spi.factory.ContextResolverFactory.init(ContextResolverFactory.java:83)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl._initiate(WebApplicationImpl.java:1332)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl.access$700(WebApplicationImpl.java:180)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:799)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl$13.f(WebApplicationImpl.java:795)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193)
>       at org.apache.hadoop.shaded.com.sun.jersey.server.impl.application.WebApplicationImpl.initiate(WebApplicationImpl.java:795)
>       at org.apache.hadoop.shaded.com.sun.jersey.guice.spi.container.servlet.GuiceContainer.initiate(GuiceContainer.java:121)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer$InternalWebComponent.initiate(ServletContainer.java:339)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.WebComponent.load(WebComponent.java:605)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.WebComponent.init(WebComponent.java:207)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:394)
>       at org.apache.hadoop.shaded.com.sun.jersey.spi.container.servlet.ServletContainer.init(ServletContainer.java:744)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.FilterDefinition.init(FilterDefinition.java:110)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.ManagedFilterPipeline.initPipeline(ManagedFilterPipeline.java:98)
>       at org.apache.hadoop.shaded.com.google.inject.servlet.GuiceFilter.init(GuiceFilter.java:232)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.FilterHolder.initialize(FilterHolder.java:140)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletHandler.lambda$initialize$0(ServletHandler.java:731)
>       at java.base/java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:948)
>       at java.base/java.util.stream.Streams$ConcatSpliterator.forEachRemaining(Streams.java:734)
>       at java.base/java.util.stream.ReferencePipeline$Head.forEach(ReferencePipeline.java:658)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletHandler.initialize(ServletHandler.java:755)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletContextHandler.startContext(ServletContextHandler.java:379)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.startWebapp(WebAppContext.java:1449)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.startContext(WebAppContext.java:1414)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.ContextHandler.doStart(ContextHandler.java:910)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.servlet.ServletContextHandler.doStart(ServletContextHandler.java:288)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppContext.doStart(WebAppContext.java:524)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:117)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:110)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.StatisticsHandler.doStart(StatisticsHandler.java:253)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.start(ContainerLifeCycle.java:169)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.Server.start(Server.java:423)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.ContainerLifeCycle.doStart(ContainerLifeCycle.java:110)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.handler.AbstractHandler.doStart(AbstractHandler.java:97)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.server.Server.doStart(Server.java:387)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.util.component.AbstractLifeCycle.start(AbstractLifeCycle.java:73)
>       at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:1343)
>       ... 42 more
> Caused by: javax.xml.bind.JAXBException
>  - with linked exception:
> [java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory]
>       at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:241)
>       at javax.xml.bind.ContextFinder.find(ContextFinder.java:477)
>       at javax.xml.bind.JAXBContext.newInstance(JAXBContext.java:656)
>       at org.apache.hadoop.shaded.com.sun.jersey.api.json.JSONJAXBContext.<init>(JSONJAXBContext.java:255)
>       at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver.<init>(JAXBContextResolver.java:122)
>       at org.apache.hadoop.yarn.server.resourcemanager.webapp.JAXBContextResolver$$FastClassByGuice$$6a7be7f6.newInstance(<generated>)
>       at org.apache.hadoop.shaded.com.google.inject.internal.DefaultConstructionProxyFactory$FastClassProxy.newInstance(DefaultConstructionProxyFactory.java:89)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorInjector.provision(ConstructorInjector.java:114)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorInjector.construct(ConstructorInjector.java:91)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ConstructorBindingImpl$Factory.get(ConstructorBindingImpl.java:306)
>       at org.apache.hadoop.shaded.com.google.inject.internal.ProviderToInternalFactoryAdapter.get(ProviderToInternalFactoryAdapter.java:40)
>       at org.apache.hadoop.shaded.com.google.inject.internal.SingletonScope$1.get(SingletonScope.java:168)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InternalFactoryToProviderAdapter.get(InternalFactoryToProviderAdapter.java:39)
>       at org.apache.hadoop.shaded.com.google.inject.internal.InjectorImpl$1.get(InjectorImpl.java:1094)
>       ... 95 more
> Caused by: java.lang.ClassNotFoundException: com.sun.xml.internal.bind.v2.ContextFactory
>       at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:581)
>       at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:178)
>       at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>       at org.apache.hadoop.shaded.org.eclipse.jetty.webapp.WebAppClassLoader.loadClass(WebAppClassLoader.java:538)
>       at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:522)
>       at javax.xml.bind.ContextFinder.safeLoadClass(ContextFinder.java:594)
>       at javax.xml.bind.ContextFinder.newInstance(ContextFinder.java:239)
>       ... 108 more
> {code}
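
Both traces bottom out in the same root cause: the unrelocated javax.xml.bind.ContextFinder on the test classpath ends up trying its default factory, com.sun.xml.internal.bind.v2.ContextFactory, a JDK-internal class that no longer exists on Java 11+ (JAXB was removed from the JDK in Java 11), so Guice cannot provision JAXBContextResolver and the ResourceManager web app fails to start. A minimal sketch of that failure outside the Hadoop test harness, assuming Java 11+ with a jaxb-api 2.x jar on the classpath and no JAXB runtime present (the class name JaxbRuntimeCheck is made up for illustration):

{code}
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;

// Sketch only, not the project's fix. Assumptions: Java 11+, the javax.xml.bind
// API (jaxb-api 2.x) is on the classpath, and no JAXB runtime such as
// com.sun.xml.bind:jaxb-impl is available. ContextFinder then tries the
// JDK-internal default factory com.sun.xml.internal.bind.v2.ContextFactory,
// which is gone on Java 11+, so newInstance() fails with a JAXBException whose
// linked exception is the same ClassNotFoundException reported above.
public class JaxbRuntimeCheck {
    public static void main(String[] args) {
        try {
            JAXBContext.newInstance(JaxbRuntimeCheck.class);
            System.out.println("JAXB runtime found; context created");
        } catch (JAXBException e) {
            // Expected when no JAXB implementation is visible to this class loader.
            e.printStackTrace();
        }
    }
}
{code}

Supplying a JAXB runtime that the web app's class loader can see makes the same newInstance() call succeed; the sketch only illustrates the failure mode, not the change that went into 3.4.0.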



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
