[ https://issues.apache.org/jira/browse/FLINK-19843?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17223305#comment-17223305 ]

Dian Fu commented on FLINK-19843:
---------------------------------

https://dev.azure.com/apache-flink/apache-flink/_build/results?buildId=8626&view=logs&j=ba53eb01-1462-56a3-8e98-0dd97fbcaab5&t=bfbc6239-57a0-5db0-63f3-41551b4f7d51

> ParquetFsStreamingSinkITCase.testPart failed with "Trying to access closed classloader"
> ----------------------------------------------------------------------------------------
>
>                 Key: FLINK-19843
>                 URL: https://issues.apache.org/jira/browse/FLINK-19843
>             Project: Flink
>          Issue Type: Bug
>          Components: Formats (JSON, Avro, Parquet, ORC, SequenceFile)
>    Affects Versions: 1.12.0
>            Reporter: Dian Fu
>            Priority: Major
>              Labels: test-stability
>
> https://dev.azure.com/apache-flink/apache-flink/_build/results?buildId=8431&view=logs&j=ba53eb01-1462-56a3-8e98-0dd97fbcaab5&t=bfbc6239-57a0-5db0-63f3-41551b4f7d51
> {code}
> 2020-10-27T22:51:46.7422561Z [ERROR] testPart(org.apache.flink.formats.parquet.ParquetFsStreamingSinkITCase) Time elapsed: 7.031 s <<< ERROR!
> 2020-10-27T22:51:46.7423062Z java.lang.RuntimeException: Failed to fetch next result
> 2020-10-27T22:51:46.7425294Z     at org.apache.flink.streaming.api.operators.collect.CollectResultIterator.nextResultFromFetcher(CollectResultIterator.java:106)
> 2020-10-27T22:51:46.7426708Z     at org.apache.flink.streaming.api.operators.collect.CollectResultIterator.hasNext(CollectResultIterator.java:77)
> 2020-10-27T22:51:46.7427791Z     at org.apache.flink.table.planner.sinks.SelectTableSinkBase$RowIteratorWrapper.hasNext(SelectTableSinkBase.java:115)
> 2020-10-27T22:51:46.7428869Z     at org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.hasNext(TableResultImpl.java:355)
> 2020-10-27T22:51:46.7429957Z     at java.util.Iterator.forEachRemaining(Iterator.java:115)
> 2020-10-27T22:51:46.7430652Z     at org.apache.flink.util.CollectionUtil.iteratorToList(CollectionUtil.java:114)
> 2020-10-27T22:51:46.7431826Z     at org.apache.flink.table.planner.runtime.stream.FsStreamingSinkITCaseBase.check(FsStreamingSinkITCaseBase.scala:141)
> 2020-10-27T22:51:46.7432859Z     at org.apache.flink.table.planner.runtime.stream.FsStreamingSinkITCaseBase.test(FsStreamingSinkITCaseBase.scala:122)
> 2020-10-27T22:51:46.7433902Z     at org.apache.flink.table.planner.runtime.stream.FsStreamingSinkITCaseBase.testPart(FsStreamingSinkITCaseBase.scala:86)
> 2020-10-27T22:51:46.7434702Z     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 2020-10-27T22:51:46.7435452Z     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 2020-10-27T22:51:46.7436661Z     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 2020-10-27T22:51:46.7437367Z     at java.lang.reflect.Method.invoke(Method.java:498)
> 2020-10-27T22:51:46.7438119Z     at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
> 2020-10-27T22:51:46.7438966Z     at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
> 2020-10-27T22:51:46.7439789Z     at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
> 2020-10-27T22:51:46.7440666Z     at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
> 2020-10-27T22:51:46.7441740Z     at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
> 2020-10-27T22:51:46.7442533Z     at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
> 2020-10-27T22:51:46.7443290Z     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:298)
> 2020-10-27T22:51:46.7444227Z     at org.junit.internal.runners.statements.FailOnTimeout$CallableStatement.call(FailOnTimeout.java:292)
> 2020-10-27T22:51:46.7445043Z     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> 2020-10-27T22:51:46.7445631Z     at java.lang.Thread.run(Thread.java:748)
> 2020-10-27T22:51:46.7446383Z Caused by: java.io.IOException: Failed to fetch job execution result
> 2020-10-27T22:51:46.7447239Z     at org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.getAccumulatorResults(CollectResultFetcher.java:175)
> 2020-10-27T22:51:46.7448233Z     at org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.next(CollectResultFetcher.java:126)
> 2020-10-27T22:51:46.7449239Z     at org.apache.flink.streaming.api.operators.collect.CollectResultIterator.nextResultFromFetcher(CollectResultIterator.java:103)
> 2020-10-27T22:51:46.7449963Z     ... 22 more
> 2020-10-27T22:51:46.7450619Z Caused by: java.util.concurrent.ExecutionException: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
> 2020-10-27T22:51:46.7451795Z     at java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
> 2020-10-27T22:51:46.7452573Z     at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1928)
> 2020-10-27T22:51:46.7453500Z     at org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.getAccumulatorResults(CollectResultFetcher.java:172)
> 2020-10-27T22:51:46.7454213Z     ... 24 more
> 2020-10-27T22:51:46.7454773Z Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed.
> 2020-10-27T22:51:46.7455573Z     at org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:147)
> 2020-10-27T22:51:46.7456621Z     at org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$2(MiniClusterJobClient.java:119)
> 2020-10-27T22:51:46.7457526Z     at java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:616)
> 2020-10-27T22:51:46.7458304Z     at java.util.concurrent.CompletableFuture.uniApplyStage(CompletableFuture.java:628)
> 2020-10-27T22:51:46.7459124Z     at java.util.concurrent.CompletableFuture.thenApply(CompletableFuture.java:1996)
> 2020-10-27T22:51:46.7460037Z     at org.apache.flink.runtime.minicluster.MiniClusterJobClient.getJobExecutionResult(MiniClusterJobClient.java:117)
> 2020-10-27T22:51:46.7461155Z     ... 25 more
> 2020-10-27T22:51:46.7461778Z Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed by NoRestartBackoffTimeStrategy
> 2020-10-27T22:51:46.7462882Z     at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:116)
> 2020-10-27T22:51:46.7464036Z     at org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:78)
> 2020-10-27T22:51:46.7465069Z     at org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:218)
> 2020-10-27T22:51:46.7466097Z     at org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:211)
> 2020-10-27T22:51:46.7467159Z     at org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:205)
> 2020-10-27T22:51:46.7468108Z     at org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:523)
> 2020-10-27T22:51:46.7468999Z     at org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:419)
> 2020-10-27T22:51:46.7469731Z     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
> 2020-10-27T22:51:46.7470414Z     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
> 2020-10-27T22:51:46.7471424Z     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
> 2020-10-27T22:51:46.7472158Z     at java.lang.reflect.Method.invoke(Method.java:498)
> 2020-10-27T22:51:46.7472887Z     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:286)
> 2020-10-27T22:51:46.7473757Z     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:201)
> 2020-10-27T22:51:46.7474636Z     at org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:74)
> 2020-10-27T22:51:46.7475487Z     at org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:154)
> 2020-10-27T22:51:46.7476376Z     at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:26)
> 2020-10-27T22:51:46.7477064Z     at akka.japi.pf.UnitCaseStatement.apply(CaseStatements.scala:21)
> 2020-10-27T22:51:46.7477756Z     at scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123)
> 2020-10-27T22:51:46.7478476Z     at akka.japi.pf.UnitCaseStatement.applyOrElse(CaseStatements.scala:21)
> 2020-10-27T22:51:46.7479191Z     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170)
> 2020-10-27T22:51:46.7479882Z     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
> 2020-10-27T22:51:46.7480591Z     at scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171)
> 2020-10-27T22:51:46.7481457Z     at akka.actor.Actor$class.aroundReceive(Actor.scala:517)
> 2020-10-27T22:51:46.7482108Z     at akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225)
> 2020-10-27T22:51:46.7482807Z     at akka.actor.ActorCell.receiveMessage(ActorCell.scala:592)
> 2020-10-27T22:51:46.7483544Z     at akka.actor.ActorCell.invoke(ActorCell.scala:561)
> 2020-10-27T22:51:46.7484210Z     at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258)
> 2020-10-27T22:51:46.7484803Z     at akka.dispatch.Mailbox.run(Mailbox.scala:225)
> 2020-10-27T22:51:46.7485362Z     at akka.dispatch.Mailbox.exec(Mailbox.scala:235)
> 2020-10-27T22:51:46.7486135Z     at akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
> 2020-10-27T22:51:46.7486871Z     at akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
> 2020-10-27T22:51:46.7487561Z     at akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
> 2020-10-27T22:51:46.7488288Z     at akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
> 2020-10-27T22:51:46.7491259Z Caused by: java.lang.IllegalStateException: Trying to access closed classloader. Please check if you store classloaders directly or indirectly in static fields. If the stacktrace suggests that the leak occurs in a third party library and cannot be fixed immediately, you can disable this check with the configuration 'classloader.check-leaked-classloader'.
> 2020-10-27T22:51:46.7492610Z     at org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:161)
> 2020-10-27T22:51:46.7493370Z     at org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:179)
> 2020-10-27T22:51:46.7493961Z     at org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2780)
> 2020-10-27T22:51:46.7494430Z     at org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3036)
> 2020-10-27T22:51:46.7494986Z     at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2995)
> 2020-10-27T22:51:46.7495434Z     at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968)
> 2020-10-27T22:51:46.7495987Z     at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848)
> 2020-10-27T22:51:46.7496428Z     at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200)
> 2020-10-27T22:51:46.7496851Z     at org.apache.hadoop.conf.Configuration.getTrimmed(Configuration.java:1254)
> 2020-10-27T22:51:46.7497298Z     at org.apache.hadoop.conf.Configuration.getInt(Configuration.java:1479)
> 2020-10-27T22:51:46.7497762Z     at org.apache.hadoop.io.compress.GzipCodec.createInputStream(GzipCodec.java:182)
> 2020-10-27T22:51:46.7498255Z     at org.apache.parquet.hadoop.CodecFactory$HeapBytesDecompressor.decompress(CodecFactory.java:109)
> 2020-10-27T22:51:46.7498838Z     at org.apache.parquet.hadoop.ColumnChunkPageReadStore$ColumnChunkPageReader$1.visit(ColumnChunkPageReadStore.java:103)
> 2020-10-27T22:51:46.7499449Z     at org.apache.parquet.hadoop.ColumnChunkPageReadStore$ColumnChunkPageReader$1.visit(ColumnChunkPageReadStore.java:99)
> 2020-10-27T22:51:46.7499964Z     at org.apache.parquet.column.page.DataPageV1.accept(DataPageV1.java:120)
> 2020-10-27T22:51:46.7500494Z     at org.apache.parquet.hadoop.ColumnChunkPageReadStore$ColumnChunkPageReader.readPage(ColumnChunkPageReadStore.java:99)
> 2020-10-27T22:51:46.7501343Z     at org.apache.flink.formats.parquet.vector.reader.AbstractColumnReader.readToVector(AbstractColumnReader.java:171)
> 2020-10-27T22:51:46.7501958Z     at org.apache.flink.formats.parquet.vector.ParquetColumnarRowSplitReader.nextBatch(ParquetColumnarRowSplitReader.java:299)
> 2020-10-27T22:51:46.7502608Z     at org.apache.flink.formats.parquet.vector.ParquetColumnarRowSplitReader.ensureBatch(ParquetColumnarRowSplitReader.java:270)
> 2020-10-27T22:51:46.7503255Z     at org.apache.flink.formats.parquet.vector.ParquetColumnarRowSplitReader.reachedEnd(ParquetColumnarRowSplitReader.java:251)
> 2020-10-27T22:51:46.7503984Z     at org.apache.flink.formats.parquet.ParquetFileSystemFormatFactory$ParquetInputFormat.reachedEnd(ParquetFileSystemFormatFactory.java:198)
> 2020-10-27T22:51:46.7504643Z     at org.apache.flink.streaming.api.functions.source.InputFormatSourceFunction.run(InputFormatSourceFunction.java:90)
> 2020-10-27T22:51:46.7505201Z     at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:100)
> 2020-10-27T22:51:46.7505681Z     at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:63)
> 2020-10-27T22:51:46.7506287Z     at org.apache.flink.streaming.runtime.tasks.SourceStreamTask$LegacySourceFunctionThread.run(SourceStreamTask.java:213)
> 2020-10-27T22:51:46.7506648Z 
> 2020-10-27T22:51:46.7507973Z Exception in thread "Thread-14" java.lang.IllegalStateException: Trying to access closed classloader. Please check if you store classloaders directly or indirectly in static fields. If the stacktrace suggests that the leak occurs in a third party library and cannot be fixed immediately, you can disable this check with the configuration 'classloader.check-leaked-classloader'.
> 2020-10-27T22:51:46.7509055Z     at org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.ensureInner(FlinkUserCodeClassLoaders.java:161)
> 2020-10-27T22:51:46.7509903Z     at org.apache.flink.runtime.execution.librarycache.FlinkUserCodeClassLoaders$SafetyNetWrapperClassLoader.getResource(FlinkUserCodeClassLoaders.java:179)
> 2020-10-27T22:51:46.7510510Z     at org.apache.hadoop.conf.Configuration.getResource(Configuration.java:2780)
> 2020-10-27T22:51:46.7511109Z     at org.apache.hadoop.conf.Configuration.getStreamReader(Configuration.java:3036)
> 2020-10-27T22:51:46.7511579Z     at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:2995)
> 2020-10-27T22:51:46.7512048Z     at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:2968)
> 2020-10-27T22:51:46.7512482Z     at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:2848)
> 2020-10-27T22:51:46.7512921Z     at org.apache.hadoop.conf.Configuration.get(Configuration.java:1200)
> 2020-10-27T22:51:46.7513444Z     at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1812)
> 2020-10-27T22:51:46.7513895Z     at org.apache.hadoop.conf.Configuration.getTimeDuration(Configuration.java:1789)
> 2020-10-27T22:51:46.7514390Z     at org.apache.hadoop.util.ShutdownHookManager.getShutdownTimeout(ShutdownHookManager.java:183)
> 2020-10-27T22:51:46.7514909Z     at org.apache.hadoop.util.ShutdownHookManager.shutdownExecutor(ShutdownHookManager.java:145)
> 2020-10-27T22:51:46.7515398Z     at org.apache.hadoop.util.ShutdownHookManager.access$300(ShutdownHookManager.java:65)
> 2020-10-27T22:51:46.7516021Z     at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:102)
> 2020-10-27T22:51:47.4802370Z [INFO] Running org.apache.flink.formats.parquet.ParquetTableSourceITCase
> {code}
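
The exception message itself names 'classloader.check-leaked-classloader' as a way to silence the safety net while a third-party leak is being investigated. Purely as an illustration (not the actual ITCase code), a minimal sketch of how a local/MiniCluster-based test could pass that option; the local environment, parallelism, and placeholder pipeline below are assumptions:

{code:java}
// Sketch only: disable the leaked-classloader check for a local test run.
// The option key is taken verbatim from the exception message above; the
// class name and the placeholder pipeline are hypothetical.
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class LeakCheckWorkaroundSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Turn off the SafetyNetWrapperClassLoader check from the stack trace.
        conf.setBoolean("classloader.check-leaked-classloader", false);

        StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironment(1, conf);

        // Placeholder pipeline so the sketch is runnable on its own.
        env.fromElements(1, 2, 3).print();
        env.execute("leak-check-workaround-sketch");
    }
}
{code}

Note that this only suppresses the check; the leak reported here (Hadoop's ShutdownHookManager and Configuration reloading resources through the already-closed user code classloader) would still need a proper fix.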



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
