????????:
mongosql ??sql: CREATE TABLE label ( distinct_id BIGINT, xwho String, sync5 decimal, sync4 decimal, sync6 string, syncea1 string, aa string, ceshi string, ttt string, tongji decimal, qweqwe array<string>, xingwei array<string>, rr array<string>, eee array<string>, gggg array<string>, riqigeshi_del string, ceshisync2 string, string_tag string, number_tag decimal, boolean_tag int, datetime_tag bigint, arr_tag array<string>, ds int ) WITH ( 'connector' = 'mongodb', 'database'='dev', 'collection'='labelSync', 'uri'='mongodb://admin:SM67q89izW4itH7%25@192.168.221.201:27017,192.168.221.202:27018,192.168.221.202:27019/dev', 'maxConnectionIdleTime'='20000', 'batchSize'='100' ) ????sql: INSERT INTO label select smarttag_base_table_3.distinct_id as distinct_id, smarttag_base_table_3.xwho as xwho, sync5,sync4,sync6, syncea1, aa,ceshi, ttt, tongji, qweqwe, xingwei, rr, eee, gggg, riqigeshi_del, ceshisync2,string_tag, number_tag, boolean_tag, datetime_tag, arr_tag,smarttag_base_table_3.ds as ds from smarttag_base_table_3 FULL JOIN smarttag_base_table_2 on smarttag_base_table_3.distinct_id=smarttag_base_table_2.distinct_id and smarttag_base_table_3.xwho=smarttag_base_table_2.xwho and smarttag_base_table_3.ds=smarttag_base_table_2.ds FULL JOIN smarttag_derived_table_4 on smarttag_base_table_2.distinct_id=smarttag_derived_table_4.distinct_id and smarttag_base_table_2.xwho=smarttag_derived_table_4.xwho and smarttag_base_table_2.ds=smarttag_derived_table_4.ds FULL JOIN smarttag_derived_table_1 on smarttag_derived_table_4.distinct_id=smarttag_derived_table_1.distinct_id and smarttag_derived_table_4.xwho=smarttag_derived_table_1.xwho and smarttag_derived_table_4.ds=smarttag_derived_table_1.ds FULL JOIN smarttag_base_table_5 on smarttag_derived_table_1.distinct_id=smarttag_base_table_5.distinct_id and smarttag_derived_table_1.xwho=smarttag_base_table_5.xwho and smarttag_derived_table_1.ds=smarttag_base_table_5.ds ????????????mongo??sqlsink,????????????join??????sinkmongo??????????????: 
org.apache.flink.table.api.TableException: Table sink 'default_catalog.default_database.label' doesn't support consuming update and delete changes which is produced by node Join(joinType=[FullOuterJoin], where=[((distinct_id2 = distinct_id0) AND (xwho2 = xwho0) AND (ds2 = ds0))], select=[distinct_id, xwho, sync5, ds, sync4, sync6, syncea1, aa, distinct_id2, xwho2, ceshi, ttt, tongji, qweqwe, xingwei, rr, eee, gggg, riqigeshi_del, ceshisync2, ds2, distinct_id0, xwho0, string_tag, number_tag, boolean_tag, datetime_tag, arr_tag, ds0], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey]) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.createNewNode(FlinkChangelogModeInferenceProgram.scala:389) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.visit(FlinkChangelogModeInferenceProgram.scala:267) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.org$apache$flink$table$planner$plan$optimize$program$FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$visitChild(FlinkChangelogModeInferenceProgram.scala:348) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$anonfun$3.apply(FlinkChangelogModeInferenceProgram.scala:337) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$anonfun$3.apply(FlinkChangelogModeInferenceProgram.scala:336) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.immutable.Range.foreach(Range.scala:160) at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) at 
scala.collection.AbstractTraversable.map(Traversable.scala:104) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.visitChildren(FlinkChangelogModeInferenceProgram.scala:336) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.visit(FlinkChangelogModeInferenceProgram.scala:287) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.org$apache$flink$table$planner$plan$optimize$program$FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$visitChild(FlinkChangelogModeInferenceProgram.scala:348) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$anonfun$3.apply(FlinkChangelogModeInferenceProgram.scala:337) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor$$anonfun$3.apply(FlinkChangelogModeInferenceProgram.scala:336) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234) at scala.collection.immutable.Range.foreach(Range.scala:160) at scala.collection.TraversableLike$class.map(TraversableLike.scala:234) at scala.collection.AbstractTraversable.map(Traversable.scala:104) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.visitChildren(FlinkChangelogModeInferenceProgram.scala:336) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram$SatisfyModifyKindSetTraitVisitor.visit(FlinkChangelogModeInferenceProgram.scala:126) at org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram.optimize(FlinkChangelogModeInferenceProgram.scala:51) at 
org.apache.flink.table.planner.plan.optimize.program.FlinkChangelogModeInferenceProgram.optimize(FlinkChangelogModeInferenceProgram.scala:40) at org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:63) at org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:60) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.Iterator$class.foreach(Iterator.scala:891) at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) at scala.collection.AbstractIterable.foreach(Iterable.scala:54) at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104) at org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:60) at org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:55) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.immutable.Range.foreach(Range.scala:160) at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104) at org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram.optimize(FlinkGroupProgram.scala:55) at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:62) at 
org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:58) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157) at scala.collection.Iterator$class.foreach(Iterator.scala:891) at scala.collection.AbstractIterator.foreach(Iterator.scala:1334) at scala.collection.IterableLike$class.foreach(IterableLike.scala:72) at scala.collection.AbstractIterable.foreach(Iterable.scala:54) at scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157) at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104) at org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram.optimize(FlinkChainedProgram.scala:57) at org.apache.flink.table.planner.plan.optimize.StreamCommonSubGraphBasedOptimizer.optimizeTree(StreamCommonSubGraphBasedOptimizer.scala:163) at org.apache.flink.table.planner.plan.optimize.StreamCommonSubGraphBasedOptimizer.doOptimize(StreamCommonSubGraphBasedOptimizer.scala:77) at org.apache.flink.table.planner.plan.optimize.CommonSubGraphBasedOptimizer.optimize(CommonSubGraphBasedOptimizer.scala:77) at org.apache.flink.table.planner.delegation.PlannerBase.optimize(PlannerBase.scala:300) at org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:183) at org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1665) at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:752) at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:872) at org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:742) at TableAPI.envReadFileSysteam(TableAPI.java:496) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) at org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:54) at org.junit.rules.RunRules.evaluate(RunRules.java:20) at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) at org.junit.runners.ParentRunner.run(ParentRunner.java:413) at org.junit.runner.JUnitCore.run(JUnitCore.java:137) at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:69) at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33) at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:221) at 
com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:54) ?????????????????????? -1 ?????? ------------------ ???????? ------------------ ??????: "user-zh" <jingsongl...@gmail.com>; ????????: 2021??11??12??(??????) ????3:30 ??????: "user-zh"<user-zh@flink.apache.org>; ????: Re: Flinksql ????????full join ???????? Hi, ??????????????cherry-pick??1.12??????????????feature????1.14?????????????????? Best, Jingsong On Fri, Nov 12, 2021 at 3:06 PM ?????? <2572805...@qq.com.invalid> wrote: > > ???????? ??????????debug???????????????? > > > ??flink1.12.5??????flink-orc_2.11????????org/apache/flink/orc/vector/AbstractOrcColumnVector.java???? > ??createFlinkVector??????????ListColumnVector????????????????flink??master??????2021/5/12??wangwei1025??????pr??????????????????????????????????????1.12.5??????????????????wangwei1025???????????????????? > > > ??????: > > > > > string_tag string > > > > > number_tag number > > > > > boolean_tag boolean > > > > > datetime_tag datetime > > > > > arr_tag array<string> > > ??????????????????????????????SQL ????????????array<string>?????????????????????? > SQL:CREATE TABLE smarttag_base_table_5 ( > distinct_id BIGINT, > xwho VARCHAR, > string_tag string, > number_tag decimal, > boolean_tag integer, > datetime_tag bigint, > arr_tag ARRAY<STRING>, > ds INTEGER > ) WITH ( > 'connector' = 'filesystem', -- ????: ?????????????? > 'path' = 'hdfs://ark1:8020/tmp/usertag/20211029/db_31abd9593e9983ec/orcfile/smarttag_base_table_5/', -- ????: ?????????????? > 'format' = 'orc' -- ????: ?????????????????????????????????? ?????? ?????????????????? > ) > > > > > > ????:Unsupport vector: org.apache.hadoop.hive.ql.exec.vector.ListColumnVector > ??????????array<string>???????????????? ???? ??????????hive?????? 
> source??hdfs??????orc????&nbsp; > > ?????? > > > &nbsp; > > > > > ------------------ ???????? ------------------ > ??????: "user-zh" <tsreape...@gmail.com&gt;; > ????????:&nbsp;2021??11??12??(??????) ????10:59 > ??????:&nbsp;"flink??????????"<user-zh@flink.apache.org&gt;; > > ????:&nbsp;Re: Flinksql ????????full join ???????? > > > > Hi?? > > ???????????????????????????? join ?????????????? source ?????????????????????? source ???? > ddl???????????????????????????????????????????????????????????????????? source ???????????????????????????????? > > ?????? <2572805...@qq.com.invalid&gt; ??2021??11??11?????? ????9:44?????? > > &gt; ????:??????????full join???? > &gt; > &gt; > &gt; ????: > &gt; java.lang.RuntimeException: Failed to fetch next result > &gt; > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultIterator.nextResultFromFetcher(CollectResultIterator.java:109) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultIterator.hasNext(CollectResultIterator.java:80) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.table.planner.sinks.SelectTableSinkBase$RowIteratorWrapper.hasNext(SelectTableSinkBase.java:117) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.table.api.internal.TableResultImpl$CloseableRowIteratorWrapper.hasNext(TableResultImpl.java:350) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.table.utils.PrintUtils.printAsTableauForm(PrintUtils.java:149) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.table.api.internal.TableResultImpl.print(TableResultImpl.java:154) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at TableAPI.envReadFileSysteam(TableAPI.java:441) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native > &gt; Method) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at java.lang.reflect.Method.invoke(Method.java:498) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; 
org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.rules.ExternalResource$1.evaluate(ExternalResource.java:54) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at org.junit.rules.RunRules.evaluate(RunRules.java:20) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.junit.runners.ParentRunner.run(ParentRunner.java:413) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at org.junit.runner.JUnitCore.run(JUnitCore.java:137) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:69) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:221) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:54) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at sun.reflect.NativeMethodAccessorImpl.invoke0(Native > &gt; Method) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at java.lang.reflect.Method.invoke(Method.java:498) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; com.intellij.rt.execution.CommandLineWrapper.main(CommandLineWrapper.java:64) > &gt; Caused by: java.io.IOException: Failed to fetch job execution result > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; 
org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.getAccumulatorResults(CollectResultFetcher.java:169) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.next(CollectResultFetcher.java:118) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultIterator.nextResultFromFetcher(CollectResultIterator.java:106) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;... 39 more > &gt; Caused by: java.util.concurrent.ExecutionException: > &gt; org.apache.flink.runtime.client.JobExecutionException: Job execution failed. > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1915) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.getAccumulatorResults(CollectResultFetcher.java:167) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;... 41 more > &gt; Caused by: org.apache.flink.runtime.client.JobExecutionException: Job > &gt; execution failed. 
> &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.jobmaster.JobResult.toJobExecutionResult(JobResult.java:144) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.minicluster.MiniClusterJobClient.lambda$getJobExecutionResult$2(MiniClusterJobClient.java:117) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.CompletableFuture.uniApply(CompletableFuture.java:602) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.CompletableFuture.uniApplyStage(CompletableFuture.java:614) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.CompletableFuture.thenApply(CompletableFuture.java:1983) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.minicluster.MiniClusterJobClient.getJobExecutionResult(MiniClusterJobClient.java:114) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.collect.CollectResultFetcher.getAccumulatorResults(CollectResultFetcher.java:166) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;... 
41 more > &gt; Caused by: org.apache.flink.runtime.JobException: Recovery is suppressed > &gt; by NoRestartBackoffTimeStrategy > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.handleFailure(ExecutionFailureHandler.java:118) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.executiongraph.failover.flip1.ExecutionFailureHandler.getFailureHandlingResult(ExecutionFailureHandler.java:80) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.scheduler.DefaultScheduler.handleTaskFailure(DefaultScheduler.java:233) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.scheduler.DefaultScheduler.maybeHandleTaskFailure(DefaultScheduler.java:224) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.scheduler.DefaultScheduler.updateTaskExecutionStateInternal(DefaultScheduler.java:215) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.scheduler.SchedulerBase.updateTaskExecutionState(SchedulerBase.java:666) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.scheduler.SchedulerNG.updateTaskExecutionState(SchedulerNG.java:89) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.jobmaster.JobMaster.updateTaskExecutionState(JobMaster.java:446) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at sun.reflect.GeneratedMethodAccessor23.invoke(Unknown > &gt; Source) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at java.lang.reflect.Method.invoke(Method.java:498) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcInvocation(AkkaRpcActor.java:305) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleRpcMessage(AkkaRpcActor.java:212) > &gt; 
&amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.rpc.akka.FencedAkkaRpcActor.handleRpcMessage(FencedAkkaRpcActor.java:77) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.rpc.akka.AkkaRpcActor.handleMessage(AkkaRpcActor.java:158) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.japi.pf > &gt; .UnitCaseStatement.apply(CaseStatements.scala:26) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.japi.pf > &gt; .UnitCaseStatement.apply(CaseStatements.scala:21) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; scala.PartialFunction$class.applyOrElse(PartialFunction.scala:123) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.japi.pf > &gt; .UnitCaseStatement.applyOrElse(CaseStatements.scala:21) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:170) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; scala.PartialFunction$OrElse.applyOrElse(PartialFunction.scala:171) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.actor.Actor$class.aroundReceive(Actor.scala:517) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.actor.AbstractActor.aroundReceive(AbstractActor.scala:225) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.actor.ActorCell.receiveMessage(ActorCell.scala:592) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.actor.ActorCell.invoke(ActorCell.scala:561) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.dispatch.Mailbox.processMailbox(Mailbox.scala:258) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.dispatch.Mailbox.run(Mailbox.scala:225) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at akka.dispatch.Mailbox.exec(Mailbox.scala:235) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.dispatch.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.dispatch.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339) > &gt; 
&amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.dispatch.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; akka.dispatch.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) > &gt; Caused by: java.lang.RuntimeException: One or more fetchers have > &gt; encountered exception > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.fetcher.SplitFetcherManager.checkErrors(SplitFetcherManager.java:199) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.SourceReaderBase.getNextFetch(SourceReaderBase.java:154) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.SourceReaderBase.pollNext(SourceReaderBase.java:116) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.api.operators.SourceOperator.emitNext(SourceOperator.java:275) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at org.apache.flink.streaming.runtime.io > &gt; .StreamTaskSourceInput.emitNext(StreamTaskSourceInput.java:67) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at org.apache.flink.streaming.runtime.io > &gt; .StreamOneInputProcessor.processInput(StreamOneInputProcessor.java:65) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.runtime.tasks.StreamTask.processInput(StreamTask.java:398) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.runMailboxLoop(MailboxProcessor.java:191) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.runtime.tasks.StreamTask.runMailboxLoop(StreamTask.java:619) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:583) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:758) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; 
org.apache.flink.runtime.taskmanager.Task.run(Task.java:573) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at java.lang.Thread.run(Thread.java:748) > &gt; Caused by: java.lang.RuntimeException: SplitFetcher thread 0 received > &gt; unexpected exception while polling the records > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:146) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.run(SplitFetcher.java:101) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.FutureTask.run(FutureTask.java:266) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;... 
1 more > &gt; Caused by: java.lang.UnsupportedOperationException: Unsupport vector: > &gt; org.apache.hadoop.hive.ql.exec.vector.ListColumnVector > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.vector.AbstractOrcColumnVector.createFlinkVector(AbstractOrcColumnVector.java:73) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.OrcColumnarRowFileInputFormat.lambda$createPartitionedFormat$84717d21$1(OrcColumnarRowFileInputFormat.java:161) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.OrcColumnarRowFileInputFormat.createReaderBatch(OrcColumnarRowFileInputFormat.java:88) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.AbstractOrcFileInputFormat.createPoolOfBatches(AbstractOrcFileInputFormat.java:157) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.AbstractOrcFileInputFormat.createReader(AbstractOrcFileInputFormat.java:103) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.orc.AbstractOrcFileInputFormat.createReader(AbstractOrcFileInputFormat.java:52) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.file.src.impl.FileSourceSplitReader.checkSplitOrStartNext(FileSourceSplitReader.java:112) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.file.src.impl.FileSourceSplitReader.fetch(FileSourceSplitReader.java:65) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.fetcher.FetchTask.run(FetchTask.java:56) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;at > &gt; org.apache.flink.connector.base.source.reader.fetcher.SplitFetcher.runOnce(SplitFetcher.java:138) > &gt; &amp;nbsp;&amp;nbsp; &amp;nbsp;... 
6 more > > > > 2021-11-11 > > 21:30:32.431|INFO|org.apache.flink.runtime.blob.AbstractBlobCache|TransientBlobCache > > shutdown hook|close|240|Shutting down BLOB cache > > 2021-11-11 > > 21:30:32.433|INFO|org.apache.flink.runtime.blob.AbstractBlobCache|PermanentBlobCache > > shutdown hook|close|240|Shutting down BLOB cache > > 2021-11-11 > > 21:30:32.447|INFO|org.apache.flink.runtime.blob.BlobServer|BlobServer > > shutdown hook|close|345|Stopped BLOB server at 0.0.0.0:60726 > > > > ?????????????????????? -1 > > > > > > > > > > > > > > sql: > > select * from smarttag_base_table_3 FULL JOIN smarttag_base_table_2 on > > smarttag_base_table_3.distinct_id=smarttag_base_table_2.distinct_id > > FULL JOIN smarttag_derived_table_4 on > > smarttag_base_table_2.distinct_id=smarttag_derived_table_4.distinct_id > > FULL JOIN smarttag_derived_table_1 on > > smarttag_derived_table_4.distinct_id=smarttag_derived_table_1.distinct_id > > FULL JOIN smarttag_base_table_5 on > > smarttag_derived_table_1.distinct_id=smarttag_base_table_5.distinct_id > > > > > > > > ?? > > > > > >  -- Best, Jingsong Lee