[ 
https://issues.apache.org/jira/browse/FLINK-29190?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

zck closed FLINK-29190.
-----------------------
    Fix Version/s: 1.15.2
     Release Note: https://issues.apache.org/jira/browse/FLINK-24761
       Resolution: Fixed

> flink hive left join mysql error
> --------------------------------
>
>                 Key: FLINK-29190
>                 URL: https://issues.apache.org/jira/browse/FLINK-29190
>             Project: Flink
>          Issue Type: Bug
>          Components: Table SQL / Runtime
>    Affects Versions: 1.14.0
>            Reporter: zck
>            Priority: Major
>             Fix For: 1.15.2
>
>
> If I remove the custom function, the query works fine.
> select * from hive_table left join mysql_table on hive_table.id=mysql_table 
> .id where
> mysql_table .name='kcz' and hive_table.dt=date_sub(CURRENT_DATE, 1);
>  
> date_sub is a user-defined function.
> public class DateSubUDF extends ScalarFunction {
> public String eval(LocalDate date, int day) {
> return DateUtils.getStringByLocalDate(date.minusDays(day));
> }
> }
>  
> CREATE TABLE if not exists car_info
> (
> vin STRING,
> battery_factory STRING
> ) WITH (
> 'connector' = 'jdbc',
> 'url' = 'url',
> 'table-name' = 'car_info',
> 'username' = 'name',
> 'password' = 'password'
> );
> The Hive table is not created with Flink's DDL syntax; it is a native Hive 
> table.
>  
> This is the error log.
>  
>  
> org.apache.flink.client.program.ProgramInvocationException: The main method 
> caused an error: Could not instantiate generated class 'PartitionPruner$9'
>     at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:372)
>     at 
> org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:222)
>     at 
> org.apache.flink.client.ClientUtils.executeProgram(ClientUtils.java:114)
>     at 
> org.apache.flink.client.cli.CliFrontend.executeProgram(CliFrontend.java:812)
>     at org.apache.flink.client.cli.CliFrontend.run(CliFrontend.java:246)
>     at 
> org.apache.flink.client.cli.CliFrontend.parseAndRun(CliFrontend.java:1054)
>     at 
> org.apache.flink.client.cli.CliFrontend.lambda$main$10(CliFrontend.java:1132)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1761)
>     at 
> org.apache.flink.runtime.security.contexts.HadoopSecurityContext.runSecured(HadoopSecurityContext.java:41)
>     at org.apache.flink.client.cli.CliFrontend.main(CliFrontend.java:1132)
> Caused by: java.lang.RuntimeException: Could not instantiate generated class 
> 'PartitionPruner$9'
>     at 
> org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:75)
>     at 
> org.apache.flink.table.planner.plan.utils.PartitionPruner$.prunePartitions(PartitionPruner.scala:112)
>     at 
> org.apache.flink.table.planner.plan.utils.PartitionPruner.prunePartitions(PartitionPruner.scala)
>     at 
> org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRule.lambda$onMatch$3(PushPartitionIntoTableSourceScanRule.java:163)
>     at 
> org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRule.readPartitionFromCatalogWithoutFilterAndPrune(PushPartitionIntoTableSourceScanRule.java:373)
>     at 
> org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRule.readPartitionFromCatalogAndPrune(PushPartitionIntoTableSourceScanRule.java:351)
>     at 
> org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRule.readPartitionsAndPrune(PushPartitionIntoTableSourceScanRule.java:303)
>     at 
> org.apache.flink.table.planner.plan.rules.logical.PushPartitionIntoTableSourceScanRule.onMatch(PushPartitionIntoTableSourceScanRule.java:171)
>     at 
> org.apache.calcite.plan.AbstractRelOptPlanner.fireRule(AbstractRelOptPlanner.java:333)
>     at org.apache.calcite.plan.hep.HepPlanner.applyRule(HepPlanner.java:542)
>     at org.apache.calcite.plan.hep.HepPlanner.applyRules(HepPlanner.java:407)
>     at 
> org.apache.calcite.plan.hep.HepPlanner.executeInstruction(HepPlanner.java:243)
>     at 
> org.apache.calcite.plan.hep.HepInstruction$RuleInstance.execute(HepInstruction.java:127)
>     at 
> org.apache.calcite.plan.hep.HepPlanner.executeProgram(HepPlanner.java:202)
>     at org.apache.calcite.plan.hep.HepPlanner.findBestExp(HepPlanner.java:189)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkHepProgram.optimize(FlinkHepProgram.scala:69)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkHepRuleSetProgram.optimize(FlinkHepRuleSetProgram.scala:87)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:63)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1$$anonfun$apply$1.apply(FlinkGroupProgram.scala:60)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>     at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>     at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>     at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>     at 
> scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
>     at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:60)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram$$anonfun$optimize$1.apply(FlinkGroupProgram.scala:55)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at scala.collection.immutable.Range.foreach(Range.scala:160)
>     at 
> scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
>     at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkGroupProgram.optimize(FlinkGroupProgram.scala:55)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:62)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram$$anonfun$optimize$1.apply(FlinkChainedProgram.scala:58)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at 
> scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
>     at scala.collection.Iterator$class.foreach(Iterator.scala:891)
>     at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
>     at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
>     at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
>     at 
> scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
>     at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
>     at 
> org.apache.flink.table.planner.plan.optimize.program.FlinkChainedProgram.optimize(FlinkChainedProgram.scala:57)
>     at 
> org.apache.flink.table.planner.plan.optimize.StreamCommonSubGraphBasedOptimizer.optimizeTree(StreamCommonSubGraphBasedOptimizer.scala:163)
>     at 
> org.apache.flink.table.planner.plan.optimize.StreamCommonSubGraphBasedOptimizer.doOptimize(StreamCommonSubGraphBasedOptimizer.scala:77)
>     at 
> org.apache.flink.table.planner.plan.optimize.CommonSubGraphBasedOptimizer.optimize(CommonSubGraphBasedOptimizer.scala:77)
>     at 
> org.apache.flink.table.planner.delegation.PlannerBase.optimize(PlannerBase.scala:300)
>     at 
> org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:183)
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1665)
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeQueryOperation(TableEnvironmentImpl.java:805)
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeInternal(TableEnvironmentImpl.java:1274)
>     at 
> org.apache.flink.table.api.internal.TableEnvironmentImpl.executeSql(TableEnvironmentImpl.java:742)
>     at com.hycan.bigdata.utils.SqlUtils.callCommand(SqlUtils.java:49)
>     at com.hycan.bigdata.job.SchemaJob.main(SchemaJob.java:94)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>     at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:498)
>     at 
> org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:355)
>     ... 11 more
> Caused by: org.apache.flink.util.FlinkRuntimeException: 
> org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
> compiled. This is a bug. Please file an issue.
>     at 
> org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:76)
>     at 
> org.apache.flink.table.runtime.generated.GeneratedClass.compile(GeneratedClass.java:102)
>     at 
> org.apache.flink.table.runtime.generated.GeneratedClass.newInstance(GeneratedClass.java:69)
>     ... 72 more
> Caused by: 
> org.apache.flink.shaded.guava30.com.google.common.util.concurrent.UncheckedExecutionException:
>  org.apache.flink.api.common.InvalidProgramException: Table program cannot be 
> compiled. This is a bug. Please file an issue.
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2051)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache.get(LocalCache.java:3962)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4859)
>     at 
> org.apache.flink.table.runtime.generated.CompileUtils.compile(CompileUtils.java:74)
>     ... 74 more
> Caused by: org.apache.flink.api.common.InvalidProgramException: Table program 
> cannot be compiled. This is a bug. Please file an issue.
>     at 
> org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:89)
>     at 
> org.apache.flink.table.runtime.generated.CompileUtils.lambda$compile$1(CompileUtils.java:74)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4864)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3529)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2278)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2155)
>     at 
> org.apache.flink.shaded.guava30.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2045)
>     ... 77 more
> Caused by: org.codehaus.commons.compiler.CompileException: Line 11, Column 
> 30: Cannot determine simple type name "com"
>     at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:12211)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6833)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6594)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6607)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6607)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6607)
>     at 
> org.codehaus.janino.UnitCompiler.getReferenceType(UnitCompiler.java:6607)
>     at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:6573)
>     at org.codehaus.janino.UnitCompiler.access$13900(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$22$1.visitReferenceType(UnitCompiler.java:6481)
>     at 
> org.codehaus.janino.UnitCompiler$22$1.visitReferenceType(UnitCompiler.java:6476)
>     at org.codehaus.janino.Java$ReferenceType.accept(Java.java:3928)
>     at org.codehaus.janino.UnitCompiler$22.visitType(UnitCompiler.java:6476)
>     at org.codehaus.janino.UnitCompiler$22.visitType(UnitCompiler.java:6469)
>     at org.codehaus.janino.Java$ReferenceType.accept(Java.java:3927)
>     at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6469)
>     at org.codehaus.janino.UnitCompiler.access$1300(UnitCompiler.java:215)
>     at org.codehaus.janino.UnitCompiler$25.getType(UnitCompiler.java:8271)
>     at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:6873)
>     at org.codehaus.janino.UnitCompiler.access$14400(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitFieldAccess(UnitCompiler.java:6499)
>     at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitFieldAccess(UnitCompiler.java:6494)
>     at org.codehaus.janino.Java$FieldAccess.accept(Java.java:4310)
>     at 
> org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6494)
>     at 
> org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6490)
>     at org.codehaus.janino.Java$Lvalue.accept(Java.java:4148)
>     at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6490)
>     at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6469)
>     at org.codehaus.janino.Java$Rvalue.accept(Java.java:4116)
>     at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6469)
>     at org.codehaus.janino.UnitCompiler.getType2(UnitCompiler.java:6855)
>     at org.codehaus.janino.UnitCompiler.access$14200(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitAmbiguousName(UnitCompiler.java:6497)
>     at 
> org.codehaus.janino.UnitCompiler$22$2$1.visitAmbiguousName(UnitCompiler.java:6494)
>     at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:4224)
>     at 
> org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6494)
>     at 
> org.codehaus.janino.UnitCompiler$22$2.visitLvalue(UnitCompiler.java:6490)
>     at org.codehaus.janino.Java$Lvalue.accept(Java.java:4148)
>     at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6490)
>     at org.codehaus.janino.UnitCompiler$22.visitRvalue(UnitCompiler.java:6469)
>     at org.codehaus.janino.Java$Rvalue.accept(Java.java:4116)
>     at org.codehaus.janino.UnitCompiler.getType(UnitCompiler.java:6469)
>     at org.codehaus.janino.UnitCompiler.findIMethod(UnitCompiler.java:9026)
>     at org.codehaus.janino.UnitCompiler.compileGet2(UnitCompiler.java:5062)
>     at org.codehaus.janino.UnitCompiler.access$9100(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$16.visitMethodInvocation(UnitCompiler.java:4423)
>     at 
> org.codehaus.janino.UnitCompiler$16.visitMethodInvocation(UnitCompiler.java:4396)
>     at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:5073)
>     at org.codehaus.janino.UnitCompiler.compileGet(UnitCompiler.java:4396)
>     at 
> org.codehaus.janino.UnitCompiler.compileGetValue(UnitCompiler.java:5662)
>     at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:3783)
>     at org.codehaus.janino.UnitCompiler.access$5900(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$13.visitMethodInvocation(UnitCompiler.java:3762)
>     at 
> org.codehaus.janino.UnitCompiler$13.visitMethodInvocation(UnitCompiler.java:3734)
>     at org.codehaus.janino.Java$MethodInvocation.accept(Java.java:5073)
>     at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3734)
>     at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:2360)
>     at org.codehaus.janino.UnitCompiler.access$1800(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1494)
>     at 
> org.codehaus.janino.UnitCompiler$6.visitExpressionStatement(UnitCompiler.java:1487)
>     at org.codehaus.janino.Java$ExpressionStatement.accept(Java.java:2874)
>     at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1487)
>     at 
> org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1567)
>     at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3388)
>     at 
> org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1357)
>     at 
> org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1330)
>     at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:822)
>     at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:432)
>     at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:215)
>     at 
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:411)
>     at 
> org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:406)
>     at 
> org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1414)
>     at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:406)
>     at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:378)
>     at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:237)
>     at 
> org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:465)
>     at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:216)
>     at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:207)
>     at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
>     at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:75)
>     at 
> org.apache.flink.table.runtime.generated.CompileUtils.doCompile(CompileUtils.java:86)
>     ... 83 more



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to