[
https://issues.apache.org/jira/browse/FLINK-19398?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Dawid Wysakowicz updated FLINK-19398:
-------------------------------------
Comment: was deleted
(was: This is not the only case when Hive does not work if submitted from the user
code. I had problems when trying to submit a query from sql-client with the
flink-sql-connector-hive jar passed with the {{-j}} option.
{code}
org.apache.flink.table.client.gateway.SqlExecutionException: Invalid SQL query.
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQueryInternal(LocalExecutor.java:527)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQuery(LocalExecutor.java:365)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.cli.CliClient.callSelect(CliClient.java:634)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.cli.CliClient.callCommand(CliClient.java:324)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at java.util.Optional.ifPresent(Optional.java:159) [?:1.8.0_252]
at org.apache.flink.table.client.cli.CliClient.open(CliClient.java:216)
[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at org.apache.flink.table.client.SqlClient.openCli(SqlClient.java:141)
[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at org.apache.flink.table.client.SqlClient.start(SqlClient.java:114)
[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at org.apache.flink.table.client.SqlClient.main(SqlClient.java:196)
[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
Caused by: org.apache.flink.table.catalog.exceptions.CatalogException: Failed
to create Hive Metastore client
at
org.apache.flink.table.catalog.hive.client.HiveShimV230.getHiveMetastoreClient(HiveShimV230.java:52)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.createMetastoreClient(HiveMetastoreClientWrapper.java:245)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.<init>(HiveMetastoreClientWrapper.java:76)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory.create(HiveMetastoreClientFactory.java:35)
~[?:?]
at
org.apache.flink.connectors.hive.util.HivePartitionUtils.getAllPartitions(HivePartitionUtils.java:110)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:134)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:120)
~[?:?]
at
org.apache.flink.table.planner.plan.nodes.common.CommonPhysicalTableSourceScan.createSourceTransformation(CommonPhysicalTableSourceScan.scala:88)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:94)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlan(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToTransformation(BatchExecLegacySink.scala:129)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:95)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlan(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:86)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner.translateToPlan(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:167)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1261)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translateAndClearBuffer(TableEnvironmentImpl.java:1253)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.getPipeline(StreamTableEnvironmentImpl.java:327)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$createPipeline$1(ExecutionContext.java:286)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:257)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.createPipeline(ExecutionContext.java:283)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQueryInternal(LocalExecutor.java:521)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
... 8 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_252]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_252]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_252]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_252]
at
org.apache.flink.table.catalog.hive.client.HiveShimV230.getHiveMetastoreClient(HiveShimV230.java:50)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.createMetastoreClient(HiveMetastoreClientWrapper.java:245)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.<init>(HiveMetastoreClientWrapper.java:76)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory.create(HiveMetastoreClientFactory.java:35)
~[?:?]
at
org.apache.flink.connectors.hive.util.HivePartitionUtils.getAllPartitions(HivePartitionUtils.java:110)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:134)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:120)
~[?:?]
at
org.apache.flink.table.planner.plan.nodes.common.CommonPhysicalTableSourceScan.createSourceTransformation(CommonPhysicalTableSourceScan.scala:88)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:94)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlan(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToTransformation(BatchExecLegacySink.scala:129)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:95)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlan(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:86)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner.translateToPlan(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:167)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1261)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translateAndClearBuffer(TableEnvironmentImpl.java:1253)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.getPipeline(StreamTableEnvironmentImpl.java:327)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$createPipeline$1(ExecutionContext.java:286)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:257)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.createPipeline(ExecutionContext.java:283)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQueryInternal(LocalExecutor.java:521)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
... 8 more
Caused by: java.lang.RuntimeException: Unable to instantiate
org.apache.hadoop.hive.metastore.HiveMetaStoreClient
at
org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1709)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:89)
~[?:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_252]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_252]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_252]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_252]
at
org.apache.flink.table.catalog.hive.client.HiveShimV230.getHiveMetastoreClient(HiveShimV230.java:50)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.createMetastoreClient(HiveMetastoreClientWrapper.java:245)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.<init>(HiveMetastoreClientWrapper.java:76)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory.create(HiveMetastoreClientFactory.java:35)
~[?:?]
at
org.apache.flink.connectors.hive.util.HivePartitionUtils.getAllPartitions(HivePartitionUtils.java:110)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:134)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:120)
~[?:?]
at
org.apache.flink.table.planner.plan.nodes.common.CommonPhysicalTableSourceScan.createSourceTransformation(CommonPhysicalTableSourceScan.scala:88)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:94)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlan(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToTransformation(BatchExecLegacySink.scala:129)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:95)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlan(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:86)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner.translateToPlan(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:167)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1261)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translateAndClearBuffer(TableEnvironmentImpl.java:1253)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.getPipeline(StreamTableEnvironmentImpl.java:327)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$createPipeline$1(ExecutionContext.java:286)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:257)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.createPipeline(ExecutionContext.java:283)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQueryInternal(LocalExecutor.java:521)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
... 8 more
Caused by: java.lang.NoSuchMethodException:
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(org.apache.hadoop.hive.conf.HiveConf,
org.apache.hadoop.hive.metastore.HiveMetaHookLoader, java.lang.Boolean)
at java.lang.Class.getConstructor0(Class.java:3082) ~[?:1.8.0_252]
at java.lang.Class.getDeclaredConstructor(Class.java:2178)
~[?:1.8.0_252]
at
org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1705)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
~[?:?]
at
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:89)
~[?:?]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
~[?:1.8.0_252]
at
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
~[?:1.8.0_252]
at
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
~[?:1.8.0_252]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_252]
at
org.apache.flink.table.catalog.hive.client.HiveShimV230.getHiveMetastoreClient(HiveShimV230.java:50)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.createMetastoreClient(HiveMetastoreClientWrapper.java:245)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientWrapper.<init>(HiveMetastoreClientWrapper.java:76)
~[?:?]
at
org.apache.flink.table.catalog.hive.client.HiveMetastoreClientFactory.create(HiveMetastoreClientFactory.java:35)
~[?:?]
at
org.apache.flink.connectors.hive.util.HivePartitionUtils.getAllPartitions(HivePartitionUtils.java:110)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource.getDataStream(HiveTableSource.java:134)
~[?:?]
at
org.apache.flink.connectors.hive.HiveTableSource$1.produceDataStream(HiveTableSource.java:120)
~[?:?]
at
org.apache.flink.table.planner.plan.nodes.common.CommonPhysicalTableSourceScan.createSourceTransformation(CommonPhysicalTableSourceScan.scala:88)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:94)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlanInternal(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecTableSourceScan.translateToPlan(BatchExecTableSourceScan.scala:44)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToTransformation(BatchExecLegacySink.scala:129)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:95)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlanInternal(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.exec.ExecNode$class.translateToPlan(ExecNode.scala:59)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.plan.nodes.physical.batch.BatchExecLegacySink.translateToPlan(BatchExecLegacySink.scala:48)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:86)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner$$anonfun$translateToPlan$1.apply(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.Iterator$class.foreach(Iterator.scala:891)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterator.foreach(Iterator.scala:1334)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at scala.collection.AbstractTraversable.map(Traversable.scala:104)
~[flink-dist_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.BatchPlanner.translateToPlan(BatchPlanner.scala:85)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.planner.delegation.PlannerBase.translate(PlannerBase.scala:167)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translate(TableEnvironmentImpl.java:1261)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.internal.TableEnvironmentImpl.translateAndClearBuffer(TableEnvironmentImpl.java:1253)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.api.bridge.java.internal.StreamTableEnvironmentImpl.getPipeline(StreamTableEnvironmentImpl.java:327)
~[flink-table-blink_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$createPipeline$1(ExecutionContext.java:286)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:257)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.ExecutionContext.createPipeline(ExecutionContext.java:283)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
at
org.apache.flink.table.client.gateway.local.LocalExecutor.executeQueryInternal(LocalExecutor.java:521)
~[flink-sql-client_2.11-1.12-SNAPSHOT.jar:1.12-SNAPSHOT]
... 8 more
{code})
> Hive connector fails with IllegalAccessError if submitted as usercode
> ---------------------------------------------------------------------
>
> Key: FLINK-19398
> URL: https://issues.apache.org/jira/browse/FLINK-19398
> Project: Flink
> Issue Type: Bug
> Components: Connectors / Hive
> Affects Versions: 1.12.0, 1.11.2
> Reporter: Fabian Hueske
> Assignee: Yun Gao
> Priority: Blocker
> Labels: pull-request-available
> Fix For: 1.12.0, 1.11.4
>
>
> Using Flink's Hive connector fails if the dependency is loaded with the user
> code classloader with the following exception.
> {code:java}
> java.lang.IllegalAccessError: tried to access method
> org.apache.flink.streaming.api.functions.sink.filesystem.Buckets.<init>(Lorg/apache/flink/core/fs/Path;Lorg/apache/flink/streaming/api/functions/sink/filesystem/BucketAssigner;Lorg/apache/flink/streaming/api/functions/sink/filesystem/BucketFactory;Lorg/apache/flink/streaming/api/functions/sink/filesystem/BucketWriter;Lorg/apache/flink/streaming/api/functions/sink/filesystem/RollingPolicy;ILorg/apache/flink/streaming/api/functions/sink/filesystem/OutputFileConfig;)V
> from class
> org.apache.flink.streaming.api.functions.sink.filesystem.HadoopPathBasedBulkFormatBuilder
> at
> org.apache.flink.streaming.api.functions.sink.filesystem.HadoopPathBasedBulkFormatBuilder.createBuckets(HadoopPathBasedBulkFormatBuilder.java:127)
>
> ~[flink-connector-hive_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.table.filesystem.stream.StreamingFileWriter.initializeState(StreamingFileWriter.java:81)
> ~[flink-table-blink_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.api.operators.StreamOperatorStateHandler.initializeOperatorState(StreamOperatorStateHandler.java:106)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.api.operators.AbstractStreamOperator.initializeState(AbstractStreamOperator.java:258)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.runtime.tasks.OperatorChain.initializeStateAndOpenOperators(OperatorChain.java:290)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.lambda$beforeInvoke$0(StreamTask.java:479)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.runtime.tasks.StreamTaskActionExecutor$SynchronizedStreamTaskActionExecutor.runThrowing(StreamTaskActionExecutor.java:92)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.beforeInvoke(StreamTask.java:475)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at
> org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:528)
> ~[flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at org.apache.flink.runtime.taskmanager.Task.doRun(Task.java:721)
> [flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> at org.apache.flink.runtime.taskmanager.Task.run(Task.java:546)
> [flink-dist_2.12-1.11.2-stream2-SNAPSHOT.jar:1.11.2-stream2-SNAPSHOT]
> {code}
> The problem is the constructor of {{Buckets}} with default visibility which
> is called from {{HadoopPathBasedBulkFormatBuilder}}. This works as long as
> both classes are loaded with the same classloader but when they are loaded in
> different classloaders, the access fails.
> {{Buckets}} is loaded with the system CL because it is part of
> flink-streaming-java.
>
> To solve this issue, we should change the visibility of the {{Buckets}}
> constructor to {{public}}.
>
--
This message was sent by Atlassian Jira
(v8.3.4#803005)