[
https://issues.apache.org/jira/browse/HIVE-29085?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Soumyakanti Das updated HIVE-29085:
-----------------------------------
Description:
When both hive.sql.table and hive.sql.query are set, we get the following
exception:
{code:java}
See ./ql/target/tmp/log/hive.log or ./itests/qtest/target/tmp/log/hive.log, or
check ./ql/target/surefire-reports or ./itests/qtest/target/surefire-reports/
for specific test cases logs.
org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException:
MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Caught
exception while initializing the SqlSerDe: null)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1431)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1436)
at org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.createTableNonReplaceMode(CreateTableOperation.java:158)
at org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.execute(CreateTableOperation.java:116)
at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84)
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:354)
at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:327)
at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:244)
at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:105)
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:345)
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:189)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:142)
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:137)
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:190)
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:235)
at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:258)
at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:202)
at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:128)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:426)
at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:357)
at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:746)
at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:716)
at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:115)
at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:140)
at org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
at java.base/java.lang.reflect.Method.invoke(Method.java:580)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:119)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runners.Suite.runChild(Suite.java:128)
at org.junit.runners.Suite.runChild(Suite.java:27)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:90)
at org.junit.rules.RunRules.evaluate(RunRules.java:20)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:316)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:240)
at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:214)
at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:155)
at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:385)
at org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:162)
at org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:507)
at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:495)
Caused by: java.lang.RuntimeException:
MetaException(message:org.apache.hadoop.hive.serde2.SerDeException Caught
exception while initializing the SqlSerDe: null)
at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:359)
at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:343)
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1366)
... 63 more
Caused by: MetaException(message:org.apache.hadoop.hive.serde2.SerDeException
Caught exception while initializing the SqlSerDe: null)
at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:101)
at org.apache.hadoop.hive.metastore.HiveMetaStoreUtils.getDeserializer(HiveMetaStoreUtils.java:80)
at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:357)
... 65 more
{code}
This can be reproduced by copying the following into {{test.q}}:
{noformat}
CREATE EXTERNAL TABLE test_external_table_postgres
(
id INT,
name STRING
)
STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
TBLPROPERTIES (
"hive.sql.database.type" = "POSTGRES",
"hive.sql.jdbc.driver" = "org.postgresql.Driver",
"hive.sql.jdbc.url" = "jdbc:postgresql://localhost:5432/test",
"hive.sql.dbcp.username" = "hiveuser",
"hive.sql.dbcp.password" = "password",
"hive.sql.table" = "test",
"hive.sql.query" = "select id, name from test"
);{noformat}
And running:
{noformat}
mvn test -pl itests/qtest -Pitests -Dtest=TestMiniLlapLocalCliDriver -Dtest.output.overwrite=true -Dqfile="test.q"{noformat}
The documentation states that the user should provide one property or the other, as can
be seen here:
[https://hive.apache.org/docs/latest/jdbc-storage-handler_95651916/]
{noformat}
hive.sql.table / hive.sql.query: You will need to specify either
"hive.sql.table" or "hive.sql.query" to tell how to get data from jdbc
database. "hive.sql.table" denotes a single table, and "hive.sql.query" denotes
an arbitrary sql query.{noformat}
Setting both properties together should not be allowed, since a user could point the two
properties at two different tables; currently, however, the statement does not fail
gracefully.
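A graceful failure could be achieved by validating the table properties up front, before
the SerDe is initialized. The sketch below is illustrative only: the class name, method
name, and exception type are assumptions rather than the actual Hive code path. It simply
checks that exactly one of the two properties is set and reports a clear error otherwise.
{code:java}
import java.util.Properties;

public final class JdbcSourcePropertyCheck {

  // Hypothetical helper: verify that exactly one of hive.sql.table and
  // hive.sql.query is set, and fail with a descriptive message otherwise.
  public static void validate(Properties tblProps) {
    String table = tblProps.getProperty("hive.sql.table");
    String query = tblProps.getProperty("hive.sql.query");

    boolean hasTable = table != null && !table.trim().isEmpty();
    boolean hasQuery = query != null && !query.trim().isEmpty();

    if (hasTable && hasQuery) {
      throw new IllegalArgumentException(
          "Only one of \"hive.sql.table\" and \"hive.sql.query\" may be set; found both.");
    }
    if (!hasTable && !hasQuery) {
      throw new IllegalArgumentException(
          "One of \"hive.sql.table\" or \"hive.sql.query\" must be set.");
    }
  }

  private JdbcSourcePropertyCheck() {
  }
}
{code}
With a check of this kind wired into the storage handler or SerDe initialization, the
CREATE EXTERNAL TABLE above would fail with a clear message instead of the opaque
"Caught exception while initializing the SqlSerDe: null".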
> CREATE EXTERNAL TABLE fails for JDBC tables when both hive.sql.table and
> hive.sql.query are set
> -----------------------------------------------------------------------------------------------
>
> Key: HIVE-29085
> URL: https://issues.apache.org/jira/browse/HIVE-29085
> Project: Hive
> Issue Type: Bug
> Components: JDBC storage handler
> Affects Versions: 4.1.0
> Reporter: Soumyakanti Das
> Assignee: Soumyakanti Das
> Priority: Major
> Labels: pull-request-available
>
--
This message was sent by Atlassian Jira
(v8.20.10#820010)