[ https://issues.apache.org/jira/browse/HIVE-28285?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17850412#comment-17850412 ]

Stamatis Zampetakis commented on HIVE-28285:
--------------------------------------------

This is a regression caused by HIVE-27487.

> Exception when querying JDBC tables with Hive/DB column types mismatch
> ----------------------------------------------------------------------
>
>                 Key: HIVE-28285
>                 URL: https://issues.apache.org/jira/browse/HIVE-28285
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2
>    Affects Versions: 4.0.0
>            Reporter: Stamatis Zampetakis
>            Assignee: Stamatis Zampetakis
>            Priority: Major
>
> Queries over JDBC tables fail at runtime when both of the following conditions hold:
>  # there is a mismatch between the Hive type and the database type for some columns
>  # CBO is not used
> CBO may not be used when compiling the query for various reasons (a sketch of the first two cases follows this list):
>  * CBO is explicitly disabled (via the hive.cbo.enable property)
>  * the query is explicitly not supported by CBO (e.g., it contains a DISTRIBUTE BY clause)
>  * a problem/bug in compilation causes CBO to be skipped
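> A minimal sketch of the first two cases (reusing the h_type_table and cdate names from the DATE/TIMESTAMP example below):
> {code:sql}
> -- Case 1: CBO explicitly disabled for the session
> SET hive.cbo.enable=false;
> SELECT * FROM h_type_table;
> -- Case 2: a query shape that CBO does not handle, e.g. a DISTRIBUTE BY clause
> SELECT * FROM h_type_table DISTRIBUTE BY cdate;
> {code}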
> The examples below demonstrate the problem with Postgres, but the problem itself is not database specific (although different errors may surface depending on the underlying database). Different type mappings may also lead to different errors.
> h3. Map Postgres DATE to Hive TIMESTAMP
> +Postgres+
> {code:sql}
> create table date_table (cdate date);
> insert into date_table values ('2024-05-29');
> {code}
> +Hive+
> {code:sql}
> CREATE TABLE h_type_table (cdate timestamp)
> STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES (
>     "hive.sql.database.type" = "POSTGRES",
>     "hive.sql.jdbc.driver" = "org.postgresql.Driver",
>     "hive.sql.jdbc.url" = "jdbc:postgresql://...",
>     "hive.sql.dbcp.username" = "user",
>     "hive.sql.dbcp.password" = "pwd",
>     "hive.sql.table" = "date_table"
> );
> {code}
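> The result and error that follow were presumably obtained with a plain SELECT over the table, as in the later examples:
> {code:sql}
> -- run once with CBO on (result below) and once with CBO off (error below)
> SELECT * FROM h_type_table;
> {code}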
> +Hive Result (CBO on)+
> |2024-05-29 00:00:00|
> +Error (CBO off)+
> {noformat}
> java.lang.RuntimeException: java.io.IOException: java.lang.IllegalArgumentException: Cannot create timestamp, parsing error 2024-05-29
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:210)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.execute(FetchTask.java:95)
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:212)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:154)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:149)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:230)
>       at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:732)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:702)
>       at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:116)
>       at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
>       at org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
> Caused by: java.io.IOException: java.lang.IllegalArgumentException: Cannot create timestamp, parsing error 2024-05-29
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:628)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:535)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:194)
>       ... 55 more
> Caused by: java.lang.IllegalArgumentException: Cannot create timestamp, parsing error 2024-05-29
>       at org.apache.hadoop.hive.common.type.Timestamp.valueOf(Timestamp.java:194)
>       at org.apache.hive.storage.jdbc.JdbcSerDe.deserialize(JdbcSerDe.java:314)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:609)
>       ... 57 more
> Caused by: java.time.format.DateTimeParseException: Text '2024-05-29' could not be parsed: Unable to obtain LocalDateTime from TemporalAccessor: {},ISO resolved to 2024-05-29 of type java.time.format.Parsed
>       at java.time.format.DateTimeFormatter.createError(DateTimeFormatter.java:1920)
>       at java.time.format.DateTimeFormatter.parse(DateTimeFormatter.java:1855)
>       at java.time.LocalDateTime.parse(LocalDateTime.java:492)
>       at org.apache.hadoop.hive.common.type.Timestamp.valueOf(Timestamp.java:188)
>       ... 59 more
> Caused by: java.time.DateTimeException: Unable to obtain LocalDateTime from TemporalAccessor: {},ISO resolved to 2024-05-29 of type java.time.format.Parsed
>       at java.time.LocalDateTime.from(LocalDateTime.java:461)
>       at java.time.format.Parsed.query(Parsed.java:226)
>       at java.time.format.DateTimeFormatter.parse(DateTimeFormatter.java:1851)
>       ... 61 more
> Caused by: java.time.DateTimeException: Unable to obtain LocalTime from TemporalAccessor: {},ISO resolved to 2024-05-29 of type java.time.format.Parsed
>       at java.time.LocalTime.from(LocalTime.java:409)
>       at java.time.LocalDateTime.from(LocalDateTime.java:457)
>       ... 63 more
> {noformat}
> h3. Map Postgres NUMERIC to Hive BIGINT
> +Postgres+
> {code:sql}
> create table numeric_table (cnumeric numeric);
> insert into numeric_table values (60.242);
> insert into numeric_table values (90);
> insert into numeric_table values (50.1);
> {code}
> +Hive+
> {code:sql}
> CREATE TABLE h_type_table (cnumeric bigint)
> STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES ( ...,    "hive.sql.table" = "numeric_table");
> SELECT * FROM h_type_table;
> {code}
> +Hive Results (CBO on)+
> |60|
> |90|
> |50|
> +Error (CBO off)+
> {noformat}
> java.lang.RuntimeException: java.io.IOException: java.io.IOException: org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException: Caught exception while trying to execute query: Error while trying to get column names: Decimal precision out of allowed range [1,38]
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:210)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.execute(FetchTask.java:95)
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:212)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:154)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:149)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:230)
>       at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:732)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:702)
>       at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:116)
>       at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
>       at org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
> Caused by: java.io.IOException: java.io.IOException: org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException: Caught exception while trying to execute query: Error while trying to get column names: Decimal precision out of allowed range [1,38]
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:628)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:535)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:194)
>       ... 55 more
> Caused by: java.io.IOException: org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException: Caught exception while trying to execute query: Error while trying to get column names: Decimal precision out of allowed range [1,38]
>       at org.apache.hive.storage.jdbc.JdbcRecordReader.next(JdbcRecordReader.java:85)
>       at org.apache.hive.storage.jdbc.JdbcRecordReader.next(JdbcRecordReader.java:35)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:595)
>       ... 57 more
> Caused by: org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException: Caught exception while trying to execute query: Error while trying to get column names: Decimal precision out of allowed range [1,38]
>       at org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor.getRecordIterator(GenericJdbcDatabaseAccessor.java:254)
>       at org.apache.hive.storage.jdbc.JdbcRecordReader.next(JdbcRecordReader.java:58)
>       ... 59 more
> Caused by: org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException: Error while trying to get column names: Decimal precision out of allowed range [1,38]
>       at org.apache.hive.storage.jdbc.dao.JdbcRecordIterator.<init>(JdbcRecordIterator.java:85)
>       at org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor.getRecordIterator(GenericJdbcDatabaseAccessor.java:249)
>       ... 60 more
> Caused by: java.lang.IllegalArgumentException: Decimal precision out of allowed range [1,38]
>       at org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils.validateParameter(HiveDecimalUtils.java:44)
>       at org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo.<init>(DecimalTypeInfo.java:36)
>       at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.createPrimitiveTypeInfo(TypeInfoFactory.java:165)
>       at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getPrimitiveTypeInfo(TypeInfoFactory.java:117)
>       at org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory.getDecimalTypeInfo(TypeInfoFactory.java:186)
>       at org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor.lambda$static$0(GenericJdbcDatabaseAccessor.java:102)
>       at org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor.getColumnMetadata(GenericJdbcDatabaseAccessor.java:154)
>       at org.apache.hive.storage.jdbc.dao.GenericJdbcDatabaseAccessor.getColTypesFromRS(GenericJdbcDatabaseAccessor.java:174)
>       at org.apache.hive.storage.jdbc.dao.JdbcRecordIterator.<init>(JdbcRecordIterator.java:72)
>       ... 61 more
> {noformat}
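> The metadata failure above presumably comes from the unconstrained NUMERIC column: the precision reported by the JDBC driver falls outside Hive's allowed DECIMAL range of [1,38]. A hypothetical variant of the Postgres table, for illustration only (not part of the reported reproduction):
> {code:sql}
> -- hypothetical: an explicit precision/scale keeps the reported precision
> -- within Hive's DECIMAL range of [1,38]
> create table numeric_table (cnumeric numeric(10,3));
> {code}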
> h3. Map Postgres VARCHAR to Hive INT
> +Postgres+
> {code:sql}
> create table varchar_table (cvarchar varchar(20));
> insert into varchar_table values ('10');
> insert into varchar_table values ('20');
> insert into varchar_table values ('30.05');
> {code}
> +Hive+
> {code:sql}
> CREATE TABLE h_type_table (cvarchar int)
> STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
> TBLPROPERTIES ( ...,    "hive.sql.table" = "varchar_table");
> SELECT * FROM h_type_table;
> {code}
> +Hive Results (CBO on)+
> |10|
> |20|
> |30|
> +Error (CBO off)+
> {noformat}
> java.lang.RuntimeException: java.io.IOException: java.lang.NumberFormatException: For input string: "30.05"
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:210)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.execute(FetchTask.java:95)
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:212)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:154)
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:149)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185)
>       at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:230)
>       at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201)
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425)
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:732)
>       at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:702)
>       at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:116)
>       at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157)
>       at org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
> Caused by: java.io.IOException: java.lang.NumberFormatException: For input string: "30.05"
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:628)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.pushRow(FetchOperator.java:535)
>       at org.apache.hadoop.hive.ql.exec.FetchTask.executeInner(FetchTask.java:194)
>       ... 55 more
> Caused by: java.lang.NumberFormatException: For input string: "30.05"
>       at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
>       at java.lang.Integer.parseInt(Integer.java:580)
>       at java.lang.Integer.valueOf(Integer.java:766)
>       at org.apache.hive.storage.jdbc.JdbcSerDe.deserialize(JdbcSerDe.java:242)
>       at org.apache.hadoop.hive.ql.exec.FetchOperator.getNextRow(FetchOperator.java:609)
>       ... 57 more
> {noformat}


