[ https://issues.apache.org/jira/browse/HUDI-7908?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17867461#comment-17867461 ]

Vova Kolmakov edited comment on HUDI-7908 at 7/20/24 1:25 AM:
--------------------------------------------------------------

Found 2 bugs:
 * dataSchema evaluation is incorrect;
 * the options passed to buildReaderWithPartitionValues don't actually contain the precombineField (see the sketch below).
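
A minimal sketch (Scala, not the actual Hudi code) of how these two bugs can surface: if dataSchema is derived by stripping partition columns from the table schema, a precombine field that doubles as the partition field silently disappears, and the reader then has nothing to merge on; likewise the options handed to buildReaderWithPartitionValues need to carry the precombine field explicitly. Field names and the options key below are placeholders for illustration, not Hudi's real configuration keys.

{code:scala}
import org.apache.spark.sql.types._

object PrecombinePartitionOverlapSketch {
  def main(args: Array[String]): Unit = {
    // Table from the reproduction test: id, name, ts (precombine), price; partitioned by ts.
    val tableSchema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("name", StringType),
      StructField("ts", LongType),
      StructField("price", IntegerType)))
    val partitionFields = Set("ts")
    val precombineField = "ts"

    // Bug 1 (illustration): "table schema minus partition columns" drops the precombine field.
    val naiveDataSchema =
      StructType(tableSchema.filterNot(f => partitionFields.contains(f.name)))
    assert(!naiveDataSchema.fieldNames.contains(precombineField))

    // A fix has to keep the precombine column in the schema handed to the file group reader
    // even when it is also a partition column.
    val fixedDataSchema =
      if (naiveDataSchema.fieldNames.contains(precombineField)) naiveDataSchema
      else StructType(naiveDataSchema.fields :+ tableSchema(precombineField))

    // Bug 2 (illustration): the options map passed to buildReaderWithPartitionValues must
    // expose the precombine field; "precombine.field.placeholder" is a made-up key.
    val readerOptions = Map("precombine.field.placeholder" -> precombineField)
    assert(readerOptions.values.exists(_ == precombineField))

    println(s"fixed dataSchema fields: ${fixedDataSchema.fieldNames.mkString(", ")}")
  }
}
{code}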


was (Author: JIRAUSER300830):
found 2 bugs

> HoodieFileGroupReader fails if precombine and partition fields are same
> -----------------------------------------------------------------------
>
>                 Key: HUDI-7908
>                 URL: https://issues.apache.org/jira/browse/HUDI-7908
>             Project: Apache Hudi
>          Issue Type: Bug
>            Reporter: Sagar Sumit
>            Assignee: Vova Kolmakov
>            Priority: Major
>              Labels: pull-request-available
>             Fix For: 1.0.0
>
>
> {code:java}
> test(s"Test INSERT INTO with upsert operation type") {
>   if (HoodieSparkUtils.gteqSpark3_2) {
>     withTempDir { tmp =>
>       Seq("mor").foreach { tableType =>
>         val tableName = generateTableName
>         spark.sql(
>           s"""
>              |create table $tableName (
>              |  id int,
>              |  name string,
>              |  ts long,
>              |  price int
>              |) using hudi
>              |partitioned by (ts)
>              |tblproperties (
>              |  type = '$tableType',
>              |  primaryKey = 'id',
>              |  preCombineField = 'ts'
>              |)
>              |location '${tmp.getCanonicalPath}/$tableName'
>              |""".stripMargin
>         )
>         // Test insert into with upsert operation type
>         spark.sql(
>           s"""
>              | insert into $tableName
>              | values (1, 'a1', 1000, 10), (2, 'a2', 2000, 20), (3, 'a3', 3000, 30), (4, 'a4', 2000, 10), (5, 'a5', 3000, 20), (6, 'a6', 4000, 30)
>              | """.stripMargin
>         )
>         checkAnswer(s"select id, name, price, ts from $tableName where 
> price>3000")(
>           Seq(6, "a6", 4000, 30)
>         )
>         // Test update
>         spark.sql(s"update $tableName set price = price + 1 where id = 6")
>         checkAnswer(s"select id, name, price, ts from $tableName where 
> price>3000")(
>           Seq(6, "a6", 4001, 30)
>         )
>       }
>     }
>   }
> } {code}


