amrishlal commented on code in PR #8978: URL: https://github.com/apache/hudi/pull/8978#discussion_r1251488891
########## hudi-spark-datasource/hudi-spark-common/src/main/java/org/apache/hudi/DataSourceUtils.java: ########## @@ -227,9 +231,21 @@ public static HoodieWriteResult doWriteOperation(SparkRDDWriteClient client, Jav } } - public static HoodieWriteResult doDeleteOperation(SparkRDDWriteClient client, JavaRDD<HoodieKey> hoodieKeys, - String instantTime) { - return new HoodieWriteResult(client.delete(hoodieKeys, instantTime)); + public static HoodieWriteResult doDeleteOperation(SparkRDDWriteClient client, JavaRDD<Tuple2<HoodieKey, scala.Option<HoodieRecordLocation>>> hoodieKeysAndLocations, + String instantTime, boolean isPrepped) { + + if (isPrepped) { + JavaRDD<HoodieRecord> records = hoodieKeysAndLocations.map(tuple -> { + HoodieRecord record = client.getConfig().getRecordMerger().getRecordType() == HoodieRecord.HoodieRecordType.AVRO Review Comment: Fixed. Moved client outside of the `map` function. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: commits-unsubscribe@hudi.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org