Github user gvramana commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/1641#discussion_r156700781
  
    --- Diff: 
integration/spark2/src/main/scala/org/apache/carbondata/spark/rdd/CarbonDataRDDFactory.scala
 ---
    @@ -483,20 +491,21 @@ object CarbonDataRDDFactory {
                          s"${ carbonLoadModel.getDatabaseName }.${ 
carbonLoadModel.getTableName }")
             throw new Exception(status(0)._2._2.errorMsg)
           }
    -      // if segment is empty then fail the data load
    +
    +      var newEntryLoadStatus =
           if 
(!carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable.isChildDataMap &&
               !CarbonLoaderUtil.isValidSegment(carbonLoadModel, 
carbonLoadModel.getSegmentId.toInt)) {
    -        // update the load entry in table status file for changing the 
status to marked for delete
    -        CommonUtil.updateTableStatusForFailure(carbonLoadModel)
    -        LOGGER.info("********starting clean up**********")
    -        CarbonLoaderUtil.deleteSegment(carbonLoadModel, 
carbonLoadModel.getSegmentId.toInt)
    -        LOGGER.info("********clean up done**********")
    +
             LOGGER.audit(s"Data load is failed for " +
                          s"${ carbonLoadModel.getDatabaseName }.${ 
carbonLoadModel.getTableName }" +
                          " as there is no data to load")
             LOGGER.warn("Cannot write load metadata file as data load failed")
    -        throw new Exception("No Data to load")
    +
    --- End diff ---
    
    Please write a comment here: 'as no records were loaded in the new segment, 
the new segment should be deleted'


---

Reply via email to