Github user KanakaKumar commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2504#discussion_r202329272

--- Diff: integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonRelation.scala ---
@@ -173,15 +172,38 @@ case class CarbonRelation(
           .getValidAndInvalidSegments.getValidSegments.asScala
         var size = 0L
         // for each segment calculate the size
-        segments.foreach {validSeg =>
-          // for older store
-          if (null != validSeg.getLoadMetadataDetails.getDataSize &&
-              null != validSeg.getLoadMetadataDetails.getIndexSize) {
-            size = size + validSeg.getLoadMetadataDetails.getDataSize.toLong +
-                   validSeg.getLoadMetadataDetails.getIndexSize.toLong
-          } else {
-            size = size + FileFactory.getDirectorySize(
-              CarbonTablePath.getSegmentPath(tablePath, validSeg.getSegmentNo))
+        if (carbonTable.getTableInfo.getFactTable.getTableProperties.asScala
+              .get(CarbonCommonConstants.FLAT_FOLDER).isDefined &&
+            carbonTable.getTableInfo.getFactTable.getTableProperties.asScala
+              (CarbonCommonConstants.FLAT_FOLDER).toBoolean) {
+          val tableDirectorySize = FileFactory.getDirectorySize(carbonTable.getTablePath)
+          val metaDirectorySize = FileFactory.getDirectorySize(carbonTable.getMetadataPath)
+          val factDirectorySize = FileFactory
+            .getDirectorySize(CarbonTablePath.getFactDir(carbonTable.getTablePath))
+          val lockDirSize = FileFactory
+            .getDirectorySize(CarbonTablePath.getLockFilesDirPath(carbonTable.getTablePath))
+          val datamaps = DataMapStoreManager.getInstance().getAllDataMap(carbonTable)
+          var datamapsDirectorySize = 0L
+          if (datamaps.size() > 0) {
+            datamaps.asScala.foreach { datamap =>
+              datamapsDirectorySize = datamapsDirectorySize + FileFactory
+                .getDirectorySize(
+                  carbonTable.getTablePath + datamap.getDataMapSchema.getDataMapName)
--- End diff --

Please use a utility method to build the datamap path, such as org.apache.carbondata.core.util.path.CarbonTablePath#getDataMapStorePath.
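
For illustration, a minimal sketch of what the suggested change could look like, written as a standalone helper. The exact parameter list of CarbonTablePath.getDataMapStorePath is not shown in this thread, so the three-argument form used below (table path, segment id, datamap name) is an assumption; the empty segment id and the helper name DataMapSizeSketch are likewise hypothetical.

    import scala.collection.JavaConverters._

    import org.apache.carbondata.core.datamap.DataMapStoreManager
    import org.apache.carbondata.core.datastore.impl.FileFactory
    import org.apache.carbondata.core.metadata.schema.table.CarbonTable
    import org.apache.carbondata.core.util.path.CarbonTablePath

    // Hypothetical helper: sums the on-disk size of all datamaps of a table,
    // building each datamap directory path through CarbonTablePath instead of
    // concatenating strings by hand.
    object DataMapSizeSketch {
      def dataMapsDirectorySize(carbonTable: CarbonTable): Long = {
        val datamaps = DataMapStoreManager.getInstance().getAllDataMap(carbonTable)
        datamaps.asScala.map { datamap =>
          // Assumed signature: getDataMapStorePath(tablePath, segmentId, dataMapName).
          // An empty segment id is passed so the path points at the datamap root folder.
          val dmPath = CarbonTablePath.getDataMapStorePath(
            carbonTable.getTablePath, "", datamap.getDataMapSchema.getDataMapName)
          FileFactory.getDirectorySize(dmPath)
        }.sum
      }
    }

Compared with the string concatenation in the diff, routing the path through the utility keeps separator handling and the datamap directory layout in one place, which appears to be the reviewer's point.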
---