Indhumathi27 commented on a change in pull request #3688: [CARBONDATA-3765] 
Refactor Index Metadata for CG and FG Indexes
URL: https://github.com/apache/carbondata/pull/3688#discussion_r406071579
 
 

 ##########
 File path: 
integration/spark/src/main/scala/org/apache/spark/sql/execution/command/index/ShowIndexesCommand.scala
 ##########
 @@ -54,69 +62,87 @@ case class ShowIndexesCommand(
   override def processData(sparkSession: SparkSession): Seq[Row] = {
     val carbonTable = CarbonEnv.getCarbonTable(dbNameOp, 
tableName)(sparkSession)
     setAuditTable(carbonTable)
-    getFileIndexInfo(carbonTable) ++ getSIInfo(sparkSession, carbonTable)
-  }
-
-  // get info for 'index datamap'
-  private def getFileIndexInfo(carbonTable: CarbonTable): Seq[Row] = {
-    val indexes = 
DataMapStoreManager.getInstance().getDataMapSchemasOfTable(carbonTable).asScala
-    if (indexes != null && indexes.nonEmpty) {
-      indexes.map { index =>
-        Row(
-          index.getDataMapName,
-          index.getProviderName,
-          index.getIndexColumns.mkString(","),
-          index.getPropertiesAsString,
-          index.getStatus.name(),
-          index.getSyncStatus
-        )
-      }
-    } else {
-      Seq.empty
-    }
+    getIndexInfo(sparkSession, carbonTable)
   }
 
-  // get info for SI
-  private def getSIInfo(sparkSession: SparkSession, carbonTable: CarbonTable): 
Seq[Row] = {
+  private def getIndexInfo(sparkSession: SparkSession, carbonTable: 
CarbonTable): Seq[Row] = {
     CarbonInternalMetastore.refreshIndexInfo(
       carbonTable.getDatabaseName, tableName, carbonTable)(sparkSession)
-    val indexesMap = CarbonInternalScalaUtil.getIndexesMap(carbonTable)
-    if (null == indexesMap) {
-      throw new Exception("Secondary index information is not loaded in main 
table")
-    }
-    val indexTableMap = indexesMap.asScala
-    if (indexTableMap.nonEmpty) {
-      val indexList = indexTableMap.map { indexInfo =>
-        try {
-          val isSITableEnabled = sparkSession.sessionState.catalog
-            .getTableMetadata(TableIdentifier(indexInfo._1, 
dbNameOp)).storage.properties
-            .getOrElse("isSITableEnabled", "true").equalsIgnoreCase("true")
-          if (isSITableEnabled) {
-            (indexInfo._1, indexInfo._2.asScala.mkString(","), "enabled")
-          } else {
-            (indexInfo._1, indexInfo._2.asScala.mkString(","), "disabled")
+    val indexesMap = CarbonIndexUtil.getIndexesMap(carbonTable)
+    if (null != indexesMap) {
+      val indexTableMap = indexesMap.asScala
+      if (indexTableMap.nonEmpty) {
+        val secondaryIndex = 
indexTableMap.get(CarbonIndexProvider.SI.getIndexProviderName)
+        var finalIndexList: Seq[(String, String, String, String, String, 
String)] = Seq.empty
+
+        if (secondaryIndex.isDefined && null != secondaryIndex.get) {
+          val siIterator = secondaryIndex.get.entrySet().iterator()
+          while (siIterator.hasNext) {
+            val indexInfo = siIterator.next()
+            try {
+              val isSITableEnabled = sparkSession.sessionState.catalog
+                .getTableMetadata(TableIdentifier(indexInfo.getKey, 
dbNameOp)).storage.properties
+                .getOrElse("isSITableEnabled", "true").equalsIgnoreCase("true")
+              if (isSITableEnabled) {
+                finalIndexList = finalIndexList :+
+                                 (indexInfo.getKey, "carbondata", 
indexInfo.getValue
+                                   .get(CarbonCommonConstants.INDEX_COLUMNS), 
"NA", "enabled", "NA")
+              } else {
+                finalIndexList = finalIndexList :+
+                                 (indexInfo.getKey, "carbondata", 
indexInfo.getValue
+                                   .get(CarbonCommonConstants
+                                     .INDEX_COLUMNS), "NA", "disabled", "NA")
+              }
+            } catch {
+              case ex: Exception =>
+                LOGGER.error(s"Access storage properties from hive failed for 
index table: ${
+                  indexInfo.getKey
+                }")
+                finalIndexList = finalIndexList :+
+                                 (indexInfo.getKey, "carbondata", 
indexInfo.getValue
+                                   .get(CarbonCommonConstants.INDEX_COLUMNS), 
"NA", "UNKNOWN", "NA")
+            }
+          }
+        }
+
+        indexesMap.asScala
+          .filter(map => 
!map._1.equalsIgnoreCase(CarbonIndexProvider.SI.getIndexProviderName))
 
 Review comment:
  Yes. But if we keep a single map holding both the provider and the index 
info, it will be easier to validate an index, add the index info to Hive, and 
drop an index. Otherwise, we would need to do each of those operations 
separately for both maps.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to