Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16422#discussion_r126344451
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala ---
    @@ -626,6 +624,120 @@ case class DescribeTableCommand(
       }
     }
     
    +/**
    + * A command to list the info for a column, including name, data type, 
column stats and comment.
    + * This function creates a [[DescribeColumnCommand]] logical plan.
    + *
    + * The syntax of using this command in SQL is:
    + * {{{
    + *   DESCRIBE [EXTENDED|FORMATTED] table_name column_name;
    + * }}}
    + */
    +case class DescribeColumnCommand(
    +    table: TableIdentifier,
    +    colNameParts: Seq[String],
    +    isExtended: Boolean)
    +  extends RunnableCommand {
    +
    +  override val output: Seq[Attribute] = {
    +    // The displayed names are based on Hive.
    +    // (Link for the corresponding Hive Jira: 
https://issues.apache.org/jira/browse/HIVE-7050)
    +    if (isExtended) {
    +      Seq(
    +        AttributeReference("col_name", StringType, nullable = false,
    +          new MetadataBuilder().putString("comment", "name of the 
column").build())(),
    +        AttributeReference("data_type", StringType, nullable = false,
    +          new MetadataBuilder().putString("comment", "data type of the 
column").build())(),
    +        AttributeReference("min", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "min value of the 
column").build())(),
    +        AttributeReference("max", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "max value of the 
column").build())(),
    +        AttributeReference("num_nulls", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "number of nulls of 
the column").build())(),
    +        AttributeReference("distinct_count", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "distinct count of 
the column").build())(),
    +        AttributeReference("avg_col_len", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment",
    +            "average length of the values of the column").build())(),
    +        AttributeReference("max_col_len", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment",
    +            "maximum length of the values of the column").build())(),
    +        AttributeReference("comment", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "comment of the 
column").build())())
    +    } else {
    +      Seq(
    +        AttributeReference("col_name", StringType, nullable = false,
    +          new MetadataBuilder().putString("comment", "name of the 
column").build())(),
    +        AttributeReference("data_type", StringType, nullable = false,
    +          new MetadataBuilder().putString("comment", "data type of the 
column").build())(),
    +        AttributeReference("comment", StringType, nullable = true,
    +          new MetadataBuilder().putString("comment", "comment of the 
column").build())())
    +    }
    +  }
    +
    +  override def run(sparkSession: SparkSession): Seq[Row] = {
    +    val catalog = sparkSession.sessionState.catalog
    +    val resolver = sparkSession.sessionState.conf.resolver
    +    val relation = sparkSession.table(table).queryExecution.analyzed
    +    val field = {
    +      relation.resolve(colNameParts, resolver).getOrElse {
    +        throw new AnalysisException(s"Column 
${UnresolvedAttribute(colNameParts).name} does not " +
    +          s"exist")
    +      }
    +    }
    +    if (!field.isInstanceOf[Attribute]) {
    +      // If the field is not an attribute after `resolve`, then it's a 
nested field.
    +      throw new AnalysisException(s"DESC TABLE COLUMN command is not 
supported for nested column:" +
    +        s" ${UnresolvedAttribute(colNameParts).name}")
    +    }
    +
    +    val catalogTable = catalog.getTempViewOrPermanentTableMetadata(table)
    +    val colStats = catalogTable.stats.map(_.colStats).getOrElse(Map.empty)
    +    val cs = colStats.get(field.name)
    --- End diff --
    
    nit: `val colStats = catalogTable.stats.flatMap(_.colStats.get(field.name))`


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to