GitHub user gatorsmile commented on a diff in the pull request: https://github.com/apache/spark/pull/21711#discussion_r202567965 --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala --- @@ -138,17 +138,36 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat } /** - * Checks the validity of data column names. Hive metastore disallows the table to use comma in - * data column names. Partition columns do not have such a restriction. Views do not have such - * a restriction. + * Checks the validity of data column names. Hive metastore disallows the table to use some + * special characters (',', ':', and ';') in data column names, including nested column names. + * Partition columns do not have such a restriction. Views do not have such a restriction. */ private def verifyDataSchema( tableName: TableIdentifier, tableType: CatalogTableType, dataSchema: StructType): Unit = { if (tableType != VIEW) { - dataSchema.map(_.name).foreach { colName => - if (colName.contains(",")) { - throw new AnalysisException("Cannot create a table having a column whose name contains " + - s"commas in Hive metastore. Table: $tableName; Column: $colName") + val invalidChars = Seq(",", ":", ";") + def verifyNestedColumnNames(schema: StructType): Unit = schema.foreach { f => + f.dataType match { + case st: StructType => verifyNestedColumnNames(st) + case _ if invalidChars.exists(f.name.contains) => + val errMsg = "Cannot create a table having a nested column whose name contains " + + s"invalid characters (${invalidChars.map(c => s"'$c'").mkString(", ")}) " + --- End diff -- Normally, in this case, what we do is something like: ```scala val invalidCharsString = invalidChars.map(c => s"'$c'").mkString(", ") val errMsg = "Cannot create a table having a nested column whose name contains " + s"invalid characters ($invalidCharsString) in Hive metastore. Table: $tableName; " + s"Column: ${f.name}" ```
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org