Github user hvanhovell commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12271#discussion_r59132688
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala ---
    @@ -134,82 +136,117 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {
       }
     
       /**
    -   * Create a [[CatalogStorageFormat]]. This is part of the [[CreateTableAsSelect]] command.
    +   * Create a [[CatalogStorageFormat]] for creating tables.
        */
       override def visitCreateFileFormat(
          ctx: CreateFileFormatContext): CatalogStorageFormat = withOrigin(ctx) {
    -    if (ctx.storageHandler == null) {
    -      typedVisit[CatalogStorageFormat](ctx.fileFormat)
    -    } else {
    -      visitStorageHandler(ctx.storageHandler)
    +    (ctx.fileFormat, ctx.storageHandler) match {
    +      case (fileFormat, null) if fileFormat != null =>
    +        fileFormat match {
    +          // Expected format: INPUTFORMAT input_format OUTPUTFORMAT output_format
    +          case c: TableFileFormatContext => visitTableFileFormat(c)
    +          // Expected format: SEQUENCEFILE | TEXTFILE | RCFILE | ORC | PARQUET | AVRO
    +          case c: GenericFileFormatContext => visitGenericFileFormat(c)
    +        }
    +      case (null, storageHandler) if storageHandler != null =>
    +        throw new ParseException("Operation not allowed: ... STORED BY 
storage_handler ...", ctx)
    +      case (null, null) =>
    +        throw new ParseException("expected one of STORED AS or STORED BY", 
ctx)
    +      case _ =>
    +        throw new ParseException("expected either STORED AS or STORED BY, 
not both", ctx)
         }
       }
     
       /**
    -   * Create a [[CreateTableAsSelect]] command.
    +   * Create a table. TODO: expand this comment!
    +   *
    +   * For example:
    +   * {{{
    +   *   CREATE [TEMPORARY] [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name
    +   *   [(col1 data_type [COMMENT col_comment], ...)]
    +   *   [COMMENT table_comment]
    +   *   [PARTITIONED BY (col3 data_type [COMMENT col_comment], ...)]
    +   *   [CLUSTERED BY (col1, ...) [SORTED BY (col1 [ASC|DESC], ...)] INTO num_buckets BUCKETS]
    +   *   [SKEWED BY (col1, col2, ...) ON ((col_value, col_value, ...), ...)
    +   *   [STORED AS DIRECTORIES]
    +   *   [ROW FORMAT row_format]
    +   *   [STORED AS file_format | STORED BY storage_handler_class [WITH SERDEPROPERTIES (...)]]
    +   *   [LOCATION path]
    +   *   [TBLPROPERTIES (property_name=property_value, ...)]
    +   *   [AS select_statement];
    +   * }}}
        */
       override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = {
    --- End diff --
    
    Could you add withOrigin here? I apparently forgot to...
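    
    For reference, a minimal sketch (not the actual change in this PR) of what wrapping the method in withOrigin could look like, following the pattern already used by visitCreateFileFormat above:
    
        // Hypothetical sketch: wrap the visitor body in withOrigin(ctx) so that plan
        // nodes built inside it record the position of the CREATE TABLE statement
        // in the original SQL text.
        override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = withOrigin(ctx) {
          // ... existing table-creation logic from this pull request, unchanged ...
        }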

