Github user yhuai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16296#discussion_r94698030
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala ---
    @@ -342,42 +342,46 @@ class SparkSqlAstBuilder(conf: SQLConf) extends 
AstBuilder {
       }
     
       /**
    -   * Create a data source table, returning a [[CreateTable]] logical plan.
    +   * Create a table, returning a [[CreateTable]] logical plan.
        *
        * Expected format:
        * {{{
    -   *   CREATE [EXTERNAL] TABLE [IF NOT EXISTS] [db_name.]table_name
    +   *   CREATE [TEMPORARY] TABLE [IF NOT EXISTS] [db_name.]table_name
        *   USING table_provider
        *   [OPTIONS table_property_list]
        *   [PARTITIONED BY (col_name, col_name, ...)]
        *   [CLUSTERED BY (col_name, col_name, ...)
        *    [SORTED BY (col_name [ASC|DESC], ...)]
        *    INTO num_buckets BUCKETS
        *   ]
    +   *   [LOCATION path]
    +   *   [COMMENT table_comment]
        *   [AS select_statement];
        * }}}
        */
    -  override def visitCreateTableUsing(ctx: CreateTableUsingContext): 
LogicalPlan = withOrigin(ctx) {
    +  override def visitCreateTable(ctx: CreateTableContext): LogicalPlan = 
withOrigin(ctx) {
         val (table, temp, ifNotExists, external) = 
visitCreateTableHeader(ctx.createTableHeader)
         if (external) {
           operationNotAllowed("CREATE EXTERNAL TABLE ... USING", ctx)
         }
    -    val options = 
Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty)
    +    val options = 
Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty)
         val provider = ctx.tableProvider.qualifiedName.getText
    -    if (provider.toLowerCase == DDLUtils.HIVE_PROVIDER) {
    -      throw new AnalysisException("Cannot create hive serde table with 
CREATE TABLE USING")
    -    }
         val schema = Option(ctx.colTypeList()).map(createSchema)
         val partitionColumnNames =
           Option(ctx.partitionColumnNames)
             .map(visitIdentifierList(_).toArray)
             .getOrElse(Array.empty[String])
         val bucketSpec = Option(ctx.bucketSpec()).map(visitBucketSpec)
     
    -    // TODO: this may be wrong for non file-based data source like JDBC, 
which should be external
    -    // even there is no `path` in options. We should consider allow the 
EXTERNAL keyword.
    +    val location = Option(ctx.locationSpec).map(visitLocationSpec)
         val storage = DataSource.buildStorageFormatFromOptions(options)
    -    val tableType = if (storage.locationUri.isDefined) {
    +
    +    if (location.isDefined && storage.locationUri.isDefined) {
    +      throw new ParseException("Cannot specify LOCATION when there is 
'path' in OPTIONS.", ctx)
    --- End diff --
    
    Let's make this error message more specific. These two approaches are
equivalent and we only want users to use one of them, right?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to