Github user tejasapatil commented on a diff in the pull request: https://github.com/apache/spark/pull/18975#discussion_r136726866 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala --- @@ -1509,4 +1509,86 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) { query: LogicalPlan): LogicalPlan = { RepartitionByExpression(expressions, query, conf.numShufflePartitions) } + + /** + * Return the parameters for [[InsertIntoDir]] logical plan. + * + * Expected format: + * {{{ + * INSERT OVERWRITE DIRECTORY + * [path] + * [OPTIONS table_property_list] + * select_statement; + * }}} + */ + override def visitInsertOverwriteDir( + ctx: InsertOverwriteDirContext): InsertDirParams = withOrigin(ctx) { + if (ctx.LOCAL != null) { + throw new ParseException( + "LOCAL is not supported in INSERT OVERWRITE DIRECTORY to data source", ctx) + } + + val options = Option(ctx.options).map(visitPropertyKeyValues).getOrElse(Map.empty) + var storage = DataSource.buildStorageFormatFromOptions(options) + + val path = Option(ctx.path).map(string).getOrElse("") + + if (!path.isEmpty && storage.locationUri.isDefined) { + throw new ParseException( + "Directory path and 'path' in OPTIONS are both used to indicate the directory path, " + + "you can only specify one of them.", ctx) + } + if (path.isEmpty && storage.locationUri.isEmpty) { + throw new ParseException( + "You need to specify directory path or 'path' in OPTIONS, but not both", ctx) --- End diff -- nit: the `but not both` wording does not match this check: this branch fires only when neither `path` nor `locationUri` is specified, so the message should say that at least one of them is required (the "both specified" case is already handled by the preceding check).
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org