AngersZhuuuu commented on a change in pull request #30057: URL: https://github.com/apache/spark/pull/30057#discussion_r506791318
########## File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/InsertIntoHadoopFsRelationCommand.scala ########## @@ -139,6 +140,34 @@ case class InsertIntoHadoopFsRelationCommand( } if (doInsertion) { + // For dynamic partition overwrite, we do not delete partition directories ahead. + // We write to staging directories and move to final partition directories after writing + // job is done. So it is ok to have outputPath try to overwrite the input path. + if (mode == SaveMode.Overwrite && !dynamicPartitionOverwrite) { + val inputPaths = child.collect { + case scan: FileSourceScanExec => + scan.dynamicallySelectedPartitions.flatMap(_.files.map { + case fileStatus if !fileStatus.isDirectory => fileStatus.getPath.getParent + case fileStatus => fileStatus.getPath + }) + }.flatten + val finalOutputPath = + if (staticPartitions.nonEmpty && partitionColumns.length == staticPartitions.size) { + val staticPathFragment = + PartitioningUtils.getPathFragment(staticPartitions, partitionColumns) + if (customPartitionLocations.contains(staticPartitions)) { + new Path(customPartitionLocations.getOrElse(staticPartitions, staticPathFragment)) + } else { + new Path(qualifiedOutputPath, staticPathFragment) + } + } else { + outputPath + } + if (inputPaths.exists(isSubDir(_, finalOutputPath, hadoopConf))) { + throw new AnalysisException( + s"Cannot overwrite a path that is also being read from.") + } + } Review comment: If we don't care about the dynamic partition filter, we can just add a check rule about this issue on SparkPlan using `selectedPartitions`. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org