Github user gengliangwang commented on a diff in the pull request: https://github.com/apache/spark/pull/23208#discussion_r240101369 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala --- @@ -17,52 +17,49 @@ package org.apache.spark.sql.execution.datasources.v2 -import java.util.UUID - -import scala.collection.JavaConverters._ +import java.util.{Optional, UUID} import org.apache.spark.sql.{AnalysisException, SaveMode} import org.apache.spark.sql.catalyst.analysis.{MultiInstanceRelation, NamedRelation} import org.apache.spark.sql.catalyst.expressions.{AttributeReference, Expression} import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics} import org.apache.spark.sql.catalyst.util.truncatedString -import org.apache.spark.sql.sources.DataSourceRegister import org.apache.spark.sql.sources.v2._ import org.apache.spark.sql.sources.v2.reader._ import org.apache.spark.sql.sources.v2.writer.BatchWriteSupport import org.apache.spark.sql.types.StructType /** - * A logical plan representing a data source v2 scan. + * A logical plan representing a data source v2 table. * - * @param source An instance of a [[DataSourceV2]] implementation. - * @param options The options for this scan. Used to create fresh [[BatchWriteSupport]]. - * @param userSpecifiedSchema The user-specified schema for this scan. + * @param table The table that this relation represents. + * @param options The options for this table operation. It's used to create fresh [[ScanBuilder]] + * and [[BatchWriteSupport]]. */ case class DataSourceV2Relation( - // TODO: remove `source` when we finish API refactor for write. - source: TableProvider, - table: SupportsBatchRead, + table: Table, output: Seq[AttributeReference], - options: Map[String, String], - userSpecifiedSchema: Option[StructType] = None) + // TODO: use a simple case insensitive map instead. 
+ options: DataSourceOptions) extends LeafNode with MultiInstanceRelation with NamedRelation { - import DataSourceV2Relation._ - override def name: String = table.name() override def simpleString: String = { s"RelationV2${truncatedString(output, "[", ", ", "]")} $name" } - def newWriteSupport(): BatchWriteSupport = source.createWriteSupport(options, schema) - - def newScanBuilder(): ScanBuilder = { - val dsOptions = new DataSourceOptions(options.asJava) - table.newScanBuilder(dsOptions) + def newWriteSupport(inputSchema: StructType, mode: SaveMode): Optional[BatchWriteSupport] = { --- End diff -- Nit: add a comment for this method, especially explaining when it will return an empty `Optional`, even though this is already explained in `SupportsBatchWrite.createBatchWriteSupport`.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org