Github user steveloughran commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19269#discussion_r142004778
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2Command.scala ---
    @@ -0,0 +1,113 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.sql.execution.datasources.v2
    +
    +import org.apache.spark.{SparkException, TaskContext}
    +import org.apache.spark.internal.Logging
    +import org.apache.spark.sql.{Row, SparkSession}
    +import org.apache.spark.sql.catalyst.InternalRow
    +import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
    +import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
    +import org.apache.spark.sql.execution.SparkPlan
    +import org.apache.spark.sql.execution.command.RunnableCommand
    +import org.apache.spark.sql.sources.v2.writer._
    +import org.apache.spark.sql.types.StructType
    +import org.apache.spark.util.Utils
    +
    +case class WriteToDataSourceV2Command(writer: DataSourceV2Writer, query: LogicalPlan)
    +  extends RunnableCommand {
    +  override def children: Seq[LogicalPlan] = query :: Nil
    +
    +  override def run(sparkSession: SparkSession, children: Seq[SparkPlan]): Seq[Row] = {
    +    assert(children.length == 1)
    +
    +    val writeTask = writer match {
    +      case w: SupportsWriteInternalRow => w.createInternalRowWriterFactory()
    +      case _ => new RowToInternalRowDataWriteFactory(writer.createWriterFactory(), query.schema)
    +    }
    +
    +    val rdd = children.head.execute()
    +    val messages = new Array[WriterCommitMessage](rdd.partitions.length)
    +
    +    logInfo(s"Start processing data source writer: $writer")
    +
    +    try {
    +      sparkSession.sparkContext.runJob(
    +        rdd,
    +        (context: TaskContext, iter: Iterator[InternalRow]) =>
    +          DataWritingSparkTask.run(writeTask, context, iter),
    +        rdd.partitions.indices,
    +        (index, message: WriterCommitMessage) => messages(index) = message
    +      )
    +
    +      writer.commit(messages)
    +      logInfo(s"Data source writer $writer committed.")
    +    } catch {
    +      case cause: Throwable =>
    +        writer.abort()
    --- End diff --
    
    this may raise an exception too... better to use `Utils.tryWithSafeFinallyAndFailureCallbacks()`, so that a failure in `abort()` doesn't mask the original exception from the write job.
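
    Roughly, as a sketch only (assuming the helper's usual curried shape `tryWithSafeFinallyAndFailureCallbacks[T](block: => T)(catchBlock: => Unit, finallyBlock: => Unit)` in `org.apache.spark.util.Utils`):

        Utils.tryWithSafeFinallyAndFailureCallbacks {
          sparkSession.sparkContext.runJob(
            rdd,
            (context: TaskContext, iter: Iterator[InternalRow]) =>
              DataWritingSparkTask.run(writeTask, context, iter),
            rdd.partitions.indices,
            (index, message: WriterCommitMessage) => messages(index) = message)

          // commit only once every partition has reported success
          writer.commit(messages)
          logInfo(s"Data source writer $writer committed.")
        }(catchBlock = {
          // anything thrown here is added as suppressed to the original
          // failure instead of replacing it
          writer.abort()
        })

    One caveat: the helper also fires task-failure callbacks, so it is really aimed at the task side (`FileFormatWriter.executeTask` uses this pattern around commit/abort); on the driver, a plain try/catch that calls `cause.addSuppressed(t)` before rethrowing gives the same guarantee.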

