Github user tdas commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20243#discussion_r161969566
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala 
---
    @@ -280,14 +280,12 @@ final class DataStreamWriter[T] private[sql](ds: 
Dataset[T]) {
             useTempCheckpointLocation = true,
             trigger = trigger)
         } else {
    -      val sink = trigger match {
    -        case _: ContinuousTrigger =>
    -          val ds = DataSource.lookupDataSource(source, 
df.sparkSession.sessionState.conf)
    -          ds.newInstance() match {
    -            case w: ContinuousWriteSupport => w
    -            case _ => throw new AnalysisException(
    -              s"Data source $source does not support continuous writing")
    -          }
    +      val ds = DataSource.lookupDataSource(source, 
df.sparkSession.sessionState.conf)
    +      val sink = (ds.newInstance(), trigger) match {
    +        case (w: ContinuousWriteSupport, _: ContinuousTrigger) => w
    +        case (_, _: ContinuousTrigger) => throw new 
UnsupportedOperationException(
    --- End diff --
    
    AnalysisException.
    Incorrect trigger or incompatible data source is not an operation.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to