Github user superbobry commented on a diff in the pull request:

    https://github.com/apache/spark/pull/17230#discussion_r108351585
  
    --- Diff: core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala ---
    @@ -198,17 +183,114 @@ private[spark] class PipedRDD[T: ClassTag](
             val t = childThreadException.get()
             if (t != null) {
               val commandRan = command.mkString(" ")
    -          logError(s"Caught exception while running pipe() operator. 
Command ran: $commandRan. " +
    -            s"Exception: ${t.getMessage}")
    -          proc.destroy()
    +          logError("Caught exception while running pipe() operator. " +
    +              s"Command ran: $commandRan.", t)
               cleanup()
    +          proc.destroy()
               throw t
             }
           }
         }
       }
     }
     
    +/** Specifies how to write the elements of the input [[RDD]] into the 
pipe. */
    +trait InputWriter[T] extends Serializable {
    +  def write(dos: DataOutput, elem: T): Unit
    +}
    +
    +/** Specifies how to read the elements from the pipe into the output 
[[RDD]]. */
    +trait OutputReader[T] extends Serializable {
    +  /**
    +   * Reads the next element.
    +   *
    +   * The input is guaranteed to have at least one byte.
    +   */
    +  def read(dis: DataInput): T
    +}
    +
    +class TextInputWriter[I](
    +    encoding: String = Codec.defaultCharsetCodec.name,
    +    printPipeContext: (String => Unit) => Unit = null,
    +    printRDDElement: (I, String => Unit) => Unit = null
    +) extends InputWriter[I] {
    +
    +  private[this] val lineSeparator = 
System.lineSeparator().getBytes(encoding)
    +  private[this] var initialized = printPipeContext == null
    +
    +  private def writeLine(dos: DataOutput, s: String): Unit = {
    +    dos.write(s.getBytes(encoding))
    +    dos.write(lineSeparator)
    +  }
    +
    +  override def write(dos: DataOutput, elem: I): Unit = {
    +    if (!initialized) {
    +      printPipeContext(writeLine(dos, _))
    +      initialized = true
    +    }
    +
    +    if (printRDDElement == null) {
    +      writeLine(dos, String.valueOf(elem))
    +    } else {
    +      printRDDElement(elem, writeLine(dos, _))
    +    }
    +  }
    +}
    +
    +class TextOutputReader(
    +    encoding: String = Codec.defaultCharsetCodec.name
    +) extends OutputReader[String] {
    +
    +  private[this] val lf = "\n".getBytes(encoding)
    +  private[this] val cr = "\r".getBytes(encoding)
    +  private[this] val crlf = cr ++ lf
    +  private[this] var buf = Array.ofDim[Byte](64)
    +  private[this] var used = 0
    +
    +  @inline
    +  /** Checks that the suffix of [[buf]] matches [[other]]. */
    +  private def endsWith(other: Array[Byte]): Boolean = {
    +    var i = used - 1
    +    var j = other.length - 1
    +    (j <= i) && {
    +      while (j >= 0) {
    +        if (buf(i) != other(j)) {
    +          return false
    +        }
    +        i -= 1
    +        j -= 1
    +      }
    +      true
    +    }
    +  }
    +
    +  override def read(dis: DataInput): String = {
    --- End diff --
    
    I initially had 
[`readLine`](https://docs.oracle.com/javase/7/docs/api/java/io/DataInput.html#readLine())
 here, but the problem with it is that it assumes ASCII and therefore does not 
work for general encodings. For example, reading a UTF-32 encoded 
"foobar\n" would result in extra zeros at the end of the string. 
    
    I have yet to make another pass over these changes, but previous benchmarking 
suggested that the bottleneck (to my surprise) is the `String(byte[], Charset)` 
constructor. Of course, there's always the possibility that the profiler is biased 
:) 
    



---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to