spark git commit: [SPARK-4905][STREAMING] FlumeStreamSuite fix.
Repository: spark Updated Branches: refs/heads/master 6fe70d843 - 0765af9b2 [SPARK-4905][STREAMING] FlumeStreamSuite fix. Using String constructor instead of CharsetDecoder to see if it fixes the issue of empty strings in Flume test output. Author: Hari Shreedharan hshreedha...@apache.org Closes #4371 from harishreedharan/Flume-stream-attempted-fix and squashes the following commits: 550d363 [Hari Shreedharan] Fix imports. 8695950 [Hari Shreedharan] Use Charsets.UTF_8 instead of UTF-8 in String constructors. af3ba14 [Hari Shreedharan] [SPARK-4905][STREAMING] FlumeStreamSuite fix. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0765af9b Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0765af9b Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0765af9b Branch: refs/heads/master Commit: 0765af9b21e9204c410c7a849c7201bc3eda8cc3 Parents: 6fe70d8 Author: Hari Shreedharan hshreedha...@apache.org Authored: Mon Feb 9 14:17:14 2015 -0800 Committer: Josh Rosen joshro...@databricks.com Committed: Mon Feb 9 14:17:14 2015 -0800 -- .../org/apache/spark/streaming/flume/FlumeStreamSuite.scala | 7 +++ 1 file changed, 3 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/0765af9b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala -- diff --git a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala index f333e38..322de7b 100644 --- a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala +++ b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala @@ -19,13 +19,13 @@ package org.apache.spark.streaming.flume import java.net.{InetSocketAddress, ServerSocket} import java.nio.ByteBuffer -import java.nio.charset.Charset import scala.collection.JavaConversions._ 
import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer} import scala.concurrent.duration._ import scala.language.postfixOps +import com.google.common.base.Charsets import org.apache.avro.ipc.NettyTransceiver import org.apache.avro.ipc.specific.SpecificRequestor import org.apache.flume.source.avro @@ -108,7 +108,7 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L val inputEvents = input.map { item => val event = new AvroFlumeEvent - event.setBody(ByteBuffer.wrap(item.getBytes("UTF-8"))) + event.setBody(ByteBuffer.wrap(item.getBytes(Charsets.UTF_8))) event.setHeaders(Map[CharSequence, CharSequence]("test" -> "header")) event } @@ -138,14 +138,13 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L status should be (avro.Status.OK) } -val decoder = Charset.forName("UTF-8").newDecoder() eventually(timeout(10 seconds), interval(100 milliseconds)) { val outputEvents = outputBuffer.flatten.map { _.event } outputEvents.foreach { event => event.getHeaders.get("test") should be("header") } - val output = outputEvents.map(event => decoder.decode(event.getBody()).toString) + val output = outputEvents.map(event => new String(event.getBody.array(), Charsets.UTF_8)) output should be (input) } } - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: [SPARK-4905][STREAMING] FlumeStreamSuite fix.
Repository: spark Updated Branches: refs/heads/branch-1.3 6a0144c63 - 18c5a999b [SPARK-4905][STREAMING] FlumeStreamSuite fix. Using String constructor instead of CharsetDecoder to see if it fixes the issue of empty strings in Flume test output. Author: Hari Shreedharan hshreedha...@apache.org Closes #4371 from harishreedharan/Flume-stream-attempted-fix and squashes the following commits: 550d363 [Hari Shreedharan] Fix imports. 8695950 [Hari Shreedharan] Use Charsets.UTF_8 instead of UTF-8 in String constructors. af3ba14 [Hari Shreedharan] [SPARK-4905][STREAMING] FlumeStreamSuite fix. (cherry picked from commit 0765af9b21e9204c410c7a849c7201bc3eda8cc3) Signed-off-by: Josh Rosen joshro...@databricks.com Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/18c5a999 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/18c5a999 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/18c5a999 Branch: refs/heads/branch-1.3 Commit: 18c5a999b94b992dbb6fadf70a04b967c498353d Parents: 6a0144c Author: Hari Shreedharan hshreedha...@apache.org Authored: Mon Feb 9 14:17:14 2015 -0800 Committer: Josh Rosen joshro...@databricks.com Committed: Mon Feb 9 14:17:36 2015 -0800 -- .../org/apache/spark/streaming/flume/FlumeStreamSuite.scala | 7 +++ 1 file changed, 3 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/18c5a999/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala -- diff --git a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala index f333e38..322de7b 100644 --- a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala +++ b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala @@ -19,13 +19,13 @@ package org.apache.spark.streaming.flume import 
java.net.{InetSocketAddress, ServerSocket} import java.nio.ByteBuffer -import java.nio.charset.Charset import scala.collection.JavaConversions._ import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer} import scala.concurrent.duration._ import scala.language.postfixOps +import com.google.common.base.Charsets import org.apache.avro.ipc.NettyTransceiver import org.apache.avro.ipc.specific.SpecificRequestor import org.apache.flume.source.avro @@ -108,7 +108,7 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L val inputEvents = input.map { item => val event = new AvroFlumeEvent - event.setBody(ByteBuffer.wrap(item.getBytes("UTF-8"))) + event.setBody(ByteBuffer.wrap(item.getBytes(Charsets.UTF_8))) event.setHeaders(Map[CharSequence, CharSequence]("test" -> "header")) event } @@ -138,14 +138,13 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L status should be (avro.Status.OK) } -val decoder = Charset.forName("UTF-8").newDecoder() eventually(timeout(10 seconds), interval(100 milliseconds)) { val outputEvents = outputBuffer.flatten.map { _.event } outputEvents.foreach { event => event.getHeaders.get("test") should be("header") } - val output = outputEvents.map(event => decoder.decode(event.getBody()).toString) + val output = outputEvents.map(event => new String(event.getBody.array(), Charsets.UTF_8)) output should be (input) } } - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: [SPARK-4905][STREAMING] FlumeStreamSuite fix.
Repository: spark Updated Branches: refs/heads/branch-1.2 97541b22e - 63eee523e [SPARK-4905][STREAMING] FlumeStreamSuite fix. Using String constructor instead of CharsetDecoder to see if it fixes the issue of empty strings in Flume test output. Author: Hari Shreedharan hshreedha...@apache.org Closes #4371 from harishreedharan/Flume-stream-attempted-fix and squashes the following commits: 550d363 [Hari Shreedharan] Fix imports. 8695950 [Hari Shreedharan] Use Charsets.UTF_8 instead of UTF-8 in String constructors. af3ba14 [Hari Shreedharan] [SPARK-4905][STREAMING] FlumeStreamSuite fix. (cherry picked from commit 0765af9b21e9204c410c7a849c7201bc3eda8cc3) Signed-off-by: Josh Rosen joshro...@databricks.com Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/63eee523 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/63eee523 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/63eee523 Branch: refs/heads/branch-1.2 Commit: 63eee523ea0b39b1db1a656a8b28828d93d7 Parents: 97541b2 Author: Hari Shreedharan hshreedha...@apache.org Authored: Mon Feb 9 14:17:14 2015 -0800 Committer: Josh Rosen joshro...@databricks.com Committed: Mon Feb 9 14:18:59 2015 -0800 -- .../org/apache/spark/streaming/flume/FlumeStreamSuite.scala | 7 +++ 1 file changed, 3 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/63eee523/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala -- diff --git a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala index f333e38..322de7b 100644 --- a/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala +++ b/external/flume/src/test/scala/org/apache/spark/streaming/flume/FlumeStreamSuite.scala @@ -19,13 +19,13 @@ package org.apache.spark.streaming.flume import 
java.net.{InetSocketAddress, ServerSocket} import java.nio.ByteBuffer -import java.nio.charset.Charset import scala.collection.JavaConversions._ import scala.collection.mutable.{ArrayBuffer, SynchronizedBuffer} import scala.concurrent.duration._ import scala.language.postfixOps +import com.google.common.base.Charsets import org.apache.avro.ipc.NettyTransceiver import org.apache.avro.ipc.specific.SpecificRequestor import org.apache.flume.source.avro @@ -108,7 +108,7 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L val inputEvents = input.map { item => val event = new AvroFlumeEvent - event.setBody(ByteBuffer.wrap(item.getBytes("UTF-8"))) + event.setBody(ByteBuffer.wrap(item.getBytes(Charsets.UTF_8))) event.setHeaders(Map[CharSequence, CharSequence]("test" -> "header")) event } @@ -138,14 +138,13 @@ class FlumeStreamSuite extends FunSuite with BeforeAndAfter with Matchers with L status should be (avro.Status.OK) } -val decoder = Charset.forName("UTF-8").newDecoder() eventually(timeout(10 seconds), interval(100 milliseconds)) { val outputEvents = outputBuffer.flatten.map { _.event } outputEvents.foreach { event => event.getHeaders.get("test") should be("header") } - val output = outputEvents.map(event => decoder.decode(event.getBody()).toString) + val output = outputEvents.map(event => new String(event.getBody.array(), Charsets.UTF_8)) output should be (input) } } - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org