Repository: spark
Updated Branches:
  refs/heads/master 0128905ee -> 3cd5029be


Resolve sbt warnings during build Ⅱ

Author: witgo <wi...@qq.com>

Closes #1153 from witgo/expectResult and squashes the following commits:

97541d8 [witgo] merge master
ead26e7 [witgo] Resolve sbt warnings during build


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3cd5029b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3cd5029b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3cd5029b

Branch: refs/heads/master
Commit: 3cd5029be709307415f911236472a685e406e763
Parents: 0128905
Author: witgo <wi...@qq.com>
Authored: Tue Jul 8 00:31:42 2014 -0700
Committer: Reynold Xin <r...@apache.org>
Committed: Tue Jul 8 00:31:42 2014 -0700

----------------------------------------------------------------------
 .../org/apache/spark/AccumulatorSuite.scala     |  6 +-
 .../apache/spark/util/NextIteratorSuite.scala   | 38 +++++------
 .../apache/spark/util/SizeEstimatorSuite.scala  | 70 ++++++++++----------
 .../spark/sql/columnar/ColumnStatsSuite.scala   |  6 +-
 .../spark/sql/columnar/ColumnTypeSuite.scala    |  8 +--
 .../columnar/NullableColumnBuilderSuite.scala   | 14 ++--
 .../compression/BooleanBitSetSuite.scala        | 10 +--
 .../compression/DictionaryEncodingSuite.scala   | 10 +--
 .../compression/IntegralDeltaSuite.scala        | 16 ++---
 .../compression/RunLengthEncodingSuite.scala    | 10 +--
 10 files changed, 94 insertions(+), 94 deletions(-)
----------------------------------------------------------------------
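
For context: all of these warnings come from ScalaTest assertion styles that
were deprecated around ScalaTest 2.0. A minimal sketch of the three migrations
this commit applies (the suite and values below are illustrative, not taken
from the diff):

    import org.scalatest.{FunSuite, Matchers}

    // Hypothetical suite showing each deprecated form next to its replacement.
    class MigrationSketchSuite extends FunSuite with Matchers {
      test("deprecated assertions and their replacements") {
        val x = 42

        // expectResult(...) was renamed to assertResult(...).
        // Before: expectResult(42)(x)
        assertResult(42)(x)

        // "should be === v" is deprecated; use the parenthesized matcher.
        // Before: x should be === 42
        x should be (42)

        // "evaluating { ... } should produce [E]" became "should be thrownBy".
        // Before: evaluating { sys.error("boom") } should produce [RuntimeException]
        an [RuntimeException] should be thrownBy { sys.error("boom") }
      }
    }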


http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
index 3aab88e..52d1d52 100644
--- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala
@@ -61,7 +61,7 @@ class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
     val acc : Accumulator[Int] = sc.accumulator(0)
 
     val d = sc.parallelize(1 to 20)
-    evaluating {d.foreach{x => acc.value = x}} should produce [Exception]
+    an [Exception] should be thrownBy {d.foreach{x => acc.value = x}}
   }
 
   test ("add value to collection accumulators") {
@@ -87,11 +87,11 @@ class AccumulatorSuite extends FunSuite with Matchers with LocalSparkContext {
       sc = new SparkContext("local[" + nThreads + "]", "test")
      val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
       val d = sc.parallelize(1 to maxI)
-      evaluating {
+      an [SparkException] should be thrownBy {
         d.foreach {
           x => acc.value += x
         }
-      } should produce [SparkException]
+      }
       resetSparkContext()
     }
   }

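The "evaluating { ... } should produce [E]" form replaced above is the
ScalaTest 2.0 deprecation; thrownBy is the supported spelling. A hedged
sketch, assuming a Matchers suite and the d/acc values from the hunk above
(the message assertion is purely illustrative; the real text is not shown
in this diff):

    // Assert only that the exception type is thrown:
    an [SparkException] should be thrownBy {
      d.foreach { x => acc.value += x }
    }

    // When the exception itself is needed, "the ... thrownBy" returns it:
    val caught = the [SparkException] thrownBy {
      d.foreach { x => acc.value += x }
    }
    caught.getMessage should not be empty  // illustrative check only
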
http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index cf438a3..72e81f3 100644
--- a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -27,45 +27,45 @@ import org.scalatest.Matchers
 class NextIteratorSuite extends FunSuite with Matchers {
   test("one iteration") {
     val i = new StubIterator(Buffer(1))
-    i.hasNext should be === true
-    i.next should be === 1
-    i.hasNext should be === false
+    i.hasNext should be (true)
+    i.next should be (1)
+    i.hasNext should be (false)
     intercept[NoSuchElementException] { i.next() }
   }
 
   test("two iterations") {
     val i = new StubIterator(Buffer(1, 2))
-    i.hasNext should be === true
-    i.next should be === 1
-    i.hasNext should be === true
-    i.next should be === 2
-    i.hasNext should be === false
+    i.hasNext should be (true)
+    i.next should be (1)
+    i.hasNext should be (true)
+    i.next should be (2)
+    i.hasNext should be (false)
     intercept[NoSuchElementException] { i.next() }
   }
 
   test("empty iteration") {
     val i = new StubIterator(Buffer())
-    i.hasNext should be === false
+    i.hasNext should be (false)
     intercept[NoSuchElementException] { i.next() }
   }
 
   test("close is called once for empty iterations") {
     val i = new StubIterator(Buffer())
-    i.hasNext should be === false
-    i.hasNext should be === false
-    i.closeCalled should be === 1
+    i.hasNext should be (false)
+    i.hasNext should be (false)
+    i.closeCalled should be (1)
   }
 
   test("close is called once for non-empty iterations") {
     val i = new StubIterator(Buffer(1, 2))
-    i.next should be === 1
-    i.next should be === 2
+    i.next should be (1)
+    i.next should be (2)
     // close isn't called until we check for the next element
-    i.closeCalled should be === 0
-    i.hasNext should be === false
-    i.closeCalled should be === 1
-    i.hasNext should be === false
-    i.closeCalled should be === 1
+    i.closeCalled should be (0)
+    i.hasNext should be (false)
+    i.closeCalled should be (1)
+    i.hasNext should be (false)
+    i.closeCalled should be (1)
   }
 
   class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {

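"should be === v" is the deprecated triple-equals matcher syntax; the
parenthesized form used above is the drop-in fix. A short sketch of equivalent
spellings, assuming a ScalaTest 2.x Matchers suite (iterator values
illustrative):

    val it = Iterator(1, 2)
    it.hasNext should be (true)   // deprecated form: it.hasNext should be === true
    it.next() shouldBe 1          // shouldBe is an equivalent shorthand
    assert(it.next() === 2)       // === remains available inside plain assert()
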
http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index b583a8b..f9d1af8 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -63,53 +63,53 @@ class SizeEstimatorSuite
   }
 
   test("simple classes") {
-    expectResult(16)(SizeEstimator.estimate(new DummyClass1))
-    expectResult(16)(SizeEstimator.estimate(new DummyClass2))
-    expectResult(24)(SizeEstimator.estimate(new DummyClass3))
-    expectResult(24)(SizeEstimator.estimate(new DummyClass4(null)))
-    expectResult(48)(SizeEstimator.estimate(new DummyClass4(new DummyClass3)))
+    assertResult(16)(SizeEstimator.estimate(new DummyClass1))
+    assertResult(16)(SizeEstimator.estimate(new DummyClass2))
+    assertResult(24)(SizeEstimator.estimate(new DummyClass3))
+    assertResult(24)(SizeEstimator.estimate(new DummyClass4(null)))
+    assertResult(48)(SizeEstimator.estimate(new DummyClass4(new DummyClass3)))
   }
 
  // NOTE: The String class definition varies across JDK versions (1.6 vs. 1.7) and vendors
   // (Sun vs IBM). Use a DummyString class to make tests deterministic.
   test("strings") {
-    expectResult(40)(SizeEstimator.estimate(DummyString("")))
-    expectResult(48)(SizeEstimator.estimate(DummyString("a")))
-    expectResult(48)(SizeEstimator.estimate(DummyString("ab")))
-    expectResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
+    assertResult(40)(SizeEstimator.estimate(DummyString("")))
+    assertResult(48)(SizeEstimator.estimate(DummyString("a")))
+    assertResult(48)(SizeEstimator.estimate(DummyString("ab")))
+    assertResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
   }
 
   test("primitive arrays") {
-    expectResult(32)(SizeEstimator.estimate(new Array[Byte](10)))
-    expectResult(40)(SizeEstimator.estimate(new Array[Char](10)))
-    expectResult(40)(SizeEstimator.estimate(new Array[Short](10)))
-    expectResult(56)(SizeEstimator.estimate(new Array[Int](10)))
-    expectResult(96)(SizeEstimator.estimate(new Array[Long](10)))
-    expectResult(56)(SizeEstimator.estimate(new Array[Float](10)))
-    expectResult(96)(SizeEstimator.estimate(new Array[Double](10)))
-    expectResult(4016)(SizeEstimator.estimate(new Array[Int](1000)))
-    expectResult(8016)(SizeEstimator.estimate(new Array[Long](1000)))
+    assertResult(32)(SizeEstimator.estimate(new Array[Byte](10)))
+    assertResult(40)(SizeEstimator.estimate(new Array[Char](10)))
+    assertResult(40)(SizeEstimator.estimate(new Array[Short](10)))
+    assertResult(56)(SizeEstimator.estimate(new Array[Int](10)))
+    assertResult(96)(SizeEstimator.estimate(new Array[Long](10)))
+    assertResult(56)(SizeEstimator.estimate(new Array[Float](10)))
+    assertResult(96)(SizeEstimator.estimate(new Array[Double](10)))
+    assertResult(4016)(SizeEstimator.estimate(new Array[Int](1000)))
+    assertResult(8016)(SizeEstimator.estimate(new Array[Long](1000)))
   }
 
   test("object arrays") {
     // Arrays containing nulls should just have one pointer per element
-    expectResult(56)(SizeEstimator.estimate(new Array[String](10)))
-    expectResult(56)(SizeEstimator.estimate(new Array[AnyRef](10)))
+    assertResult(56)(SizeEstimator.estimate(new Array[String](10)))
+    assertResult(56)(SizeEstimator.estimate(new Array[AnyRef](10)))
     // For object arrays with non-null elements, each object should take one pointer plus
     // however many bytes that class takes. (Note that Array.fill calls the code in its
     // second parameter separately for each object, so we get distinct objects.)
-    expectResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass1)))
-    expectResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass2)))
-    expectResult(296)(SizeEstimator.estimate(Array.fill(10)(new DummyClass3)))
-    expectResult(56)(SizeEstimator.estimate(Array(new DummyClass1, new DummyClass2)))
+    assertResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass1)))
+    assertResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass2)))
+    assertResult(296)(SizeEstimator.estimate(Array.fill(10)(new DummyClass3)))
+    assertResult(56)(SizeEstimator.estimate(Array(new DummyClass1, new DummyClass2)))
 
     // Past size 100, we only sample 100 elements, but we should still get the right size.
-    expectResult(28016)(SizeEstimator.estimate(Array.fill(1000)(new DummyClass3)))
+    assertResult(28016)(SizeEstimator.estimate(Array.fill(1000)(new DummyClass3)))
 
     // If an array contains the *same* element many times, we should only count it once.
     val d1 = new DummyClass1
-    expectResult(72)(SizeEstimator.estimate(Array.fill(10)(d1))) // 10 pointers plus 8-byte object
-    expectResult(432)(SizeEstimator.estimate(Array.fill(100)(d1))) // 100 pointers plus 8-byte object
+    assertResult(72)(SizeEstimator.estimate(Array.fill(10)(d1))) // 10 pointers plus 8-byte object
+    assertResult(432)(SizeEstimator.estimate(Array.fill(100)(d1))) // 100 pointers plus 8-byte object
 
     // Same thing with huge array containing the same element many times. Note that this won't
     // return exactly 4032 because it can't tell that *all* the elements will equal the first
@@ -127,10 +127,10 @@ class SizeEstimatorSuite
     val initialize = PrivateMethod[Unit]('initialize)
     SizeEstimator invokePrivate initialize()
 
-    expectResult(40)(SizeEstimator.estimate(DummyString("")))
-    expectResult(48)(SizeEstimator.estimate(DummyString("a")))
-    expectResult(48)(SizeEstimator.estimate(DummyString("ab")))
-    expectResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
+    assertResult(40)(SizeEstimator.estimate(DummyString("")))
+    assertResult(48)(SizeEstimator.estimate(DummyString("a")))
+    assertResult(48)(SizeEstimator.estimate(DummyString("ab")))
+    assertResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
     resetOrClear("os.arch", arch)
   }
 
@@ -142,10 +142,10 @@ class SizeEstimatorSuite
     val initialize = PrivateMethod[Unit]('initialize)
     SizeEstimator invokePrivate initialize()
 
-    expectResult(56)(SizeEstimator.estimate(DummyString("")))
-    expectResult(64)(SizeEstimator.estimate(DummyString("a")))
-    expectResult(64)(SizeEstimator.estimate(DummyString("ab")))
-    expectResult(72)(SizeEstimator.estimate(DummyString("abcdefgh")))
+    assertResult(56)(SizeEstimator.estimate(DummyString("")))
+    assertResult(64)(SizeEstimator.estimate(DummyString("a")))
+    assertResult(64)(SizeEstimator.estimate(DummyString("ab")))
+    assertResult(72)(SizeEstimator.estimate(DummyString("abcdefgh")))
 
     resetOrClear("os.arch", arch)
     resetOrClear("spark.test.useCompressedOops", oops)

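expectResult(expected)(actual) was renamed to assertResult in ScalaTest 2.0
with unchanged behavior, so the migration in this suite is a one-for-one
rename. A minimal sketch of the single-argument form, reusing the suite's
DummyClass1 (the expected size matches the hunk above):

    // Passes when the block's result equals the expected value; on a
    // mismatch it fails with an "Expected ..., but got ..." style message.
    assertResult(16) {
      SizeEstimator.estimate(new DummyClass1)
    }
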
http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
index 78640b8..6f0d46d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnStatsSuite.scala
@@ -39,7 +39,7 @@ class ColumnStatsSuite extends FunSuite {
 
     test(s"$columnStatsName: empty") {
       val columnStats = columnStatsClass.newInstance()
-      expectResult(columnStats.initialBounds, "Wrong initial bounds") {
+      assertResult(columnStats.initialBounds, "Wrong initial bounds") {
         (columnStats.lowerBound, columnStats.upperBound)
       }
     }
@@ -54,8 +54,8 @@ class ColumnStatsSuite extends FunSuite {
       val values = rows.map(_.head.asInstanceOf[T#JvmType])
      val ordering = columnType.dataType.ordering.asInstanceOf[Ordering[T#JvmType]]
 
-      expectResult(values.min(ordering), "Wrong lower bound")(columnStats.lowerBound)
-      expectResult(values.max(ordering), "Wrong upper bound")(columnStats.upperBound)
+      assertResult(values.min(ordering), "Wrong lower bound")(columnStats.lowerBound)
+      assertResult(values.max(ordering), "Wrong upper bound")(columnStats.upperBound)
     }
   }
 }

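assertResult also accepts a clue as a second argument, which this suite and
the ones below use heavily; the clue is included in the failure report
alongside the expected and actual values. A sketch of the two-argument form,
assuming the columnStats value from the hunk above:

    // assertResult(expected, clue)(actual-producing block)
    assertResult(1, "Wrong lower bound") {
      columnStats.lowerBound  // illustrative: the expected value depends on the data
    }
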
http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
index 71be410..314b7d3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/ColumnTypeSuite.scala
@@ -35,7 +35,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
       BOOLEAN -> 1, STRING -> 8, BINARY -> 16, GENERIC -> 16)
 
     checks.foreach { case (columnType, expectedSize) =>
-      expectResult(expectedSize, s"Wrong defaultSize for $columnType") {
+      assertResult(expectedSize, s"Wrong defaultSize for $columnType") {
         columnType.defaultSize
       }
     }
@@ -47,7 +47,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
         value: JvmType,
         expected: Int) {
 
-      expectResult(expected, s"Wrong actualSize for $columnType") {
+      assertResult(expected, s"Wrong actualSize for $columnType") {
         columnType.actualSize(value)
       }
     }
@@ -127,7 +127,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
     val length = buffer.getInt()
     assert(length === serializedObj.length)
 
-    expectResult(obj, "Deserialized object didn't equal to the original object") {
+    assertResult(obj, "Deserialized object didn't equal to the original object") {
       val bytes = new Array[Byte](length)
       buffer.get(bytes, 0, length)
       SparkSqlSerializer.deserialize(bytes)
@@ -136,7 +136,7 @@ class ColumnTypeSuite extends FunSuite with Logging {
     buffer.rewind()
     buffer.putInt(serializedObj.length).put(serializedObj)
 
-    expectResult(obj, "Deserialized object didn't equal to the original object") {
+    assertResult(obj, "Deserialized object didn't equal to the original object") {
       buffer.rewind()
       SparkSqlSerializer.deserialize(GENERIC.extract(buffer))
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
index d9d1e1b..d889852 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/NullableColumnBuilderSuite.scala
@@ -48,8 +48,8 @@ class NullableColumnBuilderSuite extends FunSuite {
       val columnBuilder = TestNullableColumnBuilder(columnType)
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(0, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(0, "Wrong null count")(buffer.getInt())
       assert(!buffer.hasRemaining)
     }
 
@@ -63,8 +63,8 @@ class NullableColumnBuilderSuite extends FunSuite {
 
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(0, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(0, "Wrong null count")(buffer.getInt())
     }
 
     test(s"$typeName column builder: null values") {
@@ -79,11 +79,11 @@ class NullableColumnBuilderSuite extends FunSuite {
 
       val buffer = columnBuilder.build()
 
-      expectResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
-      expectResult(4, "Wrong null count")(buffer.getInt())
+      assertResult(columnType.typeId, "Wrong column type ID")(buffer.getInt())
+      assertResult(4, "Wrong null count")(buffer.getInt())
 
       // For null positions
-      (1 to 7 by 2).foreach(expectResult(_, "Wrong null position")(buffer.getInt()))
+      (1 to 7 by 2).foreach(assertResult(_, "Wrong null position")(buffer.getInt()))
 
       // For non-null values
       (0 until 4).foreach { _ =>

http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
index 93259a1..5fba004 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/BooleanBitSetSuite.scala
@@ -48,18 +48,18 @@ class BooleanBitSetSuite extends FunSuite {
     }
 
     // 4 extra bytes for compression scheme type ID
-    expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+    assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
     // Skips column header
     buffer.position(headerSize)
-    expectResult(BooleanBitSet.typeId, "Wrong compression scheme ID")(buffer.getInt())
-    expectResult(count, "Wrong element count")(buffer.getInt())
+    assertResult(BooleanBitSet.typeId, "Wrong compression scheme ID")(buffer.getInt())
+    assertResult(count, "Wrong element count")(buffer.getInt())
 
     var word = 0: Long
     for (i <- 0 until count) {
       val bit = i % BITS_PER_LONG
       word = if (bit == 0) buffer.getLong() else word
-      expectResult(values(i), s"Wrong value in compressed buffer, index=$i") {
+      assertResult(values(i), s"Wrong value in compressed buffer, index=$i") {
         (word & ((1: Long) << bit)) != 0
       }
     }
@@ -75,7 +75,7 @@ class BooleanBitSetSuite extends FunSuite {
     if (values.nonEmpty) {
       values.foreach {
         assert(decoder.hasNext)
-        expectResult(_, "Wrong decoded value")(decoder.next())
+        assertResult(_, "Wrong decoded value")(decoder.next())
       }
     }
     assert(!decoder.hasNext)

http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
index 198dcd8..d8ae2a2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/DictionaryEncodingSuite.scala
@@ -71,22 +71,22 @@ class DictionaryEncodingSuite extends FunSuite {
         // 2 bytes for each `Short`
         val compressedSize = 4 + dictionarySize + 2 * inputSeq.length
         // 4 extra bytes for compression scheme type ID
-        expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+        assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
         // Skips column header
         buffer.position(headerSize)
-        expectResult(DictionaryEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
+        assertResult(DictionaryEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
         val dictionary = buildDictionary(buffer).toMap
 
         dictValues.foreach { i =>
-          expectResult(i, "Wrong dictionary entry") {
+          assertResult(i, "Wrong dictionary entry") {
             dictionary(values(i))
           }
         }
 
         inputSeq.foreach { i =>
-          expectResult(i.toShort, "Wrong column element value")(buffer.getShort())
+          assertResult(i.toShort, "Wrong column element value")(buffer.getShort())
         }
 
         // -------------
@@ -101,7 +101,7 @@ class DictionaryEncodingSuite extends FunSuite {
         if (inputSeq.nonEmpty) {
           inputSeq.foreach { i =>
             assert(decoder.hasNext)
-            expectResult(values(i), "Wrong decoded value")(decoder.next())
+            assertResult(values(i), "Wrong decoded value")(decoder.next())
           }
         }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
index 46af6e0..17619dc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/IntegralDeltaSuite.scala
@@ -69,21 +69,21 @@ class IntegralDeltaSuite extends FunSuite {
       })
 
       // 4 extra bytes for compression scheme type ID
-      expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+      assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
       buffer.position(headerSize)
-      expectResult(scheme.typeId, "Wrong compression scheme ID")(buffer.getInt())
+      assertResult(scheme.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
       if (input.nonEmpty) {
-        expectResult(Byte.MinValue, "The first byte should be an escaping mark")(buffer.get())
-        expectResult(input.head, "The first value is wrong")(columnType.extract(buffer))
+        assertResult(Byte.MinValue, "The first byte should be an escaping mark")(buffer.get())
+        assertResult(input.head, "The first value is wrong")(columnType.extract(buffer))
 
         (input.tail, deltas).zipped.foreach { (value, delta) =>
           if (math.abs(delta) <= Byte.MaxValue) {
-            expectResult(delta, "Wrong delta")(buffer.get())
+            assertResult(delta, "Wrong delta")(buffer.get())
           } else {
-            expectResult(Byte.MinValue, "Expecting escaping mark here")(buffer.get())
-            expectResult(value, "Wrong value")(columnType.extract(buffer))
+            assertResult(Byte.MinValue, "Expecting escaping mark here")(buffer.get())
+            assertResult(value, "Wrong value")(columnType.extract(buffer))
           }
         }
       }
@@ -99,7 +99,7 @@ class IntegralDeltaSuite extends FunSuite {
       if (input.nonEmpty) {
         input.foreach{
           assert(decoder.hasNext)
-          expectResult(_, "Wrong decoded value")(decoder.next())
+          assertResult(_, "Wrong decoded value")(decoder.next())
         }
       }
       assert(!decoder.hasNext)

http://git-wip-us.apache.org/repos/asf/spark/blob/3cd5029b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
index d3b73ba..40115be 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/columnar/compression/RunLengthEncodingSuite.scala
@@ -61,15 +61,15 @@ class RunLengthEncodingSuite extends FunSuite {
       }.sum
 
       // 4 extra bytes for compression scheme type ID
-      expectResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
+      assertResult(headerSize + compressedSize, "Wrong buffer capacity")(buffer.capacity)
 
       // Skips column header
       buffer.position(headerSize)
-      expectResult(RunLengthEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
+      assertResult(RunLengthEncoding.typeId, "Wrong compression scheme ID")(buffer.getInt())
 
       inputRuns.foreach { case (index, run) =>
-        expectResult(values(index), "Wrong column element value")(columnType.extract(buffer))
-        expectResult(run, "Wrong run length")(buffer.getInt())
+        assertResult(values(index), "Wrong column element value")(columnType.extract(buffer))
+        assertResult(run, "Wrong run length")(buffer.getInt())
       }
 
       // -------------
@@ -84,7 +84,7 @@ class RunLengthEncodingSuite extends FunSuite {
       if (inputSeq.nonEmpty) {
         inputSeq.foreach { i =>
           assert(decoder.hasNext)
-          expectResult(values(i), "Wrong decoded value")(decoder.next())
+          assertResult(values(i), "Wrong decoded value")(decoder.next())
         }
       }
 
