Github user patrickbrownsync commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22855#discussion_r228759425
  
    --- Diff: 
core/src/test/scala/org/apache/spark/serializer/KryoSerializerBenchmark.scala 
---
    @@ -0,0 +1,90 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.serializer
    +
    +import scala.concurrent._
    +import scala.concurrent.ExecutionContext.Implicits.global
    +import scala.concurrent.duration._
    +
    +import org.apache.spark.{SparkConf, SparkContext}
    +import org.apache.spark.benchmark.{Benchmark, BenchmarkBase}
    +import org.apache.spark.serializer.KryoTest._
    +import org.apache.spark.util.ThreadUtils
    +
    +/**
    + * Benchmark for KryoPool vs old "pool of 1".
    + * To run this benchmark:
    + * {{{
    + *   1. without sbt:
    + *      bin/spark-submit --class <this class> --jars <spark core test jar>
    + *   2. build/sbt "core/test:runMain <this class>"
    + *   3. generate result:
    + *      SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "core/test:runMain 
<this class>"
    + *      Results will be written to 
"benchmarks/KryoSerializerBenchmark-results.txt".
    + * }}}
    + */
    +object KryoSerializerBenchmark extends BenchmarkBase {
    +
    +  var sc: SparkContext = null
    +  val N = 500
    +  override def runBenchmarkSuite(): Unit = {
    +    val name = "Benchmark KryoPool vs old \"pool of 1\" implementation"
    +    runBenchmark(name) {
    +      val benchmark = new Benchmark(name, N, 10, output = output)
    +      Seq(true, false).foreach(usePool => run(usePool, benchmark))
    +      benchmark.run()
    +    }
    +  }
    +
    +  private def run(usePool: Boolean, benchmark: Benchmark): Unit = {
    +    lazy val sc = createSparkContext(usePool)
    +
    +    benchmark.addCase(s"KryoPool:$usePool") { _ =>
    +      val futures = for (_ <- 0 until N) yield {
    +        Future {
    +          sc.parallelize(0 until 10).map(i => i + 1).count()
    +        }
    +      }
    +
    +      val future = Future.sequence(futures)
    +
    +      ThreadUtils.awaitResult(future, 10.minutes)
    +    }
    +  }
    +
    +  def createSparkContext(usePool: Boolean): SparkContext = {
    --- End diff --
    
    I'm not sure I understand the question here; this benchmark class doesn't 
inherit from `SqlBasedBenchmark` — it inherits from `BenchmarkBase`, which has 
no `getSparkSession` method.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to