spark git commit: [SPARK-5339][BUILD] build/mvn doesn't work because of invalid URL for maven's tgz.
Repository: spark Updated Branches: refs/heads/master 142093179 - c094c7327 [SPARK-5339][BUILD] build/mvn doesn't work because of invalid URL for maven's tgz. build/mvn will automatically download tarball of maven. But currently, the URL is invalid. Author: Kousuke Saruta saru...@oss.nttdata.co.jp Closes #4124 from sarutak/SPARK-5339 and squashes the following commits: 6e96121 [Kousuke Saruta] Merge branch 'master' of git://git.apache.org/spark into SPARK-5339 0e012d1 [Kousuke Saruta] Updated Maven version to 3.2.5 ca26499 [Kousuke Saruta] Fixed URL of the tarball of Maven Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/c094c732 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/c094c732 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/c094c732 Branch: refs/heads/master Commit: c094c73270837602b875b18998133a2364a89e45 Parents: 1420931 Author: Kousuke Saruta saru...@oss.nttdata.co.jp Authored: Mon Jan 26 13:07:49 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 13:07:49 2015 -0800 -- build/mvn | 8 1 file changed, 4 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/c094c732/build/mvn -- diff --git a/build/mvn b/build/mvn index 43471f8..f91e2b4 100755 --- a/build/mvn +++ b/build/mvn @@ -68,10 +68,10 @@ install_app() { # Install maven under the build/ folder install_mvn() { install_app \ -http://apache.claz.org/maven/maven-3/3.2.3/binaries; \ -apache-maven-3.2.3-bin.tar.gz \ -apache-maven-3.2.3/bin/mvn - MVN_BIN=${_DIR}/apache-maven-3.2.3/bin/mvn +http://archive.apache.org/dist/maven/maven-3/3.2.5/binaries; \ +apache-maven-3.2.5-bin.tar.gz \ +apache-maven-3.2.5/bin/mvn + MVN_BIN=${_DIR}/apache-maven-3.2.5/bin/mvn } # Install zinc under the build/ folder - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap
Repository: spark Updated Branches: refs/heads/master 81251682e - 142093179 [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap j.u.c.ConcurrentHashMap is more battle tested. cc rxin JoshRosen pwendell Author: Davies Liu dav...@databricks.com Closes #4208 from davies/safe-conf and squashes the following commits: c2182dc [Davies Liu] address comments, fix tests 3a1d821 [Davies Liu] fix test da14ced [Davies Liu] Merge branch 'master' of github.com:apache/spark into safe-conf ae4d305 [Davies Liu] change to j.u.c.ConcurrentMap f8fa1cf [Davies Liu] change to TrieMap a1d769a [Davies Liu] make SparkConf thread-safe Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/14209317 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/14209317 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/14209317 Branch: refs/heads/master Commit: 142093179a4c40bdd90744191034de7b94a963ff Parents: 8125168 Author: Davies Liu dav...@databricks.com Authored: Mon Jan 26 12:51:32 2015 -0800 Committer: Reynold Xin r...@databricks.com Committed: Mon Jan 26 12:51:32 2015 -0800 -- .../main/scala/org/apache/spark/SparkConf.scala | 38 ++-- .../deploy/worker/WorkerArgumentsTest.scala | 4 +-- .../apache/spark/storage/LocalDirsSuite.scala | 2 +- 3 files changed, 23 insertions(+), 21 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/14209317/core/src/main/scala/org/apache/spark/SparkConf.scala -- diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index f9d4aa4..cd91c8f 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -17,9 +17,11 @@ package org.apache.spark +import java.util.concurrent.ConcurrentHashMap + import scala.collection.JavaConverters._ -import scala.collection.concurrent.TrieMap -import scala.collection.mutable.{HashMap, LinkedHashSet} +import 
scala.collection.mutable.LinkedHashSet + import org.apache.spark.serializer.KryoSerializer /** @@ -47,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Create a SparkConf that loads defaults from system properties and the classpath */ def this() = this(true) - private[spark] val settings = new TrieMap[String, String]() + private val settings = new ConcurrentHashMap[String, String]() if (loadDefaults) { // Load any spark.* system properties for ((k, v) - System.getProperties.asScala if k.startsWith(spark.)) { - settings(k) = v + set(k, v) } } @@ -64,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { if (value == null) { throw new NullPointerException(null value for + key) } -settings(key) = value +settings.put(key, value) this } @@ -130,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Set multiple parameters together */ def setAll(settings: Traversable[(String, String)]) = { -this.settings ++= settings +this.settings.putAll(settings.toMap.asJava) this } /** Set a parameter if it isn't already configured */ def setIfMissing(key: String, value: String): SparkConf = { -if (!settings.contains(key)) { - settings(key) = value -} +settings.putIfAbsent(key, value) this } @@ -164,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Get a parameter; throws a NoSuchElementException if it's not set */ def get(key: String): String = { -settings.getOrElse(key, throw new NoSuchElementException(key)) +getOption(key).getOrElse(throw new NoSuchElementException(key)) } /** Get a parameter, falling back to a default if not set */ def get(key: String, defaultValue: String): String = { -settings.getOrElse(key, defaultValue) +getOption(key).getOrElse(defaultValue) } /** Get a parameter as an Option */ def getOption(key: String): Option[String] = { -settings.get(key) +Option(settings.get(key)) } /** Get all parameters as a list of pairs */ - 
def getAll: Array[(String, String)] = settings.toArray + def getAll: Array[(String, String)] = { +settings.entrySet().asScala.map(x = (x.getKey, x.getValue)).toArray + } /** Get a parameter as an integer, falling back to a default if not set */ def getInt(key: String, defaultValue: Int): Int = { @@ -225,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { def getAppId: String = get(spark.app.id) /** Does the configuration contain a given parameter? */ - def contains(key: String):
spark git commit: [SPARK-5119] java.lang.ArrayIndexOutOfBoundsException on trying to train...
Repository: spark Updated Branches: refs/heads/master 661e0fca5 - f2ba5c6fc [SPARK-5119] java.lang.ArrayIndexOutOfBoundsException on trying to train... ... decision tree model Labels loaded from libsvm files are mapped to 0.0 if they are negative labels because they should be nonnegative value. Author: lewuathe lewua...@me.com Closes #3975 from Lewuathe/map-negative-label-to-positive and squashes the following commits: 12d1d59 [lewuathe] [SPARK-5119] Fix code styles 6d9a18a [lewuathe] [SPARK-5119] Organize test codes 62a150c [lewuathe] [SPARK-5119] Modify Impurities throw exceptions with negatie labels 3336c21 [lewuathe] [SPARK-5119] java.lang.ArrayIndexOutOfBoundsException on trying to train decision tree model Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f2ba5c6f Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f2ba5c6f Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f2ba5c6f Branch: refs/heads/master Commit: f2ba5c6fc3dde81a4d234c75dae2d4e3b46512d1 Parents: 661e0fc Author: lewuathe lewua...@me.com Authored: Mon Jan 26 18:03:21 2015 -0800 Committer: Xiangrui Meng m...@databricks.com Committed: Mon Jan 26 18:03:21 2015 -0800 -- .../spark/mllib/tree/impurity/Entropy.scala | 5 +++ .../apache/spark/mllib/tree/impurity/Gini.scala | 5 +++ .../apache/spark/mllib/tree/ImpuritySuite.scala | 42 3 files changed, 52 insertions(+) -- http://git-wip-us.apache.org/repos/asf/spark/blob/f2ba5c6f/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala -- diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala index 0e02345..b7950e0 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Entropy.scala @@ -94,6 +94,10 @@ private[tree] class EntropyAggregator(numClasses: Int) throw new 
IllegalArgumentException(sEntropyAggregator given label $label + s but requires label numClasses (= $statsSize).) } +if (label 0) { + throw new IllegalArgumentException(sEntropyAggregator given label $label + +sbut requires label is non-negative.) +} allStats(offset + label.toInt) += instanceWeight } @@ -147,6 +151,7 @@ private[tree] class EntropyCalculator(stats: Array[Double]) extends ImpurityCalc val lbl = label.toInt require(lbl stats.length, sEntropyCalculator.prob given invalid label: $lbl (should be ${stats.length}) +require(lbl = 0, Entropy does not support negative labels) val cnt = count if (cnt == 0) { 0 http://git-wip-us.apache.org/repos/asf/spark/blob/f2ba5c6f/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala -- diff --git a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala index 7c83cd4..c946db9 100644 --- a/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala +++ b/mllib/src/main/scala/org/apache/spark/mllib/tree/impurity/Gini.scala @@ -90,6 +90,10 @@ private[tree] class GiniAggregator(numClasses: Int) throw new IllegalArgumentException(sGiniAggregator given label $label + s but requires label numClasses (= $statsSize).) } +if (label 0) { + throw new IllegalArgumentException(sGiniAggregator given label $label + +sbut requires label is non-negative.) 
+} allStats(offset + label.toInt) += instanceWeight } @@ -143,6 +147,7 @@ private[tree] class GiniCalculator(stats: Array[Double]) extends ImpurityCalcula val lbl = label.toInt require(lbl stats.length, sGiniCalculator.prob given invalid label: $lbl (should be ${stats.length}) +require(lbl = 0, GiniImpurity does not support negative labels) val cnt = count if (cnt == 0) { 0 http://git-wip-us.apache.org/repos/asf/spark/blob/f2ba5c6f/mllib/src/test/scala/org/apache/spark/mllib/tree/ImpuritySuite.scala -- diff --git a/mllib/src/test/scala/org/apache/spark/mllib/tree/ImpuritySuite.scala b/mllib/src/test/scala/org/apache/spark/mllib/tree/ImpuritySuite.scala new file mode 100644 index 000..92b4985 --- /dev/null +++ b/mllib/src/test/scala/org/apache/spark/mllib/tree/ImpuritySuite.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional
spark git commit: [SPARK-5052] Add common/base classes to fix guava methods signatures.
Repository: spark Updated Branches: refs/heads/master 0497ea51a - 661e0fca5 [SPARK-5052] Add common/base classes to fix guava methods signatures. Fixes problems with incorrect method signatures related to shaded classes. For discussion see the jira issue. Author: Elmer Garduno elm...@google.com Closes #3874 from elmer-garduno/fix_guava_signatures and squashes the following commits: aa5d8e0 [Elmer Garduno] Unshade common/base[Function|Supplier] classes to fix guava methods signatures. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/661e0fca Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/661e0fca Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/661e0fca Branch: refs/heads/master Commit: 661e0fca5d5d86efab5fb26da600ac2ac96b09ec Parents: 0497ea5 Author: Elmer Garduno elm...@google.com Authored: Mon Jan 26 17:40:48 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 17:40:48 2015 -0800 -- assembly/pom.xml | 2 ++ core/pom.xml | 2 ++ 2 files changed, 4 insertions(+) -- http://git-wip-us.apache.org/repos/asf/spark/blob/661e0fca/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index b2a9d07..594fa0c 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -142,8 +142,10 @@ /includes excludes excludecom/google/common/base/Absent*/exclude +excludecom/google/common/base/Function/exclude excludecom/google/common/base/Optional*/exclude excludecom/google/common/base/Present*/exclude +excludecom/google/common/base/Supplier/exclude /excludes /relocation /relocations http://git-wip-us.apache.org/repos/asf/spark/blob/661e0fca/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index d9a49c9..1984682 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -372,8 +372,10 @@ artifactcom.google.guava:guava/artifact includes includecom/google/common/base/Absent*/include +includecom/google/common/base/Function/include 
includecom/google/common/base/Optional*/include includecom/google/common/base/Present*/include +includecom/google/common/base/Supplier/include /includes /filter /filters - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
[1/2] spark git commit: Preparing development version 1.2.2-SNAPSHOT
Repository: spark Updated Branches: refs/heads/branch-1.2 07c0fd195 - adfed7086 Preparing development version 1.2.2-SNAPSHOT Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/adfed708 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/adfed708 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/adfed708 Branch: refs/heads/branch-1.2 Commit: adfed7086f10fa8db4eeac7996c84cf98f625e9a Parents: e87eb2b Author: Ubuntu ubu...@ip-172-31-24-218.us-west-2.compute.internal Authored: Tue Jan 27 00:12:04 2015 + Committer: Ubuntu ubu...@ip-172-31-24-218.us-west-2.compute.internal Committed: Tue Jan 27 00:12:04 2015 + -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index d731003..6889a6c 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/bagel/pom.xml -- diff 
--git a/bagel/pom.xml b/bagel/pom.xml index 8374612..f785cf6 100644 --- a/bagel/pom.xml +++ b/bagel/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index ceeabd6..9e202f3 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index c7ad4dc..df6975f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index e0b3eae..0002bf2 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/adfed708/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index f9559c1..e783d39 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId
[2/2] spark git commit: Preparing Spark release v1.2.1-rc1
Preparing Spark release v1.2.1-rc1 Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e87eb2b4 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e87eb2b4 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e87eb2b4 Branch: refs/heads/branch-1.2 Commit: e87eb2b42f137c22194cfbca2abf06fecdf943da Parents: 07c0fd1 Author: Ubuntu ubu...@ip-172-31-24-218.us-west-2.compute.internal Authored: Tue Jan 27 00:12:04 2015 + Committer: Ubuntu ubu...@ip-172-31-24-218.us-west-2.compute.internal Committed: Tue Jan 27 00:12:04 2015 + -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index 65e3ddf..d731003 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/bagel/pom.xml -- diff --git a/bagel/pom.xml b/bagel/pom.xml index 4ead7aa..8374612 100644 --- a/bagel/pom.xml +++ 
b/bagel/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index 155b4c9..ceeabd6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index f5a7ed2..c7ad4dc 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index fe1c8fb..e0b3eae 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e87eb2b4/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index da4bd70..f9559c1 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version
Git Push Summary
Repository: spark Updated Tags: refs/tags/v1.2.1-rc1 [created] e87eb2b42 - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
[2/2] spark git commit: Revert Preparing Spark release v1.2.1-rc1
Revert Preparing Spark release v1.2.1-rc1 This reverts commit e87eb2b42f137c22194cfbca2abf06fecdf943da. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8c461005 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8c461005 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8c461005 Branch: refs/heads/branch-1.2 Commit: 8c46100525b55d3efab9af506e341c3aabdcef2c Parents: e8da342 Author: Patrick Wendell patr...@databricks.com Authored: Mon Jan 26 17:06:22 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 17:06:22 2015 -0800 -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index d731003..65e3ddf 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.1-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/bagel/pom.xml -- diff --git a/bagel/pom.xml b/bagel/pom.xml index 
8374612..4ead7aa 100644 --- a/bagel/pom.xml +++ b/bagel/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.1-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index ceeabd6..155b4c9 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.1-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index c7ad4dc..f5a7ed2 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.1-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index e0b3eae..fe1c8fb 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.1-SNAPSHOT/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/8c461005/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index f9559c1..da4bd70 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +
Git Push Summary
Repository: spark Updated Tags: refs/tags/v1.2.1-rc1 [deleted] e87eb2b42 - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
[1/2] spark git commit: Revert Preparing development version 1.2.2-SNAPSHOT
Repository: spark Updated Branches: refs/heads/branch-1.2 adfed7086 - 8c4610052 Revert Preparing development version 1.2.2-SNAPSHOT This reverts commit adfed7086f10fa8db4eeac7996c84cf98f625e9a. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e8da342e Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e8da342e Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e8da342e Branch: refs/heads/branch-1.2 Commit: e8da342ecef307664d37c1dc47d8cb1ab9c52804 Parents: adfed70 Author: Patrick Wendell patr...@databricks.com Authored: Mon Jan 26 17:06:19 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 17:06:19 2015 -0800 -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index 6889a6c..d731003 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.2-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent 
http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/bagel/pom.xml -- diff --git a/bagel/pom.xml b/bagel/pom.xml index f785cf6..8374612 100644 --- a/bagel/pom.xml +++ b/bagel/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.2-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index 9e202f3..ceeabd6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.2-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index df6975f..c7ad4dc 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.2-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index 0002bf2..e0b3eae 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.2-SNAPSHOT/version +version1.2.1/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/e8da342e/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index e783d39..f9559c1 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent
[1/2] spark git commit: Preparing development version 1.2.2-SNAPSHOT
Repository: spark Updated Branches: refs/heads/branch-1.2 8c4610052 - f53a4319b Preparing development version 1.2.2-SNAPSHOT Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f53a4319 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f53a4319 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f53a4319 Branch: refs/heads/branch-1.2 Commit: f53a4319ba5f0843c077e64ae5a41e2fac835a5b Parents: 3e2d7d3 Author: Patrick Wendell patr...@databricks.com Authored: Tue Jan 27 01:07:29 2015 + Committer: Patrick Wendell patr...@databricks.com Committed: Tue Jan 27 01:07:29 2015 + -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index d731003..6889a6c 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/bagel/pom.xml -- diff --git a/bagel/pom.xml b/bagel/pom.xml 
index 8374612..f785cf6 100644 --- a/bagel/pom.xml +++ b/bagel/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index ceeabd6..9e202f3 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index c7ad4dc..df6975f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index e0b3eae..0002bf2 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +version1.2.2-SNAPSHOT/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/f53a4319/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index f9559c1..e783d39 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1/version +
[2/2] spark git commit: Preparing Spark release v1.2.1-rc1
Preparing Spark release v1.2.1-rc1 Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3e2d7d31 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3e2d7d31 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3e2d7d31 Branch: refs/heads/branch-1.2 Commit: 3e2d7d310b76c293b9ac787f204e6880f508f6ec Parents: 8c46100 Author: Patrick Wendell patr...@databricks.com Authored: Tue Jan 27 01:07:29 2015 + Committer: Patrick Wendell patr...@databricks.com Committed: Tue Jan 27 01:07:29 2015 + -- assembly/pom.xml | 2 +- bagel/pom.xml | 2 +- core/pom.xml | 2 +- examples/pom.xml | 2 +- external/flume-sink/pom.xml | 2 +- external/flume/pom.xml| 2 +- external/kafka/pom.xml| 2 +- external/mqtt/pom.xml | 2 +- external/twitter/pom.xml | 2 +- external/zeromq/pom.xml | 2 +- extras/java8-tests/pom.xml| 2 +- extras/kinesis-asl/pom.xml| 2 +- extras/spark-ganglia-lgpl/pom.xml | 2 +- graphx/pom.xml| 2 +- mllib/pom.xml | 2 +- network/common/pom.xml| 2 +- network/shuffle/pom.xml | 2 +- network/yarn/pom.xml | 2 +- pom.xml | 2 +- repl/pom.xml | 2 +- sql/catalyst/pom.xml | 2 +- sql/core/pom.xml | 2 +- sql/hive-thriftserver/pom.xml | 2 +- sql/hive/pom.xml | 2 +- streaming/pom.xml | 2 +- tools/pom.xml | 2 +- yarn/alpha/pom.xml| 2 +- yarn/pom.xml | 2 +- yarn/stable/pom.xml | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/assembly/pom.xml -- diff --git a/assembly/pom.xml b/assembly/pom.xml index 65e3ddf..d731003 100644 --- a/assembly/pom.xml +++ b/assembly/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/bagel/pom.xml -- diff --git a/bagel/pom.xml b/bagel/pom.xml index 4ead7aa..8374612 100644 --- a/bagel/pom.xml +++ b/bagel/pom.xml @@ -21,7 +21,7 @@ 
parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/core/pom.xml -- diff --git a/core/pom.xml b/core/pom.xml index 155b4c9..ceeabd6 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/examples/pom.xml -- diff --git a/examples/pom.xml b/examples/pom.xml index f5a7ed2..c7ad4dc 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/external/flume-sink/pom.xml -- diff --git a/external/flume-sink/pom.xml b/external/flume-sink/pom.xml index fe1c8fb..e0b3eae 100644 --- a/external/flume-sink/pom.xml +++ b/external/flume-sink/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../../pom.xml/relativePath /parent http://git-wip-us.apache.org/repos/asf/spark/blob/3e2d7d31/external/flume/pom.xml -- diff --git a/external/flume/pom.xml b/external/flume/pom.xml index da4bd70..f9559c1 100644 --- a/external/flume/pom.xml +++ b/external/flume/pom.xml @@ -21,7 +21,7 @@ parent groupIdorg.apache.spark/groupId artifactIdspark-parent/artifactId -version1.2.1-SNAPSHOT/version +version1.2.1/version relativePath../../pom.xml/relativePath /parent
Git Push Summary
Repository: spark Updated Tags: refs/tags/v1.2.1-rc1 [created] 3e2d7d310 - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: SPARK-4147 [CORE] Reduce log4j dependency
Repository: spark Updated Branches: refs/heads/master c094c7327 - 54e7b456d SPARK-4147 [CORE] Reduce log4j dependency Defer use of log4j class until it's known that log4j 1.2 is being used. This may avoid dealing with log4j dependencies for callers that reroute slf4j to another logging framework. The only change is to push one half of the check in the original `if` condition inside. This is a trivial change, may or may not actually solve a problem, but I think it's all that makes sense to do for SPARK-4147. Author: Sean Owen so...@cloudera.com Closes #4190 from srowen/SPARK-4147 and squashes the following commits: 4e99942 [Sean Owen] Defer use of log4j class until it's known that log4j 1.2 is being used. This may avoid dealing with log4j dependencies for callers that reroute slf4j to another logging framework. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/54e7b456 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/54e7b456 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/54e7b456 Branch: refs/heads/master Commit: 54e7b456dd56c9e52132154e699abca87563465b Parents: c094c73 Author: Sean Owen so...@cloudera.com Authored: Mon Jan 26 14:23:42 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 14:23:42 2015 -0800 -- .../main/scala/org/apache/spark/Logging.scala | 20 +++- 1 file changed, 11 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/54e7b456/core/src/main/scala/org/apache/spark/Logging.scala -- diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala index d4f2624..419d093 100644 --- a/core/src/main/scala/org/apache/spark/Logging.scala +++ b/core/src/main/scala/org/apache/spark/Logging.scala @@ -118,15 +118,17 @@ trait Logging { // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently // org.apache.logging.slf4j.Log4jLoggerFactory val 
usingLog4j12 = org.slf4j.impl.Log4jLoggerFactory.equals(binderClass) -val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements -if (!log4j12Initialized usingLog4j12) { - val defaultLogProps = org/apache/spark/log4j-defaults.properties - Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match { -case Some(url) = - PropertyConfigurator.configure(url) - System.err.println(sUsing Spark's default log4j profile: $defaultLogProps) -case None = - System.err.println(sSpark was unable to load $defaultLogProps) +if (usingLog4j12) { + val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements + if (!log4j12Initialized) { +val defaultLogProps = org/apache/spark/log4j-defaults.properties +Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match { + case Some(url) = +PropertyConfigurator.configure(url) +System.err.println(sUsing Spark's default log4j profile: $defaultLogProps) + case None = +System.err.println(sSpark was unable to load $defaultLogProps) +} } } Logging.initialized = true - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: SPARK-4147 [CORE] Reduce log4j dependency
Repository: spark Updated Branches: refs/heads/branch-1.2 ef6fe84dc - b378e9a74 SPARK-4147 [CORE] Reduce log4j dependency Defer use of log4j class until it's known that log4j 1.2 is being used. This may avoid dealing with log4j dependencies for callers that reroute slf4j to another logging framework. The only change is to push one half of the check in the original `if` condition inside. This is a trivial change, may or may not actually solve a problem, but I think it's all that makes sense to do for SPARK-4147. Author: Sean Owen so...@cloudera.com Closes #4190 from srowen/SPARK-4147 and squashes the following commits: 4e99942 [Sean Owen] Defer use of log4j class until it's known that log4j 1.2 is being used. This may avoid dealing with log4j dependencies for callers that reroute slf4j to another logging framework. (cherry picked from commit 54e7b456dd56c9e52132154e699abca87563465b) Signed-off-by: Patrick Wendell patr...@databricks.com Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b378e9a7 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b378e9a7 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b378e9a7 Branch: refs/heads/branch-1.2 Commit: b378e9a747ab8b13f49b4fc2b6c1df6fa43cc2a2 Parents: ef6fe84 Author: Sean Owen so...@cloudera.com Authored: Mon Jan 26 14:23:42 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 14:23:56 2015 -0800 -- .../main/scala/org/apache/spark/Logging.scala | 20 +++- 1 file changed, 11 insertions(+), 9 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/b378e9a7/core/src/main/scala/org/apache/spark/Logging.scala -- diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala index d4f2624..419d093 100644 --- a/core/src/main/scala/org/apache/spark/Logging.scala +++ b/core/src/main/scala/org/apache/spark/Logging.scala @@ -118,15 +118,17 @@ trait Logging { // 
org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently // org.apache.logging.slf4j.Log4jLoggerFactory val usingLog4j12 = org.slf4j.impl.Log4jLoggerFactory.equals(binderClass) -val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements -if (!log4j12Initialized usingLog4j12) { - val defaultLogProps = org/apache/spark/log4j-defaults.properties - Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match { -case Some(url) = - PropertyConfigurator.configure(url) - System.err.println(sUsing Spark's default log4j profile: $defaultLogProps) -case None = - System.err.println(sSpark was unable to load $defaultLogProps) +if (usingLog4j12) { + val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements + if (!log4j12Initialized) { +val defaultLogProps = org/apache/spark/log4j-defaults.properties +Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match { + case Some(url) = +PropertyConfigurator.configure(url) +System.err.println(sUsing Spark's default log4j profile: $defaultLogProps) + case None = +System.err.println(sSpark was unable to load $defaultLogProps) +} } } Logging.initialized = true - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: Fix command spaces issue in make-distribution.sh
Repository: spark Updated Branches: refs/heads/master 54e7b456d - b38034e87 Fix command spaces issue in make-distribution.sh Storing command in variables is tricky in bash, use an array to handle all issues with spaces, quoting, etc. See: http://mywiki.wooledge.org/BashFAQ/050 Author: David Y. Ross dyr...@gmail.com Closes #4126 from dyross/dyr-fix-make-distribution and squashes the following commits: 4ce522b [David Y. Ross] Fix command spaces issue in make-distribution.sh Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b38034e8 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b38034e8 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b38034e8 Branch: refs/heads/master Commit: b38034e878546a12c6d52f17fc961fd1a2453b97 Parents: 54e7b45 Author: David Y. Ross dyr...@gmail.com Authored: Mon Jan 26 14:26:10 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 14:26:10 2015 -0800 -- make-distribution.sh | 11 +++ 1 file changed, 7 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/b38034e8/make-distribution.sh -- diff --git a/make-distribution.sh b/make-distribution.sh index 4e2f400..0adca78 100755 --- a/make-distribution.sh +++ b/make-distribution.sh @@ -115,7 +115,7 @@ if which git /dev/null; then unset GITREV fi -if ! which $MVN /dev/null; then +if ! which $MVN /dev/null; then echo -e Could not locate Maven command: '$MVN'. echo -e Specify the Maven command with the --mvn flag exit -1; @@ -171,13 +171,16 @@ cd $SPARK_HOME export MAVEN_OPTS=-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m -BUILD_COMMAND=$MVN clean package -DskipTests $@ +# Store the command as an array because $MVN variable might have spaces in it. +# Normal quoting tricks don't work. +# See: http://mywiki.wooledge.org/BashFAQ/050 +BUILD_COMMAND=($MVN clean package -DskipTests $@) # Actually build the jar echo -e \nBuilding with... 
-echo -e \$ $BUILD_COMMAND\n +echo -e \$ ${BUILD_COMMAND[@]}\n -${BUILD_COMMAND} +${BUILD_COMMAND[@]} # Make directories rm -rf $DISTDIR - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: SPARK-960 [CORE] [TEST] JobCancellationSuite "two jobs sharing the same stage" is broken
Repository: spark Updated Branches: refs/heads/master b38034e87 - 0497ea51a SPARK-960 [CORE] [TEST] JobCancellationSuite two jobs sharing the same stage is broken This reenables and fixes this test, after addressing two issues: - The Semaphore that was intended to be shared locally was being serialized and copied; it's now a static member in the companion object as in other tests - Later changes to Spark means that cancelling the first task will not cancel the shared stage and therefore the second task should succeed Author: Sean Owen so...@cloudera.com Closes #4180 from srowen/SPARK-960 and squashes the following commits: 43da66f [Sean Owen] Fix 'two jobs sharing the same stage' test and reenable it: truly share a Semaphore locally as intended, and update expectation of failure in non-cancelled task Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0497ea51 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0497ea51 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0497ea51 Branch: refs/heads/master Commit: 0497ea51ac345f8057d222a18dbbf8eae78f5b92 Parents: b38034e Author: Sean Owen so...@cloudera.com Authored: Mon Jan 26 14:32:27 2015 -0800 Committer: Josh Rosen joshro...@databricks.com Committed: Mon Jan 26 14:32:27 2015 -0800 -- .../org/apache/spark/JobCancellationSuite.scala| 17 + 1 file changed, 9 insertions(+), 8 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/0497ea51/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala -- diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala index 7584ae7..21487bc 100644 --- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala +++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala @@ -171,11 +171,11 @@ class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter assert(jobB.get() === 
100) } - ignore(two jobs sharing the same stage) { + test(two jobs sharing the same stage) { // sem1: make sure cancel is issued after some tasks are launched -// sem2: make sure the first stage is not finished until cancel is issued +// twoJobsSharingStageSemaphore: +// make sure the first stage is not finished until cancel is issued val sem1 = new Semaphore(0) -val sem2 = new Semaphore(0) sc = new SparkContext(local[2], test) sc.addSparkListener(new SparkListener { @@ -186,7 +186,7 @@ class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter // Create two actions that would share the some stages. val rdd = sc.parallelize(1 to 10, 2).map { i = - sem2.acquire() + JobCancellationSuite.twoJobsSharingStageSemaphore.acquire() (i, i) }.reduceByKey(_+_) val f1 = rdd.collectAsync() @@ -196,13 +196,13 @@ class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter future { sem1.acquire() f1.cancel() - sem2.release(10) + JobCancellationSuite.twoJobsSharingStageSemaphore.release(10) } -// Expect both to fail now. -// TODO: update this test when we change Spark so cancelling f1 wouldn't affect f2. +// Expect f1 to fail due to cancellation, intercept[SparkException] { f1.get() } -intercept[SparkException] { f2.get() } +// but f2 should not be affected +f2.get() } def testCount() { @@ -268,4 +268,5 @@ class JobCancellationSuite extends FunSuite with Matchers with BeforeAndAfter object JobCancellationSuite { val taskStartedSemaphore = new Semaphore(0) val taskCancelledSemaphore = new Semaphore(0) + val twoJobsSharingStageSemaphore = new Semaphore(0) } - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: Updating versions for Spark 1.2.1
Repository: spark Updated Branches: refs/heads/branch-1.2 b378e9a74 - 07c0fd195 Updating versions for Spark 1.2.1 Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/07c0fd19 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/07c0fd19 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/07c0fd19 Branch: refs/heads/branch-1.2 Commit: 07c0fd195eaddf57b3fcfffbde2a5817dff967bf Parents: b378e9a Author: Patrick Wendell patr...@databricks.com Authored: Mon Jan 26 16:09:22 2015 -0800 Committer: Patrick Wendell patr...@databricks.com Committed: Mon Jan 26 16:09:22 2015 -0800 -- core/src/main/scala/org/apache/spark/package.scala | 2 +- docs/_config.yml | 4 ++-- ec2/spark_ec2.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/07c0fd19/core/src/main/scala/org/apache/spark/package.scala -- diff --git a/core/src/main/scala/org/apache/spark/package.scala b/core/src/main/scala/org/apache/spark/package.scala index 873ec3c..9bb5f5e 100644 --- a/core/src/main/scala/org/apache/spark/package.scala +++ b/core/src/main/scala/org/apache/spark/package.scala @@ -44,5 +44,5 @@ package org.apache package object spark { // For package docs only - val SPARK_VERSION = 1.2.0 + val SPARK_VERSION = 1.2.1 } http://git-wip-us.apache.org/repos/asf/spark/blob/07c0fd19/docs/_config.yml -- diff --git a/docs/_config.yml b/docs/_config.yml index 5724be8..a6c176c 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -13,8 +13,8 @@ include: # These allow the documentation to be updated with newer releases # of Spark, Scala, and Mesos. 
-SPARK_VERSION: 1.2.0 -SPARK_VERSION_SHORT: 1.2.0 +SPARK_VERSION: 1.2.1 +SPARK_VERSION_SHORT: 1.2.1 SCALA_BINARY_VERSION: 2.10 SCALA_VERSION: 2.10.4 MESOS_VERSION: 0.18.1 http://git-wip-us.apache.org/repos/asf/spark/blob/07c0fd19/ec2/spark_ec2.py -- diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py index 815186e..a137f89 100755 --- a/ec2/spark_ec2.py +++ b/ec2/spark_ec2.py @@ -39,7 +39,7 @@ import boto from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType, EBSBlockDeviceType from boto import ec2 -DEFAULT_SPARK_VERSION = 1.2.0 +DEFAULT_SPARK_VERSION = 1.2.1 SPARK_EC2_DIR = os.path.dirname(os.path.realpath(__file__)) MESOS_SPARK_EC2_BRANCH = v4 @@ -217,6 +217,7 @@ def get_spark_shark_version(opts): 1.1.0: 1.1.0, 1.1.1: 1.1.1, 1.2.0: 1.2.0, +1.2.1: 1.2.1, } version = opts.spark_version.replace(v, ) if version not in spark_shark_map: - To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org
spark git commit: [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap
Repository: spark Updated Branches: refs/heads/branch-1.2 cf65620f5 - ef6fe84dc [SPARK-5355] use j.u.c.ConcurrentHashMap instead of TrieMap j.u.c.ConcurrentHashMap is more battle tested. cc rxin JoshRosen pwendell Author: Davies Liu dav...@databricks.com Closes #4208 from davies/safe-conf and squashes the following commits: c2182dc [Davies Liu] address comments, fix tests 3a1d821 [Davies Liu] fix test da14ced [Davies Liu] Merge branch 'master' of github.com:apache/spark into safe-conf ae4d305 [Davies Liu] change to j.u.c.ConcurrentMap f8fa1cf [Davies Liu] change to TrieMap a1d769a [Davies Liu] make SparkConf thread-safe (cherry picked from commit 142093179a4c40bdd90744191034de7b94a963ff) Signed-off-by: Josh Rosen joshro...@databricks.com Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ef6fe84d Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ef6fe84d Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ef6fe84d Branch: refs/heads/branch-1.2 Commit: ef6fe84dcbfff2d937088ea179889752317896e5 Parents: cf65620 Author: Davies Liu dav...@databricks.com Authored: Mon Jan 26 12:51:32 2015 -0800 Committer: Josh Rosen joshro...@databricks.com Committed: Mon Jan 26 13:22:17 2015 -0800 -- .../main/scala/org/apache/spark/SparkConf.scala | 38 ++-- .../deploy/worker/WorkerArgumentsTest.scala | 4 +-- .../apache/spark/storage/LocalDirsSuite.scala | 2 +- 3 files changed, 23 insertions(+), 21 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/ef6fe84d/core/src/main/scala/org/apache/spark/SparkConf.scala -- diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index dd80013..3337974 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -17,9 +17,11 @@ package org.apache.spark +import java.util.concurrent.ConcurrentHashMap + import 
scala.collection.JavaConverters._ -import scala.collection.concurrent.TrieMap -import scala.collection.mutable.{HashMap, LinkedHashSet} +import scala.collection.mutable.LinkedHashSet + import org.apache.spark.serializer.KryoSerializer /** @@ -47,12 +49,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Create a SparkConf that loads defaults from system properties and the classpath */ def this() = this(true) - private[spark] val settings = new TrieMap[String, String]() + private val settings = new ConcurrentHashMap[String, String]() if (loadDefaults) { // Load any spark.* system properties for ((k, v) - System.getProperties.asScala if k.startsWith(spark.)) { - settings(k) = v + set(k, v) } } @@ -64,7 +66,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { if (value == null) { throw new NullPointerException(null value) } -settings(key) = value +settings.put(key, value) this } @@ -130,15 +132,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Set multiple parameters together */ def setAll(settings: Traversable[(String, String)]) = { -this.settings ++= settings +this.settings.putAll(settings.toMap.asJava) this } /** Set a parameter if it isn't already configured */ def setIfMissing(key: String, value: String): SparkConf = { -if (!settings.contains(key)) { - settings(key) = value -} +settings.putIfAbsent(key, value) this } @@ -164,21 +164,23 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { /** Get a parameter; throws a NoSuchElementException if it's not set */ def get(key: String): String = { -settings.getOrElse(key, throw new NoSuchElementException(key)) +getOption(key).getOrElse(throw new NoSuchElementException(key)) } /** Get a parameter, falling back to a default if not set */ def get(key: String, defaultValue: String): String = { -settings.getOrElse(key, defaultValue) +getOption(key).getOrElse(defaultValue) } /** Get a parameter as an Option */ def 
getOption(key: String): Option[String] = { -settings.get(key) +Option(settings.get(key)) } /** Get all parameters as a list of pairs */ - def getAll: Array[(String, String)] = settings.toArray + def getAll: Array[(String, String)] = { +settings.entrySet().asScala.map(x = (x.getKey, x.getValue)).toArray + } /** Get a parameter as an integer, falling back to a default if not set */ def getInt(key: String, defaultValue: Int): Int = { @@ -225,11 +227,11 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging { def getAppId: