spark git commit: [SPARK-22176][SQL] Fix overflow issue in Dataset.show

2017-10-02 Thread lixiao
Repository: spark
Updated Branches:
  refs/heads/master 4329eb2e7 -> fa225da74


[SPARK-22176][SQL] Fix overflow issue in Dataset.show

## What changes were proposed in this pull request?
This PR fixes an overflow issue in `Dataset.show`, demonstrated below:
```
scala> Seq((1, 2), (3, 4)).toDF("a", "b").show(Int.MaxValue)
org.apache.spark.sql.AnalysisException: The limit expression must be equal to or greater than 0, but got -2147483648;;
GlobalLimit -2147483648
+- LocalLimit -2147483648
   +- Project [_1#27218 AS a#27221, _2#27219 AS b#27222]
      +- LocalRelation [_1#27218, _2#27219]

  at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.failAnalysis(CheckAnalysis.scala:41)
  at org.apache.spark.sql.catalyst.analysis.Analyzer.failAnalysis(Analyzer.scala:89)
  at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.org$apache$spark$sql$catalyst$analysis$CheckAnalysis$$checkLimitClause(CheckAnalysis.scala:70)
  at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:234)
  at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:80)
  at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:127)
```
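
For context, the negative limit comes from integer wraparound: `showString` fetches `numRows + 1` rows to decide whether to print the "only showing top N rows" footer, and `Int.MaxValue + 1` silently wraps to `Int.MinValue`. A minimal illustration in plain Scala:

```scala
scala> Int.MaxValue + 1
res0: Int = -2147483648
```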

## How was this patch tested?
Added tests in `DataFrameSuite`.

Author: Takeshi Yamamuro 

Closes #19401 from maropu/MaxValueInShowString.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fa225da7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fa225da7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fa225da7

Branch: refs/heads/master
Commit: fa225da7463e384529da14706e44f4a09772e5c1
Parents: 4329eb2
Author: Takeshi Yamamuro 
Authored: Mon Oct 2 15:25:33 2017 -0700
Committer: gatorsmile 
Committed: Mon Oct 2 15:25:33 2017 -0700

--
 .../src/main/scala/org/apache/spark/sql/Dataset.scala   |  2 +-
 .../scala/org/apache/spark/sql/DataFrameSuite.scala | 12 
 2 files changed, 13 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/fa225da7/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
--
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index f2a76a5..b70dfc0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -237,7 +237,7 @@ class Dataset[T] private[sql](
    */
   private[sql] def showString(
       _numRows: Int, truncate: Int = 20, vertical: Boolean = false): String = {
-    val numRows = _numRows.max(0)
+    val numRows = _numRows.max(0).min(Int.MaxValue - 1)
     val takeResult = toDF().take(numRows + 1)
     val hasMoreData = takeResult.length > numRows
     val data = takeResult.take(numRows)
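
A minimal sketch of the clamp's arithmetic (plain Scala, no Spark required):

```scala
// Floor negative requests at 0 and cap at Int.MaxValue - 1 so that the
// subsequent take(numRows + 1) can no longer overflow.
def clampRows(requested: Int): Int = requested.max(0).min(Int.MaxValue - 1)

assert(clampRows(-5) == 0)                           // negatives still floor at 0
assert(clampRows(10) == 10)                          // ordinary requests unchanged
assert(clampRows(Int.MaxValue) + 1 == Int.MaxValue)  // the +1 stays in range
```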

http://git-wip-us.apache.org/repos/asf/spark/blob/fa225da7/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
--
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 672deea..dd8f54b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -1045,6 +1045,18 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
     assert(testData.select($"*").showString(0) === expectedAnswer)
   }
 
+  test("showString(Int.MaxValue)") {
+    val df = Seq((1, 2), (3, 4)).toDF("a", "b")
+    val expectedAnswer = """+---+---+
+                           ||  a|  b|
+                           |+---+---+
+                           ||  1|  2|
+                           ||  3|  4|
+                           |+---+---+
+                           |""".stripMargin
+    assert(df.showString(Int.MaxValue) === expectedAnswer)
+  }
+
   test("showString(0), vertical = true") {
     val expectedAnswer = "(0 rows)\n"
     assert(testData.select($"*").showString(0, vertical = true) === expectedAnswer)





spark git commit: [SPARK-16944][Mesos] Improve data locality when launching new executors when dynamic allocation is enabled

2017-10-02 Thread haoyuan
Repository: spark
Updated Branches:
  refs/heads/master e5431f2cf -> 4329eb2e7


[SPARK-16944][Mesos] Improve data locality when launching new executors when 
dynamic allocation is enabled

## What changes were proposed in this pull request?

Improve the Spark-Mesos coarse-grained scheduler to consider the preferred 
locations when dynamic allocation is enabled.
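
A hedged illustration of the idea (not the backend's actual code; the names below are invented): given the count of pending tasks that prefer each host, offers from hosts with more local tasks are considered first.

```scala
// Illustrative sketch only: rank offered hosts by how many pending tasks
// prefer them, so new executors launch where data locality is best.
def rankOffers(offeredHosts: Seq[String], hostToLocalTaskCount: Map[String, Int]): Seq[String] =
  offeredHosts.sortBy(host => -hostToLocalTaskCount.getOrElse(host, 0))

rankOffers(Seq("host-a", "host-b", "host-c"), Map("host-b" -> 4, "host-c" -> 1))
// => List(host-b, host-c, host-a)
```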

## How was this patch tested?

Added a unit test and performed manual testing on AWS.

Author: Gene Pang 

Closes #18098 from gpang/mesos_data_locality.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4329eb2e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4329eb2e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4329eb2e

Branch: refs/heads/master
Commit: 4329eb2e73181819bb712f57ca9c7feac0d640ea
Parents: e5431f2
Author: Gene Pang 
Authored: Mon Oct 2 15:09:11 2017 -0700
Committer: haoyuan 
Committed: Mon Oct 2 15:09:11 2017 -0700

--
 .../apache/spark/internal/config/package.scala  |  4 ++
 .../apache/spark/scheduler/TaskSetManager.scala |  6 +-
 .../MesosCoarseGrainedSchedulerBackend.scala| 52 ++--
 ...esosCoarseGrainedSchedulerBackendSuite.scala | 62 
 .../spark/scheduler/cluster/mesos/Utils.scala   |  6 ++
 5 files changed, 123 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/4329eb2e/core/src/main/scala/org/apache/spark/internal/config/package.scala
--
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 44a2815..d85b6a0 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -72,6 +72,10 @@ package object config {
   private[spark] val DYN_ALLOCATION_MAX_EXECUTORS =
     ConfigBuilder("spark.dynamicAllocation.maxExecutors").intConf.createWithDefault(Int.MaxValue)
 
+  private[spark] val LOCALITY_WAIT = ConfigBuilder("spark.locality.wait")
+    .timeConf(TimeUnit.MILLISECONDS)
+    .createWithDefaultString("3s")
+
   private[spark] val SHUFFLE_SERVICE_ENABLED =
     ConfigBuilder("spark.shuffle.service.enabled").booleanConf.createWithDefault(false)
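
The new entry is a typed time config: `timeConf(TimeUnit.MILLISECONDS)` with the default string `"3s"` means `conf.get(LOCALITY_WAIT)` returns a `Long` in milliseconds. A small sketch of the underlying time-string parsing, using Spark's public `JavaUtils` helper:

```scala
import org.apache.spark.network.util.JavaUtils

// "3s" parses to 3000 milliseconds; the typed entry hands callers this
// Long directly instead of the raw string.
val defaultWaitMs: Long = JavaUtils.timeStringAsMs("3s")
assert(defaultWaitMs == 3000L)
```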
 

http://git-wip-us.apache.org/repos/asf/spark/blob/4329eb2e/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
--
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
index bb86741..3bdede6 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala
@@ -27,7 +27,7 @@ import scala.util.control.NonFatal
 
 import org.apache.spark._
 import org.apache.spark.TaskState.TaskState
-import org.apache.spark.internal.Logging
+import org.apache.spark.internal.{config, Logging}
 import org.apache.spark.scheduler.SchedulingMode._
 import org.apache.spark.util.{AccumulatorV2, Clock, SystemClock, Utils}
 import org.apache.spark.util.collection.MedianHeap
@@ -980,7 +980,7 @@ private[spark] class TaskSetManager(
   }
 
   private def getLocalityWait(level: TaskLocality.TaskLocality): Long = {
-    val defaultWait = conf.get("spark.locality.wait", "3s")
+    val defaultWait = conf.get(config.LOCALITY_WAIT)
     val localityWaitKey = level match {
       case TaskLocality.PROCESS_LOCAL => "spark.locality.wait.process"
       case TaskLocality.NODE_LOCAL => "spark.locality.wait.node"
@@ -989,7 +989,7 @@ private[spark] class TaskSetManager(
     }
 
     if (localityWaitKey != null) {
-      conf.getTimeAsMs(localityWaitKey, defaultWait)
+      conf.getTimeAsMs(localityWaitKey, defaultWait.toString)
     } else {
       0L
     }
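
Note the `defaultWait.toString` round-trip above: the typed entry now yields a `Long` of milliseconds, and `getTimeAsMs` interprets a bare number string as milliseconds, so the default behaves exactly as the old `"3s"` did. A hedged sketch against the public `SparkConf` API:

```scala
import org.apache.spark.SparkConf

// A bare "3000" is read by getTimeAsMs as 3000 ms, so passing
// defaultWait.toString preserves the previous behavior.
val conf = new SparkConf(false)  // assume no spark.locality.wait.* overrides set
val nodeWaitMs = conf.getTimeAsMs("spark.locality.wait.node", 3000L.toString)
assert(nodeWaitMs == 3000L)
```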

http://git-wip-us.apache.org/repos/asf/spark/blob/4329eb2e/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
--
diff --git a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
index 2669987..80c0a04 100644
--- a/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala
+++ b/resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosCoarseGrainedSchedulerBackend.scala

spark git commit: [SPARK-22158][SQL] convertMetastore should not ignore table property

2017-10-02 Thread lixiao
Repository: spark
Updated Branches:
  refs/heads/master 8fab7995d -> e5431f2cf


[SPARK-22158][SQL] convertMetastore should not ignore table property

## What changes were proposed in this pull request?

From the beginning, `convertMetastoreOrc` has ignored table properties and used an empty map instead. This PR fixes that; for the diff, please see [this](https://github.com/apache/spark/pull/19382/files?w=1). `convertMetastoreParquet` ignores them in the same way.

```scala
val options = Map[String, String]()
```

- [SPARK-14070: HiveMetastoreCatalog.scala](https://github.com/apache/spark/pull/11891/files#diff-ee66e11b56c21364760a5ed2b783f863R650)
- [Master branch: HiveStrategies.scala](https://github.com/apache/spark/blob/master/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala#L197)

## How was this patch tested?

Passes Jenkins with an updated test suite.

Author: Dongjoon Hyun 

Closes #19382 from dongjoon-hyun/SPARK-22158.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e5431f2c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e5431f2c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e5431f2c

Branch: refs/heads/master
Commit: e5431f2cfddc8e96194827a2123b92716c7a1467
Parents: 8fab799
Author: Dongjoon Hyun 
Authored: Mon Oct 2 15:00:26 2017 -0700
Committer: gatorsmile 
Committed: Mon Oct 2 15:00:26 2017 -0700

--
 .../apache/spark/sql/hive/HiveStrategies.scala  |  4 +-
 .../spark/sql/hive/execution/HiveDDLSuite.scala | 54 +---
 2 files changed, 50 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/e5431f2c/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
--
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
index 805b317..3592b8f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
@@ -189,12 +189,12 @@ case class RelationConversions(
   private def convert(relation: HiveTableRelation): LogicalRelation = {
     val serde = relation.tableMeta.storage.serde.getOrElse("").toLowerCase(Locale.ROOT)
     if (serde.contains("parquet")) {
-      val options = Map(ParquetOptions.MERGE_SCHEMA ->
+      val options = relation.tableMeta.storage.properties + (ParquetOptions.MERGE_SCHEMA ->
         conf.getConf(HiveUtils.CONVERT_METASTORE_PARQUET_WITH_SCHEMA_MERGING).toString)
       sessionCatalog.metastoreCatalog
         .convertToLogicalRelation(relation, options, classOf[ParquetFileFormat], "parquet")
     } else {
-      val options = Map[String, String]()
+      val options = relation.tableMeta.storage.properties
       sessionCatalog.metastoreCatalog
         .convertToLogicalRelation(relation, options, classOf[OrcFileFormat], "orc")
     }
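
The behavioral change is plain map-merge semantics: the table's storage properties now seed the options map, with the `mergeSchema` flag layered on top for Parquet (Scala's `+` lets the added pair win on a key collision). A minimal sketch with illustrative keys:

```scala
// Storage properties seed the options; mergeSchema is added on top.
val storageProps = Map("orc.compress" -> "ZLIB")   // illustrative table property
val parquetOptions = storageProps + ("mergeSchema" -> "false")
// => Map(orc.compress -> ZLIB, mergeSchema -> false)
val orcOptions = storageProps  // the ORC path now keeps the properties too
```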

http://git-wip-us.apache.org/repos/asf/spark/blob/e5431f2c/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
--
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 668da5f..02e26bb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -23,6 +23,8 @@ import java.net.URI
 import scala.language.existentials
 
 import org.apache.hadoop.fs.Path
+import org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER
+import org.apache.parquet.hadoop.ParquetFileReader
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.SparkException
@@ -32,6 +34,7 @@ import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, TableAl
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.execution.command.{DDLSuite, DDLUtils}
 import org.apache.spark.sql.hive.HiveExternalCatalog
+import org.apache.spark.sql.hive.HiveUtils.{CONVERT_METASTORE_ORC, CONVERT_METASTORE_PARQUET}
 import org.apache.spark.sql.hive.orc.OrcFileOperator
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
@@ -1455,12 +1458,8 @@ class HiveDDLSuite
     sql("INSERT INTO t SELECT 1")
     checkAnswer(spark.table("t"), Row(1))
     // Check if this is compressed as ZLIB.
-    val maybeOrcFile = path.listFiles().find(!_.getName.endsWith(".crc"))
-    assert(maybeOrcFile.isDefined)

[2/2] spark git commit: Preparing development version 2.1.3-SNAPSHOT

2017-10-02 Thread holden
Preparing development version 2.1.3-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/49e8ccc4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/49e8ccc4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/49e8ccc4

Branch: refs/heads/branch-2.1
Commit: 49e8ccc436a41b6ca3aa3122acf9b891e0acd048
Parents: 2abaea9
Author: Holden Karau 
Authored: Mon Oct 2 11:57:24 2017 -0700
Committer: Holden Karau 
Committed: Mon Oct 2 11:57:24 2017 -0700

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/java8-tests/pom.xml  | 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mesos/pom.xml | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 yarn/pom.xml  | 2 +-
 39 files changed, 40 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/49e8ccc4/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 899d410..6c380b6 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.1.2
+Version: 2.1.3
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/49e8ccc4/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 133f8e6..e9f915a 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.2</version>
+    <version>2.1.3-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/49e8ccc4/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index d2631e4..7e203e7 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.2</version>
+    <version>2.1.3-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/49e8ccc4/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index c12d480..92dd275 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.2</version>
+    <version>2.1.3-SNAPSHOT</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/49e8ccc4/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index d22db36..abca418 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.2</version>
+    <version>2.1.3-SNAPSHOT</version>
 

[spark] Git Push Summary

2017-10-02 Thread holden
Repository: spark
Updated Tags:  refs/tags/v2.1.2-rc4 [created] 2abaea9e4




[1/2] spark git commit: Preparing Spark release v2.1.2-rc4

2017-10-02 Thread holden
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 ff4179bba -> 49e8ccc43


Preparing Spark release v2.1.2-rc4


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2abaea9e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2abaea9e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2abaea9e

Branch: refs/heads/branch-2.1
Commit: 2abaea9e40fce81cd4626498e0f5c28a70917499
Parents: ff4179b
Author: Holden Karau 
Authored: Mon Oct 2 11:57:15 2017 -0700
Committer: Holden Karau 
Committed: Mon Oct 2 11:57:15 2017 -0700

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/java8-tests/pom.xml  | 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mesos/pom.xml | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 yarn/pom.xml  | 2 +-
 39 files changed, 40 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/2abaea9e/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 6c380b6..899d410 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.1.3
+Version: 2.1.2
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/2abaea9e/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index e9f915a..133f8e6 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.3-SNAPSHOT</version>
+    <version>2.1.2</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/2abaea9e/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 7e203e7..d2631e4 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.3-SNAPSHOT</version>
+    <version>2.1.2</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/2abaea9e/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index 92dd275..c12d480 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   <parent>
     <groupId>org.apache.spark</groupId>
     <artifactId>spark-parent_2.11</artifactId>
-    <version>2.1.3-SNAPSHOT</version>
+    <version>2.1.2</version>
     <relativePath>../../pom.xml</relativePath>
   </parent>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/2abaea9e/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index abca418..d22db36 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 

spark git commit: [SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

2017-10-02 Thread holden
Repository: spark
Updated Branches:
  refs/heads/branch-2.2 7bf25e086 -> b9adddb6a


[SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

## What changes were proposed in this pull request?

When zinc is running, the pwd might be the root of the project rather than root/core/, so the relative path to the SparkR install script breaks. A quick solution is to anchor the script path at the module's base directory and go one level up from there: if we are in the root everything works as before, and if we are in core/ the path now resolves correctly.

## How was this patch tested?

set -x in the SparkR install scripts.

Author: Holden Karau 

Closes #19402 from holdenk/SPARK-22167-sparkr-packaging-issue-allow-zinc.

(cherry picked from commit 8fab7995d36c7bc4524393b20a4e524dbf6bbf62)
Signed-off-by: Holden Karau 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b9adddb6
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b9adddb6
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b9adddb6

Branch: refs/heads/branch-2.2
Commit: b9adddb6a3d4f35c7ad67f2e4ec903ede05893b6
Parents: 7bf25e0
Author: Holden Karau 
Authored: Mon Oct 2 11:46:51 2017 -0700
Committer: Holden Karau 
Committed: Mon Oct 2 11:47:11 2017 -0700

--
 R/install-dev.sh | 1 +
 core/pom.xml | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/b9adddb6/R/install-dev.sh
--
diff --git a/R/install-dev.sh b/R/install-dev.sh
index d613552..9fbc999 100755
--- a/R/install-dev.sh
+++ b/R/install-dev.sh
@@ -28,6 +28,7 @@
 
 set -o pipefail
 set -e
+set -x
 
 FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 LIB_DIR="$FWDIR/lib"

http://git-wip-us.apache.org/repos/asf/spark/blob/b9adddb6/core/pom.xml
--
diff --git a/core/pom.xml b/core/pom.xml
index 254a9b9..977396c 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -464,7 +464,7 @@
   
 
 
-  ..${file.separator}R${file.separator}install-dev${script.extension}
+  ${project.basedir}${file.separator}..${file.separator}R${file.separator}install-dev${script.extension}
 
   
 





spark git commit: [SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

2017-10-02 Thread holden
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 81e4008c2 -> ff4179bba


[SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

## What changes were proposed in this pull request?

When zinc is running, the pwd might be the root of the project rather than root/core/, so the relative path to the SparkR install script breaks. A quick solution is to anchor the script path at the module's base directory and go one level up from there: if we are in the root everything works as before, and if we are in core/ the path now resolves correctly.

## How was this patch tested?

set -x in the SparkR install scripts.

Author: Holden Karau 

Closes #19402 from holdenk/SPARK-22167-sparkr-packaging-issue-allow-zinc.

(cherry picked from commit 8fab7995d36c7bc4524393b20a4e524dbf6bbf62)
Signed-off-by: Holden Karau 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ff4179bb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ff4179bb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ff4179bb

Branch: refs/heads/branch-2.1
Commit: ff4179bba7651f8645840c81db80335388986b29
Parents: 81e4008
Author: Holden Karau 
Authored: Mon Oct 2 11:46:51 2017 -0700
Committer: Holden Karau 
Committed: Mon Oct 2 11:47:36 2017 -0700

--
 R/install-dev.sh | 1 +
 core/pom.xml | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/ff4179bb/R/install-dev.sh
--
diff --git a/R/install-dev.sh b/R/install-dev.sh
index 0f88120..43bb1bc 100755
--- a/R/install-dev.sh
+++ b/R/install-dev.sh
@@ -28,6 +28,7 @@
 
 set -o pipefail
 set -e
+set -x
 
 FWDIR="$(cd `dirname $0`; pwd)"
 LIB_DIR="$FWDIR/lib"

http://git-wip-us.apache.org/repos/asf/spark/blob/ff4179bb/core/pom.xml
--
diff --git a/core/pom.xml b/core/pom.xml
index 8fe1288..fbb0eda 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -460,7 +460,7 @@
   
 
 
-  ..${file.separator}R${file.separator}install-dev${script.extension}
+  ${project.basedir}${file.separator}..${file.separator}R${file.separator}install-dev${script.extension}
 
   
 





spark git commit: [SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

2017-10-02 Thread holden
Repository: spark
Updated Branches:
  refs/heads/master 405c0e99e -> 8fab7995d


[SPARK-22167][R][BUILD] sparkr packaging issue allow zinc

## What changes were proposed in this pull request?

When zinc is running, the pwd might be the root of the project rather than root/core/, so the relative path to the SparkR install script breaks. A quick solution is to anchor the script path at the module's base directory and go one level up from there: if we are in the root everything works as before, and if we are in core/ the path now resolves correctly.

## How was this patch tested?

set -x in the SparkR install scripts.

Author: Holden Karau 

Closes #19402 from holdenk/SPARK-22167-sparkr-packaging-issue-allow-zinc.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8fab7995
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8fab7995
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8fab7995

Branch: refs/heads/master
Commit: 8fab7995d36c7bc4524393b20a4e524dbf6bbf62
Parents: 405c0e9
Author: Holden Karau 
Authored: Mon Oct 2 11:46:51 2017 -0700
Committer: Holden Karau 
Committed: Mon Oct 2 11:46:51 2017 -0700

--
 R/install-dev.sh | 1 +
 core/pom.xml | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/8fab7995/R/install-dev.sh
--
diff --git a/R/install-dev.sh b/R/install-dev.sh
index d613552..9fbc999 100755
--- a/R/install-dev.sh
+++ b/R/install-dev.sh
@@ -28,6 +28,7 @@
 
 set -o pipefail
 set -e
+set -x
 
 FWDIR="$(cd "`dirname "${BASH_SOURCE[0]}"`"; pwd)"
 LIB_DIR="$FWDIR/lib"

http://git-wip-us.apache.org/repos/asf/spark/blob/8fab7995/core/pom.xml
--
diff --git a/core/pom.xml b/core/pom.xml
index 0966914..54f7a34 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -499,7 +499,7 @@
   
 
 
-  ..${file.separator}R${file.separator}install-dev${script.extension}
+  ${project.basedir}${file.separator}..${file.separator}R${file.separator}install-dev${script.extension}
 
   
 





spark git commit: [SPARK-18971][CORE] Upgrade Netty to 4.0.43.Final

2017-10-02 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 78661f95e -> 81e4008c2


[SPARK-18971][CORE] Upgrade Netty to 4.0.43.Final

## What changes were proposed in this pull request?

Upgrade Netty to `4.0.43.Final` to add the fix for 
https://github.com/netty/netty/issues/6153

## How was this patch tested?

Jenkins

Author: Shixiong Zhu 

Closes #16568 from zsxwing/SPARK-18971.

(cherry picked from commit a8567e34dc77a32ddeb280f8f9f603f301722059)
Signed-off-by: Sean Owen 

# Conflicts:
#   dev/deps/spark-deps-hadoop-2.2
#   dev/deps/spark-deps-hadoop-2.3
#   dev/deps/spark-deps-hadoop-2.4
#   dev/deps/spark-deps-hadoop-2.6
#   dev/deps/spark-deps-hadoop-2.7


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/81e4008c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/81e4008c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/81e4008c

Branch: refs/heads/branch-2.1
Commit: 81e4008c2a759060cb177349317d8a0b23c4732d
Parents: 78661f9
Author: Shixiong Zhu 
Authored: Sun Jan 15 11:15:35 2017 +
Committer: Sean Owen 
Committed: Mon Oct 2 19:33:21 2017 +0100

--
 dev/deps/spark-deps-hadoop-2.2 | 2 +-
 dev/deps/spark-deps-hadoop-2.3 | 2 +-
 dev/deps/spark-deps-hadoop-2.4 | 2 +-
 dev/deps/spark-deps-hadoop-2.6 | 2 +-
 dev/deps/spark-deps-hadoop-2.7 | 2 +-
 pom.xml| 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/dev/deps/spark-deps-hadoop-2.2
--
diff --git a/dev/deps/spark-deps-hadoop-2.2 b/dev/deps/spark-deps-hadoop-2.2
index da17020..635b3a6 100644
--- a/dev/deps/spark-deps-hadoop-2.2
+++ b/dev/deps/spark-deps-hadoop-2.2
@@ -123,7 +123,7 @@ metrics-json-3.1.2.jar
 metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 netty-3.8.0.Final.jar
-netty-all-4.0.42.Final.jar
+netty-all-4.0.43.Final.jar
 objenesis-2.1.jar
 opencsv-2.3.jar
 oro-2.0.8.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/dev/deps/spark-deps-hadoop-2.3
--
diff --git a/dev/deps/spark-deps-hadoop-2.3 b/dev/deps/spark-deps-hadoop-2.3
index 92746f0..472fe68 100644
--- a/dev/deps/spark-deps-hadoop-2.3
+++ b/dev/deps/spark-deps-hadoop-2.3
@@ -130,7 +130,7 @@ metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 mx4j-3.0.2.jar
 netty-3.8.0.Final.jar
-netty-all-4.0.42.Final.jar
+netty-all-4.0.43.Final.jar
 objenesis-2.1.jar
 opencsv-2.3.jar
 oro-2.0.8.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/dev/deps/spark-deps-hadoop-2.4
--
diff --git a/dev/deps/spark-deps-hadoop-2.4 b/dev/deps/spark-deps-hadoop-2.4
index 49d99ae..5f9fa97 100644
--- a/dev/deps/spark-deps-hadoop-2.4
+++ b/dev/deps/spark-deps-hadoop-2.4
@@ -130,7 +130,7 @@ metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 mx4j-3.0.2.jar
 netty-3.8.0.Final.jar
-netty-all-4.0.42.Final.jar
+netty-all-4.0.43.Final.jar
 objenesis-2.1.jar
 opencsv-2.3.jar
 oro-2.0.8.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/dev/deps/spark-deps-hadoop-2.6
--
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 652fcb2..80f8b5b 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -138,7 +138,7 @@ metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 mx4j-3.0.2.jar
 netty-3.8.0.Final.jar
-netty-all-4.0.42.Final.jar
+netty-all-4.0.43.Final.jar
 objenesis-2.1.jar
 opencsv-2.3.jar
 oro-2.0.8.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/dev/deps/spark-deps-hadoop-2.7
--
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index 16b5c82..8d150ff 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -139,7 +139,7 @@ metrics-jvm-3.1.2.jar
 minlog-1.3.0.jar
 mx4j-3.0.2.jar
 netty-3.8.0.Final.jar
-netty-all-4.0.42.Final.jar
+netty-all-4.0.43.Final.jar
 objenesis-2.1.jar
 opencsv-2.3.jar
 oro-2.0.8.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/81e4008c/pom.xml
--
diff --git a/pom.xml b/pom.xml
index c6d404a..85f3145 100644
--- a/pom.xml
+++ b/pom.xml
@@ -558,7 +558,7 @@
   
 io.netty
 netty-all
-4.0.42.Final
+4.0.43.Final
   
   
 io.netty



spark git commit: [SPARK-22173][WEB-UI] Table CSS style needs to be adjusted in History Page and in Executors Page.

2017-10-02 Thread srowen
Repository: spark
Updated Branches:
  refs/heads/master 3ca367083 -> 405c0e99e


[SPARK-22173][WEB-UI] Table CSS style needs to be adjusted in History Page and 
in Executors Page.

## What changes were proposed in this pull request?

There are two problems with the table CSS style:

1. The current table style is too crowded, and the table width cannot adapt to its contents.

2. The table style differs from the job, stage, task, master, and worker pages; the Spark web UI should be consistent across pages.

fix before:
![01](https://user-images.githubusercontent.com/26266482/31041261-c6766c3a-a5c4-11e7-97a7-96bd51ef12bd.png)

![02](https://user-images.githubusercontent.com/26266482/31041266-d75b6a32-a5c4-11e7-8071-e3a39b80.png)

--

fix after:
![1](https://user-images.githubusercontent.com/26266482/31041162-808a5a3e-a5c3-11e7-8d92-d763b500ce53.png)

![2](https://user-images.githubusercontent.com/26266482/31041166-86e583e0-a5c3-11e7-949c-11c370db9e27.png)

## How was this patch tested?

manual tests

Please review http://spark.apache.org/contributing.html before opening a pull 
request.

Author: guoxiaolong 

Closes #19397 from guoxiaolongzte/SPARK-22173.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/405c0e99
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/405c0e99
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/405c0e99

Branch: refs/heads/master
Commit: 405c0e99e7697bfa88aa4abc9a55ce5e043e48b1
Parents: 3ca3670
Author: guoxiaolong 
Authored: Mon Oct 2 08:07:56 2017 +0100
Committer: Sean Owen 
Committed: Mon Oct 2 08:07:56 2017 +0100

--
 .../main/scala/org/apache/spark/deploy/history/HistoryPage.scala | 4 ++--
 core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/405c0e99/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
--
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
index af14717..6399dcc 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryPage.scala
@@ -37,7 +37,7 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
 val content =
   
   
-  
+  
 
   {providerConfig.map { case (k, v) => {k}: 
{v} }}
 
@@ -58,7 +58,7 @@ private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("")
 {
 if (allAppsSize > 0) {
++
- ++
+ ++
  ++
  ++
 setAppLimit({parent.maxApplications})

http://git-wip-us.apache.org/repos/asf/spark/blob/405c0e99/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
--
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
index d63381c..7b2767f 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
@@ -82,7 +82,7 @@ private[ui] class ExecutorsPage(
   
 
++
-   ++
+   ++
++
++
   setThreadDumpEnabled({threadDumpEnabled})

