svn commit: r27550 - in /dev/spark/2.3.2-SNAPSHOT-2018_06_18_22_01-50cdb41-docs: ./ _site/ _site/api/ _site/api/R/ _site/api/java/ _site/api/java/lib/ _site/api/java/org/ _site/api/java/org/apache/ _s

2018-06-18 Thread pwendell
Author: pwendell
Date: Tue Jun 19 05:16:22 2018
New Revision: 27550

Log:
Apache Spark 2.3.2-SNAPSHOT-2018_06_18_22_01-50cdb41 docs


[This commit notification would consist of 1443 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-24542][SQL] UDF series UDFXPathXXXX allow users to pass carefully crafted XML to access arbitrary files

2018-06-18 Thread wenchen
Repository: spark
Updated Branches:
  refs/heads/branch-2.3 b8dbfcc57 -> 50cdb4138


[SPARK-24542][SQL] UDF series UDFXPath allow users to pass carefully 
crafted XML to access arbitrary files

## What changes were proposed in this pull request?

UDF series UDFXPath allow users to pass carefully crafted XML to access 
arbitrary files. Spark does not have built-in access control. When users use 
the external access control library, users might bypass them and access the 
file contents.

This PR basically patches the Hive fix to Apache Spark. 
https://issues.apache.org/jira/browse/HIVE-18879

## How was this patch tested?

A unit test case

Author: Xiao Li 

Closes #21549 from gatorsmile/xpathSecurity.

(cherry picked from commit 9a75c18290fff7d116cf88a44f9120bf67d8bd27)
Signed-off-by: Wenchen Fan 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/50cdb413
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/50cdb413
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/50cdb413

Branch: refs/heads/branch-2.3
Commit: 50cdb4138e5cb0e0d15f739db8066f3ea86ef037
Parents: b8dbfcc
Author: Xiao Li 
Authored: Mon Jun 18 20:17:04 2018 -0700
Committer: Wenchen Fan 
Committed: Mon Jun 18 20:17:32 2018 -0700

--
 .../catalyst/expressions/xml/UDFXPathUtil.java  | 28 +++-
 .../expressions/xml/UDFXPathUtilSuite.scala | 21 +++
 .../expressions/xml/XPathExpressionSuite.scala  |  5 ++--
 3 files changed, 51 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/50cdb413/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
--
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
index d224332..023ec13 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
@@ -21,6 +21,9 @@ import java.io.IOException;
 import java.io.Reader;
 
 import javax.xml.namespace.QName;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpression;
@@ -37,9 +40,15 @@ import org.xml.sax.InputSource;
  * This is based on Hive's UDFXPathUtil implementation.
  */
 public class UDFXPathUtil {
+  public static final String SAX_FEATURE_PREFIX = 
"http://xml.org/sax/features/;;
+  public static final String EXTERNAL_GENERAL_ENTITIES_FEATURE = 
"external-general-entities";
+  public static final String EXTERNAL_PARAMETER_ENTITIES_FEATURE = 
"external-parameter-entities";
+  private DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+  private DocumentBuilder builder = null;
   private XPath xpath = XPathFactory.newInstance().newXPath();
   private ReusableStringReader reader = new ReusableStringReader();
   private InputSource inputSource = new InputSource(reader);
+
   private XPathExpression expression = null;
   private String oldPath = null;
 
@@ -65,14 +74,31 @@ public class UDFXPathUtil {
   return null;
 }
 
+if (builder == null){
+  try {
+initializeDocumentBuilderFactory();
+builder = dbf.newDocumentBuilder();
+  } catch (ParserConfigurationException e) {
+throw new RuntimeException(
+  "Error instantiating DocumentBuilder, cannot build xml parser", e);
+  }
+}
+
 reader.set(xml);
 try {
-  return expression.evaluate(inputSource, qname);
+  return expression.evaluate(builder.parse(inputSource), qname);
 } catch (XPathExpressionException e) {
   throw new RuntimeException("Invalid XML document: " + e.getMessage() + 
"\n" + xml, e);
+} catch (Exception e) {
+  throw new RuntimeException("Error loading expression '" + oldPath + "'", 
e);
 }
   }
 
+  private void initializeDocumentBuilderFactory() throws 
ParserConfigurationException {
+dbf.setFeature(SAX_FEATURE_PREFIX + EXTERNAL_GENERAL_ENTITIES_FEATURE, 
false);
+dbf.setFeature(SAX_FEATURE_PREFIX + EXTERNAL_PARAMETER_ENTITIES_FEATURE, 
false);
+  }
+
   public Boolean evalBoolean(String xml, String path) throws 
XPathExpressionException {
 return (Boolean) eval(xml, path, XPathConstants.BOOLEAN);
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/50cdb413/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtilSuite.scala

spark git commit: [SPARK-24542][SQL] UDF series UDFXPathXXXX allow users to pass carefully crafted XML to access arbitrary files

2018-06-18 Thread wenchen
Repository: spark
Updated Branches:
  refs/heads/master 1737d45e0 -> 9a75c1829


[SPARK-24542][SQL] UDF series UDFXPath allow users to pass carefully 
crafted XML to access arbitrary files

## What changes were proposed in this pull request?

UDF series UDFXPath allow users to pass carefully crafted XML to access 
arbitrary files. Spark does not have built-in access control. When users use 
the external access control library, users might bypass them and access the 
file contents.

This PR basically patches the Hive fix to Apache Spark. 
https://issues.apache.org/jira/browse/HIVE-18879

## How was this patch tested?

A unit test case

Author: Xiao Li 

Closes #21549 from gatorsmile/xpathSecurity.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9a75c182
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9a75c182
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9a75c182

Branch: refs/heads/master
Commit: 9a75c18290fff7d116cf88a44f9120bf67d8bd27
Parents: 1737d45
Author: Xiao Li 
Authored: Mon Jun 18 20:17:04 2018 -0700
Committer: Wenchen Fan 
Committed: Mon Jun 18 20:17:04 2018 -0700

--
 .../catalyst/expressions/xml/UDFXPathUtil.java  | 28 +++-
 .../expressions/xml/UDFXPathUtilSuite.scala | 21 +++
 .../expressions/xml/XPathExpressionSuite.scala  |  5 ++--
 3 files changed, 51 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/9a75c182/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
--
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
index d224332..023ec13 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtil.java
@@ -21,6 +21,9 @@ import java.io.IOException;
 import java.io.Reader;
 
 import javax.xml.namespace.QName;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPath;
 import javax.xml.xpath.XPathConstants;
 import javax.xml.xpath.XPathExpression;
@@ -37,9 +40,15 @@ import org.xml.sax.InputSource;
  * This is based on Hive's UDFXPathUtil implementation.
  */
 public class UDFXPathUtil {
+  public static final String SAX_FEATURE_PREFIX = 
"http://xml.org/sax/features/;;
+  public static final String EXTERNAL_GENERAL_ENTITIES_FEATURE = 
"external-general-entities";
+  public static final String EXTERNAL_PARAMETER_ENTITIES_FEATURE = 
"external-parameter-entities";
+  private DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+  private DocumentBuilder builder = null;
   private XPath xpath = XPathFactory.newInstance().newXPath();
   private ReusableStringReader reader = new ReusableStringReader();
   private InputSource inputSource = new InputSource(reader);
+
   private XPathExpression expression = null;
   private String oldPath = null;
 
@@ -65,14 +74,31 @@ public class UDFXPathUtil {
   return null;
 }
 
+if (builder == null){
+  try {
+initializeDocumentBuilderFactory();
+builder = dbf.newDocumentBuilder();
+  } catch (ParserConfigurationException e) {
+throw new RuntimeException(
+  "Error instantiating DocumentBuilder, cannot build xml parser", e);
+  }
+}
+
 reader.set(xml);
 try {
-  return expression.evaluate(inputSource, qname);
+  return expression.evaluate(builder.parse(inputSource), qname);
 } catch (XPathExpressionException e) {
   throw new RuntimeException("Invalid XML document: " + e.getMessage() + 
"\n" + xml, e);
+} catch (Exception e) {
+  throw new RuntimeException("Error loading expression '" + oldPath + "'", 
e);
 }
   }
 
+  private void initializeDocumentBuilderFactory() throws 
ParserConfigurationException {
+dbf.setFeature(SAX_FEATURE_PREFIX + EXTERNAL_GENERAL_ENTITIES_FEATURE, 
false);
+dbf.setFeature(SAX_FEATURE_PREFIX + EXTERNAL_PARAMETER_ENTITIES_FEATURE, 
false);
+  }
+
   public Boolean evalBoolean(String xml, String path) throws 
XPathExpressionException {
 return (Boolean) eval(xml, path, XPathConstants.BOOLEAN);
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/9a75c182/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/xml/UDFXPathUtilSuite.scala
--
diff --git 

spark git commit: [SPARK-24478][SQL][FOLLOWUP] Move projection and filter push down to physical conversion

2018-06-18 Thread wenchen
Repository: spark
Updated Branches:
  refs/heads/master 8f225e055 -> 1737d45e0


[SPARK-24478][SQL][FOLLOWUP] Move projection and filter push down to physical 
conversion

## What changes were proposed in this pull request?

This is a followup of https://github.com/apache/spark/pull/21503, to completely 
move operator pushdown to the planner rule.

The code are mostly from https://github.com/apache/spark/pull/21319

## How was this patch tested?

existing tests

Author: Wenchen Fan 

Closes #21574 from cloud-fan/followup.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1737d45e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1737d45e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1737d45e

Branch: refs/heads/master
Commit: 1737d45e08a5f1fb78515b14321721d7197b443a
Parents: 8f225e0
Author: Wenchen Fan 
Authored: Mon Jun 18 20:15:01 2018 -0700
Committer: Wenchen Fan 
Committed: Mon Jun 18 20:15:01 2018 -0700

--
 .../v2/reader/SupportsReportStatistics.java |   7 +-
 .../datasources/v2/DataSourceV2Relation.scala   | 109 
 .../datasources/v2/DataSourceV2Strategy.scala   | 124 ++-
 3 files changed, 123 insertions(+), 117 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/1737d45e/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsReportStatistics.java
--
diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsReportStatistics.java
 
b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsReportStatistics.java
index a79080a..9263964 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsReportStatistics.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/sources/v2/reader/SupportsReportStatistics.java
@@ -23,10 +23,9 @@ import org.apache.spark.annotation.InterfaceStability;
  * A mix in interface for {@link DataSourceReader}. Data source readers can 
implement this
  * interface to report statistics to Spark.
  *
- * Statistics are reported to the optimizer before a projection or any filters 
are pushed to the
- * DataSourceReader. Implementations that return more accurate statistics 
based on projection and
- * filters will not improve query performance until the planner can push 
operators before getting
- * stats.
+ * Statistics are reported to the optimizer before any operator is pushed to 
the DataSourceReader.
+ * Implementations that return more accurate statistics based on pushed 
operators will not improve
+ * query performance until the planner can push operators before getting stats.
  */
 @InterfaceStability.Evolving
 public interface SupportsReportStatistics extends DataSourceReader {

http://git-wip-us.apache.org/repos/asf/spark/blob/1737d45e/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
--
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
index e08af21..7613eb2 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala
@@ -23,17 +23,24 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
 import org.apache.spark.sql.catalyst.expressions.{AttributeReference, 
Expression}
 import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, 
Statistics}
-import org.apache.spark.sql.execution.datasources.DataSourceStrategy
-import org.apache.spark.sql.sources.{DataSourceRegister, Filter}
+import org.apache.spark.sql.sources.DataSourceRegister
 import org.apache.spark.sql.sources.v2.{DataSourceOptions, DataSourceV2, 
ReadSupport, ReadSupportWithSchema}
-import org.apache.spark.sql.sources.v2.reader.{DataSourceReader, 
SupportsPushDownCatalystFilters, SupportsPushDownFilters, 
SupportsPushDownRequiredColumns, SupportsReportStatistics}
+import org.apache.spark.sql.sources.v2.reader.{DataSourceReader, 
SupportsReportStatistics}
 import org.apache.spark.sql.types.StructType
 
+/**
+ * A logical plan representing a data source v2 scan.
+ *
+ * @param source An instance of a [[DataSourceV2]] implementation.
+ * @param options The options for this scan. Used to create fresh 
[[DataSourceReader]].
+ * @param userSpecifiedSchema The user-specified schema for this scan. Used to 
create fresh
+ *[[DataSourceReader]].
+ */
 case class 

svn commit: r27544 - in /dev/spark/2.3.2-SNAPSHOT-2018_06_18_18_01-b8dbfcc-docs: ./ _site/ _site/api/ _site/api/R/ _site/api/java/ _site/api/java/lib/ _site/api/java/org/ _site/api/java/org/apache/ _s

2018-06-18 Thread pwendell
Author: pwendell
Date: Tue Jun 19 01:15:42 2018
New Revision: 27544

Log:
Apache Spark 2.3.2-SNAPSHOT-2018_06_18_18_01-b8dbfcc docs


[This commit notification would consist of 1443 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



svn commit: r27542 - in /dev/spark/v2.1.3-rc1-docs: ./ _site/ _site/api/ _site/api/R/ _site/api/java/ _site/api/java/lib/ _site/api/java/org/ _site/api/java/org/apache/ _site/api/java/org/apache/spark

2018-06-18 Thread vanzin
Author: vanzin
Date: Mon Jun 18 23:23:28 2018
New Revision: 27542

Log:
Apache Spark v2.1.3-rc1 docs


[This commit notification would consist of 1254 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



svn commit: r27541 - /dev/spark/v2.1.3-rc1-bin/

2018-06-18 Thread vanzin
Author: vanzin
Date: Mon Jun 18 22:44:03 2018
New Revision: 27541

Log:
Apache Spark v2.1.3-rc1

Added:
dev/spark/v2.1.3-rc1-bin/
dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz   (with props)
dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.asc
dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.sha512
dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz   (with props)
dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.asc
dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.sha512
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.6.tgz   (with props)
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.6.tgz.asc
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.6.tgz.sha512
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.7.tgz   (with props)
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.7.tgz.asc
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-hadoop2.7.tgz.sha512
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-without-hadoop.tgz   (with props)
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-without-hadoop.tgz.asc
dev/spark/v2.1.3-rc1-bin/spark-2.1.3-bin-without-hadoop.tgz.sha512
dev/spark/v2.1.3-rc1-bin/spark-2.1.3.tgz   (with props)
dev/spark/v2.1.3-rc1-bin/spark-2.1.3.tgz.asc
dev/spark/v2.1.3-rc1-bin/spark-2.1.3.tgz.sha512

Added: dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz
==
Binary file - no diff available.

Propchange: dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.asc
==
--- dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.asc (added)
+++ dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.asc Mon Jun 18 22:44:03 2018
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v1
+
+iQIcBAABAgAGBQJbKDHkAAoJEP2P/Uw6DVVkRLMP/2wE1QcPUv9p8zOCFA08M3nl
+jI38h9VVbjx4bwntqIrZK2ZCuMoKzSj8Ba3SmROwJr2MiY644VZroGAwdaVMhvEL
+/n50vn5AUH/354GNxFCSdR9Wipo0JFHTae5YBNb3Itsnz3CjGvPEQD89WqW2OJHj
+NVhLyxfBHnmX3BHjjgWHirAAIOEXSkz/LEJq+F6Hvo5OrNVs/outJ+MVBpJ4ZSGx
+Uyly0cS9iqBzw1saiHGzx73Vo6FWIVwmYDJaQO2HeCNE0Bv9uu1Eivx0MSgEIjEO
+gXCeOnRSgAApbvdRls41dY4el8uzVxVR8/2oo3GEBt+jamSkNZnoulPZPGvsxYWC
+pYmEyRwpsmaWvhv8BA+WuTL0pnfOsJIG8/GyLCAYPvvGkQAvNakaTiSEdhNsLqXx
+yxiE4uWOZCxsYRYNDnINT1pLWb/VCDloLONR1eAlQl/3b3n+cgbuvDxk8rV7v8D+
+bCdrBZEWqV+cVmBhgKrGAp2NgPoyrYGTjbpYDTsAj80EE1UsrKWewV/gyzaIkgw+
+TTYALNrJXFkzBzOnxXvImqJMXadvfvfBGp9EJQjb5UEoxiiJyYPIgnFXUaI4bhEc
+SBdKfEyEQmzwsXv00EYuyiXvWa3oLH8XxG9lI+TlxSij7Ln4rOPOlgld6SC6pzvF
+n464UnG8e+SbrevMUur3
+=2mRx
+-END PGP SIGNATURE-

Added: dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.sha512
==
--- dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.sha512 (added)
+++ dev/spark/v2.1.3-rc1-bin/SparkR_2.1.3.tar.gz.sha512 Mon Jun 18 22:44:03 2018
@@ -0,0 +1,3 @@
+SparkR_2.1.3.tar.gz: A8CD3747 AAD8FA0E B422F2D3 F1F3E467 B6D1D484 25BD6917
+ 5EF68FFB DBFFDE12 0254A929 645740E4 445F4A38 6A1C1131
+ 45D32EB3 0A803F3D 89863A0D F621CB16

Added: dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz
==
Binary file - no diff available.

Propchange: dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz
--
svn:mime-type = application/octet-stream

Added: dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.asc
==
--- dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.asc (added)
+++ dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.asc Mon Jun 18 22:44:03 2018
@@ -0,0 +1,17 @@
+-BEGIN PGP SIGNATURE-
+Version: GnuPG v1
+
+iQIcBAABAgAGBQJbKDOaAAoJEP2P/Uw6DVVkdw4P/i+9NVNi4feRnqyrlVPbCF9L
+YUhpfTCqnCFHkg39x/CubHOejSwVmkW4yC4P4AyKfhwKsxx04fFK3WxZhwz/8daS
+dt42HcrlrDwwQb6mLvE6+nLMJaslPshHt+Uj5jeJxJCcXnyGMW0mKxeEfTtfA6eF
+Ufkx1YojOxxgyAbGW+c9sgKOryo4MyTaV+0UOSfEX8k3MxOQG8Gcm5ZcXa+n0rMX
+iojr+b5BuOQur/0fICC5XuYLIgGDv/AilMGZDXAFxUWmjYB1AIjpiFVzLhvfBIzZ
+YigcU9P5flTr0GmEk9UW2U2SRFzDQiFwjKdKdlJfkjetvyx6irk42ypJzC4J++Rn
+t3cfsC+XiHUDP3OSnCeaQgPcJm0LhTRUrHb1P4lpl4az0UyUkKUhklF46WjfobGt
+pu9TGk4JZ88Lt9Ls2ouYsbv3KbB9EIbgaDk6AS0cmGtgmmcjB/9aWoeBSKnnKBZG
+TVMEMCsHGyLH8d5nFXZCmic+yYOO9t5tV07Py1m/aNfhsKW1JasH/0eGU+t8agDM
+wV41PafxP9si7KOPbjqrAkxPL9EoquLb8s2jD/D2MSZNXX0oq2w0nJ4UNEUBmS3G
+hRGR2Pwi6mksNDxnGmmXR0+RndJUW3QPQi4k8WsvMtNeQHMdT5WYWN5IcXXs0uYN
+l/vvRmU1Hj6QHJhGQfOC
+=Ku91
+-END PGP SIGNATURE-

Added: dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.sha512
==
--- dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.sha512 (added)
+++ dev/spark/v2.1.3-rc1-bin/pyspark-2.1.3.tar.gz.sha512 Mon 

spark git commit: Fix issue in 'docker-image-tool.sh'

2018-06-18 Thread mcheah
Repository: spark
Updated Branches:
  refs/heads/branch-2.3 9d63e540e -> b8dbfcc57


Fix issue in 'docker-image-tool.sh'

Because of the missing assignment of the variable `BUILD_ARGS` the command 
`./bin/docker-image-tool.sh -r docker.io/myrepo -t v2.3.1 build` fails:

```
docker build" requires exactly 1 argument.
See 'docker build --help'.

Usage:  docker build [OPTIONS] PATH | URL | - [flags]

Build an image from a Dockerfile
```

This has been fixed on the `master` already but, apparently, it has not been 
ported back to the branch `2.3`, leading to the same error even on the latest 
`2.3.1` release (dated 8 June 2018).

Author: Fabrizio Cucci 

Closes #21551 from fabriziocucci/patch-1.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b8dbfcc5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b8dbfcc5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b8dbfcc5

Branch: refs/heads/branch-2.3
Commit: b8dbfcc572bf5f386d21ae0be7b3439d1abed860
Parents: 9d63e54
Author: Fabrizio Cucci 
Authored: Mon Jun 18 14:40:24 2018 -0700
Committer: mcheah 
Committed: Mon Jun 18 14:40:24 2018 -0700

--
 bin/docker-image-tool.sh | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/b8dbfcc5/bin/docker-image-tool.sh
--
diff --git a/bin/docker-image-tool.sh b/bin/docker-image-tool.sh
index 0714063..0d0f564 100755
--- a/bin/docker-image-tool.sh
+++ b/bin/docker-image-tool.sh
@@ -57,6 +57,7 @@ function build {
   else
 # Not passed as an argument to docker, but used to validate the Spark 
directory.
 IMG_PATH="kubernetes/dockerfiles"
+BUILD_ARGS=()
   fi
 
   if [ ! -d "$IMG_PATH" ]; then


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



[2/2] spark git commit: Preparing development version 2.1.4-SNAPSHOT

2018-06-18 Thread vanzin
Preparing development version 2.1.4-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/77d11df4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/77d11df4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/77d11df4

Branch: refs/heads/branch-2.1
Commit: 77d11df4f33995c4e8f3e421c4bd728ec62681aa
Parents: bbec382
Author: Marcelo Vanzin 
Authored: Mon Jun 18 13:53:52 2018 -0700
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 13:53:52 2018 -0700

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/java8-tests/pom.xml  | 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mesos/pom.xml | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 yarn/pom.xml  | 2 +-
 39 files changed, 40 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/77d11df4/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 6c380b6..e47b124 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.1.3
+Version: 2.1.4
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/77d11df4/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 038b040..f675987 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3
+2.1.4-SNAPSHOT
 ../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/77d11df4/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index c455303..3fa4acd 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3
+2.1.4-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/77d11df4/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index 2671bb9..d960d13 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3
+2.1.4-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/77d11df4/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index 35925b2..8d2d37f 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3
+2.1.4-SNAPSHOT
 ../../pom.xml
   
 


[spark] Git Push Summary

2018-06-18 Thread vanzin
Repository: spark
Updated Tags:  refs/tags/v2.1.3-rc1 [created] bbec3820c

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



[1/2] spark git commit: Preparing Spark release v2.1.3-rc1

2018-06-18 Thread vanzin
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 09d62531b -> 77d11df4f


Preparing Spark release v2.1.3-rc1


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bbec3820
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bbec3820
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bbec3820

Branch: refs/heads/branch-2.1
Commit: bbec3820c39746ce569668762fb74b328b0e6c75
Parents: 09d6253
Author: Marcelo Vanzin 
Authored: Mon Jun 18 13:53:49 2018 -0700
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 13:53:49 2018 -0700

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/java8-tests/pom.xml  | 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mesos/pom.xml | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 yarn/pom.xml  | 2 +-
 39 files changed, 40 insertions(+), 40 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/bbec3820/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index e47b124..6c380b6 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.1.4
+Version: 2.1.3
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/bbec3820/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index e9f915a..038b040 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3-SNAPSHOT
+2.1.3
 ../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bbec3820/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 7e203e7..c455303 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3-SNAPSHOT
+2.1.3
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bbec3820/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index 92dd275..2671bb9 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.1.3-SNAPSHOT
+2.1.3
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/bbec3820/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index abca418..35925b2 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
- 

[spark] Git Push Summary

2018-06-18 Thread vanzin
Repository: spark
Updated Tags:  refs/tags/v2.1.3-rc1 [deleted] 9a3a34c5a

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



[1/2] spark git commit: Preparing Spark release v2.1.3-rc1

2018-06-18 Thread vanzin
Repository: spark
Updated Branches:
  refs/heads/branch-2.1 858e89b43 -> 09d62531b


Preparing Spark release v2.1.3-rc1


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9a3a34c5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9a3a34c5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9a3a34c5

Branch: refs/heads/branch-2.1
Commit: 9a3a34c5a1115b4ddae4c81705d75976f86ab32f
Parents: 858e89b
Author: Marcelo Vanzin 
Authored: Mon Jun 18 20:37:08 2018 +
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 20:37:08 2018 +

--
 docs/_config.yml  | 2 +-
 python/pyspark/version.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/9a3a34c5/docs/_config.yml
--
diff --git a/docs/_config.yml b/docs/_config.yml
index ebe765a..0085284 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -14,7 +14,7 @@ include:
 
 # These allow the documentation to be updated with newer releases
 # of Spark, Scala, and Mesos.
-SPARK_VERSION: 2.1.3-SNAPSHOT
+SPARK_VERSION: 2.1.3
 SPARK_VERSION_SHORT: 2.1.3
 SCALA_BINARY_VERSION: "2.11"
 SCALA_VERSION: "2.11.8"

http://git-wip-us.apache.org/repos/asf/spark/blob/9a3a34c5/python/pyspark/version.py
--
diff --git a/python/pyspark/version.py b/python/pyspark/version.py
index d278412..30fba69 100644
--- a/python/pyspark/version.py
+++ b/python/pyspark/version.py
@@ -16,4 +16,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "2.1.3.dev0"
+__version__ = "2.1.3"


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



[spark] Git Push Summary

2018-06-18 Thread vanzin
Repository: spark
Updated Tags:  refs/tags/v2.1.3-rc1 [created] 9a3a34c5a

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



[2/2] spark git commit: Preparing development version 2.1.4-SNAPSHOT

2018-06-18 Thread vanzin
Preparing development version 2.1.4-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/09d62531
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/09d62531
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/09d62531

Branch: refs/heads/branch-2.1
Commit: 09d62531b5a4485d22579a5d1a6b86ba32dced50
Parents: 9a3a34c
Author: Marcelo Vanzin 
Authored: Mon Jun 18 20:37:10 2018 +
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 20:37:10 2018 +

--
 R/pkg/DESCRIPTION | 2 +-
 docs/_config.yml  | 4 ++--
 python/pyspark/version.py | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/09d62531/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 6c380b6..e47b124 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.1.3
+Version: 2.1.4
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/09d62531/docs/_config.yml
--
diff --git a/docs/_config.yml b/docs/_config.yml
index 0085284..13b5bfb 100644
--- a/docs/_config.yml
+++ b/docs/_config.yml
@@ -14,8 +14,8 @@ include:
 
 # These allow the documentation to be updated with newer releases
 # of Spark, Scala, and Mesos.
-SPARK_VERSION: 2.1.3
-SPARK_VERSION_SHORT: 2.1.3
+SPARK_VERSION: 2.1.4-SNAPSHOT
+SPARK_VERSION_SHORT: 2.1.4
 SCALA_BINARY_VERSION: "2.11"
 SCALA_VERSION: "2.11.8"
 MESOS_VERSION: 1.0.0

http://git-wip-us.apache.org/repos/asf/spark/blob/09d62531/python/pyspark/version.py
--
diff --git a/python/pyspark/version.py b/python/pyspark/version.py
index 30fba69..de159fe 100644
--- a/python/pyspark/version.py
+++ b/python/pyspark/version.py
@@ -16,4 +16,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "2.1.3"
+__version__ = "2.1.4.dev0"


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark-website git commit: Spark pride

2018-06-18 Thread vanzin
Repository: spark-website
Updated Branches:
  refs/heads/asf-site fc84c6950 -> 3d00a9da2


Spark pride

Change to the Spark Pride logo as discussed. Once finished we can git mv the 
-back files over it until we want to swap in a special logo again. Also adds 
spark-logo-pride.png & xcf for permalinks.
![result](https://user-images.githubusercontent.com/59893/41512057-ad2dac6c-7237-11e8-80c0-c164b7a0dec1.png)

Author: Holden Karau 

Closes #118 from holdenk/spark-pride.


Project: http://git-wip-us.apache.org/repos/asf/spark-website/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark-website/commit/3d00a9da
Tree: http://git-wip-us.apache.org/repos/asf/spark-website/tree/3d00a9da
Diff: http://git-wip-us.apache.org/repos/asf/spark-website/diff/3d00a9da

Branch: refs/heads/asf-site
Commit: 3d00a9da220ea17481f2512312f61bfd545df5b4
Parents: fc84c69
Author: Holden Karau 
Authored: Mon Jun 18 13:02:13 2018 -0700
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 13:02:13 2018 -0700

--
 images/spark-logo-back.png| Bin 0 -> 26999 bytes
 images/spark-logo-pride.png   | Bin 0 -> 49720 bytes
 images/spark-logo-pride.xcf   | Bin 0 -> 85481 bytes
 images/spark-logo-trademark-back.png  | Bin 0 -> 26999 bytes
 images/spark-logo-trademark.png   | Bin 26999 -> 49720 bytes
 images/spark-logo.png | Bin 26999 -> 49720 bytes
 site/images/spark-logo-back.png   | Bin 0 -> 26999 bytes
 site/images/spark-logo-pride.png  | Bin 0 -> 49720 bytes
 site/images/spark-logo-pride.xcf  | Bin 0 -> 85481 bytes
 site/images/spark-logo-trademark-back.png | Bin 0 -> 26999 bytes
 site/images/spark-logo-trademark.png  | Bin 26999 -> 49720 bytes
 site/images/spark-logo.png| Bin 26999 -> 49720 bytes
 12 files changed, 0 insertions(+), 0 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo-back.png
--
diff --git a/images/spark-logo-back.png b/images/spark-logo-back.png
new file mode 100644
index 000..16702a9
Binary files /dev/null and b/images/spark-logo-back.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo-pride.png
--
diff --git a/images/spark-logo-pride.png b/images/spark-logo-pride.png
new file mode 100644
index 000..eab639f
Binary files /dev/null and b/images/spark-logo-pride.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo-pride.xcf
--
diff --git a/images/spark-logo-pride.xcf b/images/spark-logo-pride.xcf
new file mode 100644
index 000..8307c47
Binary files /dev/null and b/images/spark-logo-pride.xcf differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo-trademark-back.png
--
diff --git a/images/spark-logo-trademark-back.png 
b/images/spark-logo-trademark-back.png
new file mode 100644
index 000..16702a9
Binary files /dev/null and b/images/spark-logo-trademark-back.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo-trademark.png
--
diff --git a/images/spark-logo-trademark.png b/images/spark-logo-trademark.png
index 16702a9..eab639f 100644
Binary files a/images/spark-logo-trademark.png and 
b/images/spark-logo-trademark.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/images/spark-logo.png
--
diff --git a/images/spark-logo.png b/images/spark-logo.png
index 16702a9..eab639f 100644
Binary files a/images/spark-logo.png and b/images/spark-logo.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/site/images/spark-logo-back.png
--
diff --git a/site/images/spark-logo-back.png b/site/images/spark-logo-back.png
new file mode 100644
index 000..16702a9
Binary files /dev/null and b/site/images/spark-logo-back.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/site/images/spark-logo-pride.png
--
diff --git a/site/images/spark-logo-pride.png b/site/images/spark-logo-pride.png
new file mode 100644
index 000..eab639f
Binary files /dev/null and b/site/images/spark-logo-pride.png differ

http://git-wip-us.apache.org/repos/asf/spark-website/blob/3d00a9da/site/images/spark-logo-pride.xcf

spark-website git commit: Update list of mirrored releases, add more instructions to release guide.

2018-06-18 Thread vanzin
Repository: spark-website
Updated Branches:
  refs/heads/asf-site e6166eabf -> fc84c6950


Update list of mirrored releases, add more instructions to release guide.

Author: Marcelo Vanzin 

Closes #117 from vanzin/more-rm-stuff.


Project: http://git-wip-us.apache.org/repos/asf/spark-website/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark-website/commit/fc84c695
Tree: http://git-wip-us.apache.org/repos/asf/spark-website/tree/fc84c695
Diff: http://git-wip-us.apache.org/repos/asf/spark-website/diff/fc84c695

Branch: refs/heads/asf-site
Commit: fc84c69504f562f45cf2a1f8a09a6c37c31e0424
Parents: e6166ea
Author: Marcelo Vanzin 
Authored: Mon Jun 18 12:42:31 2018 -0700
Committer: Marcelo Vanzin 
Committed: Mon Jun 18 12:42:31 2018 -0700

--
 js/downloads.js   | 54 --
 release-process.md| 34 +-
 site/js/downloads.js  | 54 --
 site/release-process.html | 30 +++
 4 files changed, 95 insertions(+), 77 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark-website/blob/fc84c695/js/downloads.js
--
diff --git a/js/downloads.js b/js/downloads.js
index 80dbdcb..d817dbd 100644
--- a/js/downloads.js
+++ b/js/downloads.js
@@ -3,8 +3,13 @@
 
 releases = {};
 
-function addRelease(version, releaseDate, packages, stable) {
-  releases[version] = {released: releaseDate, packages: packages, stable: 
stable};
+function addRelease(version, releaseDate, packages, stable, mirrored) {
+  releases[version] = {
+released: releaseDate,
+packages: packages,
+stable: stable,
+mirrored: mirrored
+  };
 }
 
 var sources = {pretty: "Source Code", tag: "sources"};
@@ -24,26 +29,21 @@ var packagesV7 = [hadoop2p7, hadoop2p6, hadoop2p4, 
hadoop2p3, hadoopFree, source
 // 2.2.0+
 var packagesV8 = [hadoop2p7, hadoop2p6, hadoopFree, sources];
 
-addRelease("2.3.1", new Date("06/08/2018"), packagesV8, true);
-addRelease("2.3.0", new Date("02/28/2018"), packagesV8, true);
-addRelease("2.2.1", new Date("12/01/2017"), packagesV8, true);
-addRelease("2.2.0", new Date("07/11/2017"), packagesV8, true);
-addRelease("2.1.2", new Date("10/09/2017"), packagesV7, true);
-addRelease("2.1.1", new Date("05/02/2017"), packagesV7, true);
-addRelease("2.1.0", new Date("12/28/2016"), packagesV7, true);
-addRelease("2.0.2", new Date("11/14/2016"), packagesV7, true);
-addRelease("2.0.1", new Date("10/03/2016"), packagesV7, true);
-addRelease("2.0.0", new Date("07/26/2016"), packagesV7, true);
+addRelease("2.3.1", new Date("06/08/2018"), packagesV8, true, true);
+addRelease("2.3.0", new Date("02/28/2018"), packagesV8, true, true);
+addRelease("2.2.1", new Date("12/01/2017"), packagesV8, true, true);
+addRelease("2.2.0", new Date("07/11/2017"), packagesV8, true, false);
+addRelease("2.1.2", new Date("10/09/2017"), packagesV7, true, true);
+addRelease("2.1.1", new Date("05/02/2017"), packagesV7, true, false);
+addRelease("2.1.0", new Date("12/28/2016"), packagesV7, true, false);
+addRelease("2.0.2", new Date("11/14/2016"), packagesV7, true, true);
+addRelease("2.0.1", new Date("10/03/2016"), packagesV7, true, false);
+addRelease("2.0.0", new Date("07/26/2016"), packagesV7, true, false);
 //addRelease("2.0.0-preview", new Date("05/24/2016"), 
sources.concat(packagesV7), true, false);
-addRelease("1.6.3", new Date("11/07/2016"), packagesV6, true);
-addRelease("1.6.2", new Date("06/25/2016"), packagesV6, true);
-addRelease("1.6.1", new Date("03/09/2016"), packagesV6, true);
-addRelease("1.6.0", new Date("01/04/2016"), packagesV6, true);
-//addRelease("1.5.2", new Date("11/09/2015"), packagesV6, true);
-//addRelease("1.5.1", new Date("10/02/2015"), packagesV6, true);
-//addRelease("1.5.0", new Date("9/09/2015"), packagesV6, true);
-//addRelease("1.4.1", new Date("7/15/2015"), packagesV6, true);
-//addRelease("1.4.0", new Date("6/11/2015"), packagesV6, true);
+addRelease("1.6.3", new Date("11/07/2016"), packagesV6, true, true);
+addRelease("1.6.2", new Date("06/25/2016"), packagesV6, true, false);
+addRelease("1.6.1", new Date("03/09/2016"), packagesV6, true, false);
+addRelease("1.6.0", new Date("01/04/2016"), packagesV6, true, false);
 
 function append(el, contents) {
   el.innerHTML += contents;
@@ -128,10 +128,10 @@ function onVersionSelect() {
   append(verifyLink, link);
 
   // Populate releases
-  updateDownloadLink();
+  updateDownloadLink(releases[version].mirrored);
 }
 
-function updateDownloadLink() {
+function updateDownloadLink(isMirrored) {
   var versionSelect = document.getElementById("sparkVersionSelect");
   var packageSelect = document.getElementById("sparkPackageSelect");
   var downloadLink = document.getElementById("spanDownloadLink");
@@ -147,12 +147,10 @@ function 

svn commit: r27534 - in /dev/spark/2.4.0-SNAPSHOT-2018_06_18_12_02-8f225e0-docs: ./ _site/ _site/api/ _site/api/R/ _site/api/java/ _site/api/java/lib/ _site/api/java/org/ _site/api/java/org/apache/ _s

2018-06-18 Thread pwendell
Author: pwendell
Date: Mon Jun 18 19:17:24 2018
New Revision: 27534

Log:
Apache Spark 2.4.0-SNAPSHOT-2018_06_18_12_02-8f225e0 docs


[This commit notification would consist of 1468 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-24548][SQL] Fix incorrect schema of Dataset with tuple encoders

2018-06-18 Thread wenchen
Repository: spark
Updated Branches:
  refs/heads/master bce177552 -> 8f225e055


[SPARK-24548][SQL] Fix incorrect schema of Dataset with tuple encoders

## What changes were proposed in this pull request?

When creating tuple expression encoders, we should give the serializer 
expressions of tuple items correct names, so we can have correct output schema 
when we use such tuple encoders.

## How was this patch tested?

Added test.

Author: Liang-Chi Hsieh 

Closes #21576 from viirya/SPARK-24548.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8f225e05
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8f225e05
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8f225e05

Branch: refs/heads/master
Commit: 8f225e055c2031ca85d61721ab712170ab4e50c1
Parents: bce1775
Author: Liang-Chi Hsieh 
Authored: Mon Jun 18 11:01:17 2018 -0700
Committer: Wenchen Fan 
Committed: Mon Jun 18 11:01:17 2018 -0700

--
 .../sql/catalyst/encoders/ExpressionEncoder.scala |  3 ++-
 .../org/apache/spark/sql/JavaDatasetSuite.java| 18 ++
 .../scala/org/apache/spark/sql/DatasetSuite.scala | 13 +
 3 files changed, 33 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/8f225e05/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala
index efc2882..cbea3c0 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoder.scala
@@ -128,7 +128,7 @@ object ExpressionEncoder {
 case b: BoundReference if b == originalInputObject => newInputObject
   })
 
-  if (enc.flat) {
+  val serializerExpr = if (enc.flat) {
 newSerializer.head
   } else {
 // For non-flat encoder, the input object is not top level anymore 
after being combined to
@@ -146,6 +146,7 @@ object ExpressionEncoder {
   Invoke(Literal.fromObject(None), "equals", BooleanType, 
newInputObject :: Nil))
 If(nullCheck, Literal.create(null, struct.dataType), struct)
   }
+  Alias(serializerExpr, s"_${index + 1}")()
 }
 
 val childrenDeserializers = encoders.zipWithIndex.map { case (enc, index) 
=>

http://git-wip-us.apache.org/repos/asf/spark/blob/8f225e05/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
--
diff --git 
a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java 
b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index c132cab..2c695fc 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -34,6 +34,7 @@ import com.google.common.base.Objects;
 import org.junit.*;
 import org.junit.rules.ExpectedException;
 
+import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.*;
 import org.apache.spark.sql.*;
@@ -337,6 +338,23 @@ public class JavaDatasetSuite implements Serializable {
   }
 
   @Test
+  public void testTupleEncoderSchema() {
+Encoder>> encoder =
+  Encoders.tuple(Encoders.STRING(), Encoders.tuple(Encoders.STRING(), 
Encoders.STRING()));
+List>> data = 
Arrays.asList(tuple2("1", tuple2("a", "b")),
+  tuple2("2", tuple2("c", "d")));
+Dataset ds1 = spark.createDataset(data, encoder).toDF("value1", 
"value2");
+
+JavaPairRDD> pairRDD = 
jsc.parallelizePairs(data);
+Dataset ds2 = spark.createDataset(JavaPairRDD.toRDD(pairRDD), encoder)
+  .toDF("value1", "value2");
+
+Assert.assertEquals(ds1.schema(), ds2.schema());
+Assert.assertEquals(ds1.select(expr("value2._1")).collectAsList(),
+  ds2.select(expr("value2._1")).collectAsList());
+  }
+
+  @Test
   public void testNestedTupleEncoder() {
 // test ((int, string), string)
 Encoder, String>> encoder =

http://git-wip-us.apache.org/repos/asf/spark/blob/8f225e05/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
--
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index d477d78..093cee9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ 

spark git commit: [SPARK-24526][BUILD][TEST-MAVEN] Spaces in the build dir causes failures in the build/mvn script

2018-06-18 Thread gurwls223
Repository: spark
Updated Branches:
  refs/heads/master e219e692e -> bce177552


[SPARK-24526][BUILD][TEST-MAVEN] Spaces in the build dir causes failures in the 
build/mvn script

## What changes were proposed in this pull request?

Fix the call to ${MVN_BIN} to be wrapped in quotes so it will handle having 
spaces in the path.

## How was this patch tested?

Ran the following to confirm using the build/mvn tool with a space in the build 
dir now works without error

```
mkdir /tmp/test\ spaces
cd /tmp/test\ spaces
git clone https://github.com/apache/spark.git
cd spark
# Remove all mvn references in PATH so the script will download mvn to the 
local dir
./build/mvn -DskipTests clean package
```

Please review http://spark.apache.org/contributing.html before opening a pull 
request.

Author: trystanleftwich 

Closes #21534 from trystanleftwich/SPARK-24526.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bce17755
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bce17755
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bce17755

Branch: refs/heads/master
Commit: bce177552564a4862bc979d39790cf553a477d74
Parents: e219e69
Author: trystanleftwich 
Authored: Tue Jun 19 00:34:24 2018 +0800
Committer: hyukjinkwon 
Committed: Tue Jun 19 00:34:24 2018 +0800

--
 build/mvn | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/bce17755/build/mvn
--
diff --git a/build/mvn b/build/mvn
index efa4f93..1405983 100755
--- a/build/mvn
+++ b/build/mvn
@@ -154,4 +154,4 @@ export MAVEN_OPTS=${MAVEN_OPTS:-"$_COMPILE_JVM_OPTS"}
 echo "Using \`mvn\` from path: $MVN_BIN" 1>&2
 
 # Last, call the `mvn` command as usual
-${MVN_BIN} -DzincPort=${ZINC_PORT} "$@"
+"${MVN_BIN}" -DzincPort=${ZINC_PORT} "$@"


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-23772][SQL] Provide an option to ignore column of all null values or empty array during JSON schema inference

2018-06-18 Thread gurwls223
Repository: spark
Updated Branches:
  refs/heads/master b0a935255 -> e219e692e


[SPARK-23772][SQL] Provide an option to ignore column of all null values or 
empty array during JSON schema inference

## What changes were proposed in this pull request?
This pr added a new JSON option `dropFieldIfAllNull ` to ignore column of all 
null values or empty array/struct during JSON schema inference.

## How was this patch tested?
Added tests in `JsonSuite`.

Author: Takeshi Yamamuro 
Author: Xiangrui Meng 

Closes #20929 from maropu/SPARK-23772.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e219e692
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e219e692
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e219e692

Branch: refs/heads/master
Commit: e219e692ef70c161f37a48bfdec2a94b29260004
Parents: b0a9352
Author: Takeshi Yamamuro 
Authored: Tue Jun 19 00:24:54 2018 +0800
Committer: hyukjinkwon 
Committed: Tue Jun 19 00:24:54 2018 +0800

--
 python/pyspark/sql/readwriter.py|  5 +-
 .../spark/sql/catalyst/json/JSONOptions.scala   |  3 ++
 .../org/apache/spark/sql/DataFrameReader.scala  |  2 +
 .../datasources/json/JsonInferSchema.scala  | 40 
 .../spark/sql/streaming/DataStreamReader.scala  |  2 +
 .../execution/datasources/json/JsonSuite.scala  | 49 
 6 files changed, 80 insertions(+), 21 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/e219e692/python/pyspark/sql/readwriter.py
--
diff --git a/python/pyspark/sql/readwriter.py b/python/pyspark/sql/readwriter.py
index a0e20d3..3efe2ad 100644
--- a/python/pyspark/sql/readwriter.py
+++ b/python/pyspark/sql/readwriter.py
@@ -177,7 +177,7 @@ class DataFrameReader(OptionUtils):
  allowNumericLeadingZero=None, 
allowBackslashEscapingAnyCharacter=None,
  mode=None, columnNameOfCorruptRecord=None, dateFormat=None, 
timestampFormat=None,
  multiLine=None, allowUnquotedControlChars=None, lineSep=None, 
samplingRatio=None,
- encoding=None):
+ dropFieldIfAllNull=None, encoding=None):
 """
 Loads JSON files and returns the results as a :class:`DataFrame`.
 
@@ -246,6 +246,9 @@ class DataFrameReader(OptionUtils):
 set, it covers all ``\\r``, ``\\r\\n`` and ``\\n``.
 :param samplingRatio: defines fraction of input JSON objects used for 
schema inferring.
   If None is set, it uses the default value, 
``1.0``.
+:param dropFieldIfAllNull: whether to ignore column of all null values 
or empty
+   array/struct during schema inference. If 
None is set, it
+   uses the default value, ``false``.
 
 >>> df1 = spark.read.json('python/test_support/sql/people.json')
 >>> df1.dtypes

http://git-wip-us.apache.org/repos/asf/spark/blob/e219e692/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
index 2ff12ac..c081772 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JSONOptions.scala
@@ -73,6 +73,9 @@ private[sql] class JSONOptions(
   val columnNameOfCorruptRecord =
 parameters.getOrElse("columnNameOfCorruptRecord", 
defaultColumnNameOfCorruptRecord)
 
+  // Whether to ignore column of all null values or empty array/struct during 
schema inference
+  val dropFieldIfAllNull = 
parameters.get("dropFieldIfAllNull").map(_.toBoolean).getOrElse(false)
+
   val timeZone: TimeZone = DateTimeUtils.getTimeZone(
 parameters.getOrElse(DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId))
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e219e692/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
--
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index de6be5f..ec9352a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -381,6 +381,8 @@ class DataFrameReader private[sql](sparkSession: 
SparkSession) extends Logging {
* that should be used for parsing.
* `samplingRatio` (default is 1.0): defines fraction of input JSON 
objects used
* 

spark git commit: Preparing development version 2.2.3-SNAPSHOT

2018-06-18 Thread tgraves
Repository: spark
Updated Branches:
  refs/heads/branch-2.2 e2e4d5849 -> 7bfefc928


Preparing development version 2.2.3-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7bfefc92
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7bfefc92
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7bfefc92

Branch: refs/heads/branch-2.2
Commit: 7bfefc9283ffaec756eaf7f13c0b09025945446b
Parents: e2e4d58
Author: Thomas Graves 
Authored: Mon Jun 18 11:21:21 2018 -0500
Committer: Thomas Graves 
Committed: Mon Jun 18 11:21:21 2018 -0500

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 resource-managers/mesos/pom.xml   | 2 +-
 resource-managers/yarn/pom.xml| 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 38 files changed, 39 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/7bfefc92/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index e335948..ad72330 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.2-3
+Version: 2.2.3
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/7bfefc92/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 6db6a22..f9ec6e7 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2-3-SNAPSHOT
+2.2.3-SNAPSHOT
 ../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7bfefc92/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index f8b1b59..55d29d5 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2-3-SNAPSHOT
+2.2.3-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7bfefc92/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index cd56f2f..6d84766 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2-3-SNAPSHOT
+2.2.3-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7bfefc92/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index e8c8177..6228be6 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-

[1/2] spark git commit: Preparing Spark release v2.2.2-rc1

2018-06-18 Thread tgraves
Repository: spark
Updated Branches:
  refs/heads/branch-2.2 090b883fa -> e2e4d5849


Preparing Spark release v2.2.2-rc1


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/8ce9e2a4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/8ce9e2a4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/8ce9e2a4

Branch: refs/heads/branch-2.2
Commit: 8ce9e2a4afa7414339bf98dbe4f3e83cde0ac149
Parents: 090b883
Author: Thomas Graves 
Authored: Mon Jun 18 14:45:11 2018 +
Committer: Thomas Graves 
Committed: Mon Jun 18 14:45:11 2018 +

--
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 2 +-
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 resource-managers/mesos/pom.xml   | 2 +-
 resource-managers/yarn/pom.xml| 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 37 files changed, 37 insertions(+), 37 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index eeb75e9..775d9f8 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2-SNAPSHOT
+2.2.2
 ../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 9d83ad8..017f0f1 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2-SNAPSHOT
+2.2.2
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index f841f93..17d5216 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2-SNAPSHOT
+2.2.2
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index c1c2ebb..8d03c86 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2-SNAPSHOT
+2.2.2
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/common/sketch/pom.xml
--
diff --git a/common/sketch/pom.xml b/common/sketch/pom.xml
index 846051f..dadc0bc 100644
--- a/common/sketch/pom.xml
+++ b/common/sketch/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2-SNAPSHOT
+2.2.2
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/8ce9e2a4/common/tags/pom.xml
--

[2/2] spark git commit: Preparing development version 2.2.3-SNAPSHOT

2018-06-18 Thread tgraves
Preparing development version 2.2.3-SNAPSHOT


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e2e4d584
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e2e4d584
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e2e4d584

Branch: refs/heads/branch-2.2
Commit: e2e4d5849fb85410dc489abbeb4da428ee1274f5
Parents: 8ce9e2a
Author: Thomas Graves 
Authored: Mon Jun 18 14:45:19 2018 +
Committer: Thomas Graves 
Committed: Mon Jun 18 14:45:19 2018 +

--
 R/pkg/DESCRIPTION | 2 +-
 assembly/pom.xml  | 2 +-
 common/network-common/pom.xml | 2 +-
 common/network-shuffle/pom.xml| 2 +-
 common/network-yarn/pom.xml   | 2 +-
 common/sketch/pom.xml | 2 +-
 common/tags/pom.xml   | 2 +-
 common/unsafe/pom.xml | 2 +-
 core/pom.xml  | 2 +-
 docs/_config.yml  | 4 ++--
 examples/pom.xml  | 2 +-
 external/docker-integration-tests/pom.xml | 2 +-
 external/flume-assembly/pom.xml   | 2 +-
 external/flume-sink/pom.xml   | 2 +-
 external/flume/pom.xml| 2 +-
 external/kafka-0-10-assembly/pom.xml  | 2 +-
 external/kafka-0-10-sql/pom.xml   | 2 +-
 external/kafka-0-10/pom.xml   | 2 +-
 external/kafka-0-8-assembly/pom.xml   | 2 +-
 external/kafka-0-8/pom.xml| 2 +-
 external/kinesis-asl-assembly/pom.xml | 2 +-
 external/kinesis-asl/pom.xml  | 2 +-
 external/spark-ganglia-lgpl/pom.xml   | 2 +-
 graphx/pom.xml| 2 +-
 launcher/pom.xml  | 2 +-
 mllib-local/pom.xml   | 2 +-
 mllib/pom.xml | 2 +-
 pom.xml   | 2 +-
 python/pyspark/version.py | 2 +-
 repl/pom.xml  | 2 +-
 resource-managers/mesos/pom.xml   | 2 +-
 resource-managers/yarn/pom.xml| 2 +-
 sql/catalyst/pom.xml  | 2 +-
 sql/core/pom.xml  | 2 +-
 sql/hive-thriftserver/pom.xml | 2 +-
 sql/hive/pom.xml  | 2 +-
 streaming/pom.xml | 2 +-
 tools/pom.xml | 2 +-
 38 files changed, 39 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/e2e4d584/R/pkg/DESCRIPTION
--
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index 380b3ef..e335948 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -1,6 +1,6 @@
 Package: SparkR
 Type: Package
-Version: 2.2.2
 +Version: 2.2.3
 Title: R Frontend for Apache Spark
 Description: Provides an R Frontend for Apache Spark.
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),

http://git-wip-us.apache.org/repos/asf/spark/blob/e2e4d584/assembly/pom.xml
--
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 775d9f8..6db6a22 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -21,7 +21,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2
 +2.2.3-SNAPSHOT
 ../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e2e4d584/common/network-common/pom.xml
--
diff --git a/common/network-common/pom.xml b/common/network-common/pom.xml
index 017f0f1..f8b1b59 100644
--- a/common/network-common/pom.xml
+++ b/common/network-common/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2
 +2.2.3-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e2e4d584/common/network-shuffle/pom.xml
--
diff --git a/common/network-shuffle/pom.xml b/common/network-shuffle/pom.xml
index 17d5216..cd56f2f 100644
--- a/common/network-shuffle/pom.xml
+++ b/common/network-shuffle/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2
 +2.2.3-SNAPSHOT
 ../../pom.xml
   
 

http://git-wip-us.apache.org/repos/asf/spark/blob/e2e4d584/common/network-yarn/pom.xml
--
diff --git a/common/network-yarn/pom.xml b/common/network-yarn/pom.xml
index 8d03c86..e8c8177 100644
--- a/common/network-yarn/pom.xml
+++ b/common/network-yarn/pom.xml
@@ -22,7 +22,7 @@
   
 org.apache.spark
 spark-parent_2.11
-2.2.2
 +2.2.3-SNAPSHOT
 ../../pom.xml
   
 


[spark] Git Push Summary

2018-06-18 Thread tgraves
Repository: spark
Updated Tags:  refs/tags/v2.2.2-rc1 [created] 8ce9e2a4a

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



svn commit: r27527 - in /dev/spark/2.4.0-SNAPSHOT-2018_06_18_05_17-b0a9352-docs: ./ _site/ _site/api/ _site/api/R/ _site/api/java/ _site/api/java/lib/ _site/api/java/org/ _site/api/java/org/apache/ _s

2018-06-18 Thread pwendell
Author: pwendell
Date: Mon Jun 18 12:32:31 2018
New Revision: 27527

Log:
Apache Spark 2.4.0-SNAPSHOT-2018_06_18_05_17-b0a9352 docs


[This commit notification would consist of 1468 parts, 
which exceeds the limit of 50 ones, so it was shortened to the summary.]

-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-24573][INFRA] Runs SBT checkstyle after the build to work around a side-effect

2018-06-18 Thread gurwls223
Repository: spark
Updated Branches:
  refs/heads/master c7c0b086a -> b0a935255


[SPARK-24573][INFRA] Runs SBT checkstyle after the build to work around a 
side-effect

## What changes were proposed in this pull request?

Seems checkstyle affects the build in the PR builder in Jenkins. I can't 
reproduce in my local and seems it can only be reproduced in the PR builder.

I was checking the places it goes through and this is just a speculation that 
checkstyle's compilation in SBT has a side effect to the assembly build.

This PR proposes to run the SBT checkstyle after the build.

## How was this patch tested?

Jenkins tests.

Author: hyukjinkwon 

Closes #21579 from HyukjinKwon/investigate-javastyle.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b0a93525
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b0a93525
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b0a93525

Branch: refs/heads/master
Commit: b0a935255951280b49c39968f6234163e2f0e379
Parents: c7c0b08
Author: hyukjinkwon 
Authored: Mon Jun 18 15:32:34 2018 +0800
Committer: hyukjinkwon 
Committed: Mon Jun 18 15:32:34 2018 +0800

--
 dev/run-tests.py | 11 ---
 1 file changed, 8 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/b0a93525/dev/run-tests.py
--
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 5e8c859..cd45908 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -357,7 +357,7 @@ def build_spark_unidoc_sbt(hadoop_version):
 exec_sbt(profiles_and_goals)
 
 
-def build_spark_assembly_sbt(hadoop_version):
+def build_spark_assembly_sbt(hadoop_version, checkstyle=False):
 # Enable all of the profiles for the build:
 build_profiles = get_hadoop_profiles(hadoop_version) + 
modules.root.build_profile_flags
 sbt_goals = ["assembly/package"]
@@ -366,6 +366,9 @@ def build_spark_assembly_sbt(hadoop_version):
   " ".join(profiles_and_goals))
 exec_sbt(profiles_and_goals)
 
+if checkstyle:
+run_java_style_checks()
+
 # Note that we skip Unidoc build only if Hadoop 2.6 is explicitly set in 
this SBT build.
 # Due to a different dependency resolution in SBT & Unidoc by an unknown 
reason, the
 # documentation build fails on a specific machine & environment in Jenkins 
but it was unable
@@ -570,11 +573,13 @@ def main():
 or f.endswith("scalastyle-config.xml")
 for f in changed_files):
 run_scala_style_checks()
+should_run_java_style_checks = False
 if not changed_files or any(f.endswith(".java")
 or f.endswith("checkstyle.xml")
 or f.endswith("checkstyle-suppressions.xml")
 for f in changed_files):
-run_java_style_checks()
+# Run SBT Checkstyle after the build to prevent a side-effect to the 
build.
+should_run_java_style_checks = True
 if not changed_files or any(f.endswith("lint-python")
 or f.endswith("tox.ini")
 or f.endswith(".py")
@@ -603,7 +608,7 @@ def main():
 detect_binary_inop_with_mima(hadoop_version)
 # Since we did not build assembly/package before running dev/mima, we 
need to
 # do it here because the tests still rely on it; see SPARK-13294 for 
details.
-build_spark_assembly_sbt(hadoop_version)
+build_spark_assembly_sbt(hadoop_version, should_run_java_style_checks)
 
 # run the test suites
 run_scala_tests(build_tool, hadoop_version, test_modules, excluded_tags)


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org