[spark] branch master updated: [MINOR] Fix typos

2023-10-21 Thread srowen
This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
 new 920fb673b26 [MINOR] Fix typos
920fb673b26 is described below

commit 920fb673b264c0bdcad0426020dedf57d8b11cc7
Author: shuoer86 <129674997+shuoe...@users.noreply.github.com>
AuthorDate: Sat Oct 21 16:37:27 2023 -0500

[MINOR] Fix typos

Closes #43434 from shuoer86/master.

Authored-by: shuoer86 <129674997+shuoe...@users.noreply.github.com>
Signed-off-by: Sean Owen 
---
 binder/postBuild | 4 ++--
 .../scala/org/apache/spark/sql/connect/service/SessionHolder.scala  | 2 +-
 .../spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala  | 2 +-
 core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala | 2 +-
 core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala | 6 +++---
 .../main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala | 2 +-
 .../scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala   | 2 +-
 docs/sql-ref-syntax-ddl-declare-variable.md | 2 +-
 8 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/binder/postBuild b/binder/postBuild
index 70ae23b3937..b6bdf72324c 100644
--- a/binder/postBuild
+++ b/binder/postBuild
@@ -38,7 +38,7 @@ else
  pip install plotly "pandas<2.0.0" "pyspark[sql,ml,mllib,pandas_on_spark]$SPECIFIER$VERSION"
 fi
 
-# Set 'PYARROW_IGNORE_TIMEZONE' to surpress warnings from PyArrow.
+# Set 'PYARROW_IGNORE_TIMEZONE' to suppress warnings from PyArrow.
 echo "export PYARROW_IGNORE_TIMEZONE=1" >> ~/.profile
 
 # Add sbin to PATH to run `start-connect-server.sh`.
@@ -50,7 +50,7 @@ echo "export SPARK_HOME=${SPARK_HOME}" >> ~/.profile
 SPARK_VERSION=$(python -c "import pyspark; print(pyspark.__version__)")
 echo "export SPARK_VERSION=${SPARK_VERSION}" >> ~/.profile
 
-# Surpress warnings from Spark jobs, and UI progress bar.
+# Suppress warnings from Spark jobs, and UI progress bar.
 mkdir -p ~/.ipython/profile_default/startup
 echo """from pyspark.sql import SparkSession
 SparkSession.builder.config('spark.ui.showConsoleProgress', 'false').getOrCreate().sparkContext.setLogLevel('FATAL')
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
index 27f471233f1..dcced21f371 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
@@ -77,7 +77,7 @@ case class SessionHolder(userId: String, sessionId: String, session: SparkSession
   private[service] def addExecuteHolder(executeHolder: ExecuteHolder): Unit = {
     val oldExecute = executions.putIfAbsent(executeHolder.operationId, executeHolder)
     if (oldExecute != null) {
-      // the existance of this should alrady be checked by SparkConnectExecutionManager
+      // the existence of this should alrady be checked by SparkConnectExecutionManager
       throw new IllegalStateException(
         s"ExecuteHolder with opId=${executeHolder.operationId} already exists!")
 }
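
For context on the hunk above: putIfAbsent makes the insert and the duplicate check a single atomic step on the ConcurrentHashMap, so two threads registering the same operationId cannot both succeed. A minimal sketch of the idiom, with illustrative names rather than Spark's actual types:

import java.util.concurrent.ConcurrentHashMap

// Sketch of the putIfAbsent duplicate-detection idiom; "register" and the
// String values are stand-ins, not Spark's ExecuteHolder bookkeeping.
object PutIfAbsentSketch {
  private val executions = new ConcurrentHashMap[String, String]()

  def register(operationId: String, holder: String): Unit = {
    // putIfAbsent returns the previous value, or null if the key was free.
    val old = executions.putIfAbsent(operationId, holder)
    if (old != null) {
      throw new IllegalStateException(s"Operation $operationId already exists!")
    }
  }

  def main(args: Array[String]): Unit = {
    register("op-1", "holder-1")        // first registration succeeds
    try register("op-1", "holder-2")    // duplicate id throws
    catch { case e: IllegalStateException => println(e.getMessage) }
  }
}
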
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
index ea9ae3ed9d9..e1de6b04d21 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
@@ -226,7 +226,7 @@ class SparkConnectPluginRegistrySuite extends SharedSparkSession with SparkConne
 }
   }
 
-  test("Emtpy registries are really empty and work") {
+  test("Empty registries are really empty and work") {
 assert(SparkConnectPluginRegistry.loadRelationPlugins().isEmpty)
 assert(SparkConnectPluginRegistry.loadExpressionPlugins().isEmpty)
 assert(SparkConnectPluginRegistry.loadCommandPlugins().isEmpty)
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
index f80190c96e8..73e72b7f1df 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
@@ -259,7 +259,7 @@ private[storage] class BlockInfoManager(trackingCacheVisibility: Boolean = false
   }
 
   /**
-   * Apply function `f` on the [[BlockInfo]] object and the aquisition [[Condition]] for `blockId`.
+   * Apply function `f` on the [[BlockInfo]] object and the acquisition [[Condition]] for `blockId`.

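The doc comment in this hunk pairs each block's [[BlockInfo]] with an acquisition [[Condition]] on a shared lock, so threads can wait until a block becomes available. A minimal sketch of that lock/Condition pattern, purely illustrative rather than BlockInfoManager's actual structure:

import java.util.concurrent.locks.ReentrantLock

// Sketch of waiting on a Condition tied to a shared lock: a reader blocks on
// the condition until a writer marks the entry ready and signals it.
object ConditionSketch {
  private val lock = new ReentrantLock()
  private val ready = lock.newCondition()
  private var isReady = false

  def awaitReady(): Unit = {
    lock.lock()
    try while (!isReady) ready.await()   // re-check the flag after every wakeup
    finally lock.unlock()
  }

  def markReady(): Unit = {
    lock.lock()
    try { isReady = true; ready.signalAll() }
    finally lock.unlock()
  }

  def main(args: Array[String]): Unit = {
    new Thread(() => { Thread.sleep(100); markReady() }).start()
    awaitReady()
    println("entry became ready")
  }
}
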
[spark] branch master updated: [MINOR] Fix typos

2023-01-02 Thread gurwls223
This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
 new a09e9dc1531 [MINOR] Fix typos
a09e9dc1531 is described below

commit a09e9dc1531bdef905d4609945c7747622928905
Author: smallzhongfeng 
AuthorDate: Tue Jan 3 09:57:51 2023 +0900

[MINOR] Fix typos

### What changes were proposed in this pull request?

Fix typo in ReceiverSupervisorImpl.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

No need.

Closes #39340 from smallzhongfeng/fix-typos.

Authored-by: smallzhongfeng 
Signed-off-by: Hyukjin Kwon 
---
 core/src/main/scala/org/apache/spark/SparkContext.scala   | 4 ++--
 .../main/scala/org/apache/spark/ml/evaluation/ClusteringMetrics.scala | 4 ++--
 mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala | 2 +-
 .../src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala   | 2 +-
 .../spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala   | 2 +-
 .../spark/sql/catalyst/expressions/HigherOrderFunctionsSuite.scala| 2 +-
 .../org/apache/spark/streaming/receiver/ReceiverSupervisorImpl.scala  | 2 +-
 7 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5cbf2e83371..62e652ff9bb 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -3173,7 +3173,7 @@ object WritableConverter {
 
   implicit val bytesWritableConverterFn: () => WritableConverter[Array[Byte]] = {
 () => simpleWritableConverter[Array[Byte], BytesWritable] { bw =>
-  // getBytes method returns array which is longer then data to be returned
+  // getBytes method returns array which is longer than data to be returned
   Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
 }
   }
@@ -3204,7 +3204,7 @@ object WritableConverter {
 
   implicit def bytesWritableConverter(): WritableConverter[Array[Byte]] = {
 simpleWritableConverter[Array[Byte], BytesWritable] { bw =>
-  // getBytes method returns array which is longer then data to be returned
+  // getBytes method returns array which is longer than data to be returned
   Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
 }
   }
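
Both hunks above exist because BytesWritable keeps a capacity-padded backing array: getBytes can return more bytes than were written, and getLength bounds the valid prefix, hence the copyOfRange. A minimal sketch, assuming hadoop-common on the classpath:

import java.util.Arrays
import org.apache.hadoop.io.BytesWritable

// Sketch of why the trim is needed: after growing capacity, the backing
// array is longer than the data that was actually set.
object BytesWritableSketch {
  def main(args: Array[String]): Unit = {
    val bw = new BytesWritable()
    bw.set(Array[Byte](1, 2, 3), 0, 3)
    bw.setCapacity(8)                    // backing array grows past the data
    println(bw.getBytes.length)          // 8: includes padding
    println(bw.getLength)                // 3: the valid prefix
    val data = Arrays.copyOfRange(bw.getBytes, 0, bw.getLength)
    println(data.toSeq)                  // the three real bytes only
  }
}
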
diff --git a/mllib/src/main/scala/org/apache/spark/ml/evaluation/ClusteringMetrics.scala b/mllib/src/main/scala/org/apache/spark/ml/evaluation/ClusteringMetrics.scala
index 0106c872297..b8563bed601 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/evaluation/ClusteringMetrics.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/evaluation/ClusteringMetrics.scala
@@ -397,7 +397,7 @@ private[evaluation] object SquaredEuclideanSilhouette extends Silhouette {
 val clustersStatsMap = SquaredEuclideanSilhouette
       .computeClusterStats(dfWithSquaredNorm, predictionCol, featuresCol, weightCol)
 
-    // Silhouette is reasonable only when the number of clusters is greater then 1
+    // Silhouette is reasonable only when the number of clusters is greater than 1
     assert(clustersStatsMap.size > 1, "Number of clusters must be greater than one.")
 
     val bClustersStatsMap = dataset.sparkSession.sparkContext.broadcast(clustersStatsMap)
@@ -604,7 +604,7 @@ private[evaluation] object CosineSilhouette extends Silhouette {
     val clustersStatsMap = computeClusterStats(dfWithNormalizedFeatures, featuresCol,
       predictionCol, weightCol)
 
-    // Silhouette is reasonable only when the number of clusters is greater then 1
+    // Silhouette is reasonable only when the number of clusters is greater than 1
     assert(clustersStatsMap.size > 1, "Number of clusters must be greater than one.")
 
     val bClustersStatsMap = dataset.sparkSession.sparkContext.broadcast(clustersStatsMap)
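
Both silhouette implementations guard the same invariant: the metric is only defined when there is more than one cluster. A minimal sketch of reaching these code paths through the public ClusteringEvaluator API (the sample data and object name are illustrative):

import org.apache.spark.ml.clustering.KMeans
import org.apache.spark.ml.evaluation.ClusteringEvaluator
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.sql.SparkSession

// Sketch: k >= 2 keeps the cluster-count assert from firing.
object SilhouetteSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("silhouette").getOrCreate()
    import spark.implicits._

    val df = Seq(
      Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
      Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 9.1)
    ).map(Tuple1.apply).toDF("features")

    val predictions = new KMeans().setK(2).setSeed(1L).fit(df).transform(df)

    val evaluator = new ClusteringEvaluator()
      .setMetricName("silhouette")
      .setDistanceMeasure("squaredEuclidean")   // "cosine" selects the other object
    println(s"silhouette = ${evaluator.evaluate(predictions)}")

    spark.stop()
  }
}
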
diff --git a/mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala b/mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala
index bf9d07338db..8a124ae4f4c 100644
--- a/mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala
+++ b/mllib/src/main/scala/org/apache/spark/ml/stat/Summarizer.scala
@@ -105,7 +105,7 @@ object Summarizer extends Logging {
* @return a builder.
   * @throws IllegalArgumentException if one of the metric names is not understood.
*
-   * Note: Currently, the performance of this interface is about 2x~3x slower then using the RDD
+   * Note: Currently, the performance of this interface is about 2x~3x slower than using the RDD
* interface.
*/
   @Since("2.3.0")
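
The builder described in this doc comment batches several metrics into a single pass over the vector column. A minimal sketch of that usage (sample data and object name are illustrative):

import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.stat.Summarizer
import org.apache.spark.sql.SparkSession

// Sketch: request mean and variance together so the column is scanned once.
object SummarizerSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("summarizer").getOrCreate()
    import spark.implicits._

    val df = Seq(Vectors.dense(1.0, 2.0), Vectors.dense(3.0, 4.0))
      .map(Tuple1.apply).toDF("features")

    val stats = df.select(
      Summarizer.metrics("mean", "variance").summary($"features").as("stats")
    ).first()
    println(stats)   // struct of mean and variance vectors

    spark.stop()
  }
}
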
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala
 

[spark] branch master updated: [MINOR] Fix typos in comments and replace an explicit type with <>

2019-08-10 Thread srowen
This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
 new 8535df7  [MINOR] Fix typos in comments and replace an explicit type with <>
8535df7 is described below

commit 8535df72614800ba789286e569a39ea6e84b3354
Author: younggyu chun 
AuthorDate: Sat Aug 10 16:47:11 2019 -0500

[MINOR] Fix typos in comments and replace an explicit type with <>

## What changes were proposed in this pull request?
This PR fixed typos in comments and replaced explicit types with '<>' for Java 8+.

## How was this patch tested?
Manually tested.

Closes #25338 from younggyuchun/younggyu.

Authored-by: younggyu chun 
Signed-off-by: Sean Owen 
---
 appveyor.yml  | 2 +-
 .../java/org/apache/spark/network/ChunkFetchIntegrationSuite.java | 6 +++---
 .../test/java/org/apache/spark/network/RpcIntegrationSuite.java   | 8 ++++----
 .../org/apache/spark/network/TransportClientFactorySuite.java | 2 +-
 .../spark/network/shuffle/ExternalShuffleIntegrationSuite.java| 6 +++---
 .../test/java/org/apache/spark/unsafe/types/UTF8StringSuite.java  | 2 +-
 .../org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java   | 4 ++--
 .../org/apache/spark/metrics/source/AccumulatorSourceSuite.scala  | 2 +-
 .../examples/sql/streaming/JavaStructuredSessionization.java  | 2 +-
 .../spark/streaming/kafka010/JavaConsumerStrategySuite.java   | 2 +-
 .../spark/streaming/kafka010/JavaDirectKafkaStreamSuite.java  | 2 +-
 .../java/org/apache/spark/launcher/CommandBuilderUtilsSuite.java  | 2 +-
 .../test/java/org/apache/spark/ml/stat/JavaSummarizerSuite.java   | 2 +-
 .../apache/spark/mllib/regression/JavaRidgeRegressionSuite.java   | 4 ++--
 .../apache/spark/sql/execution/python/WindowInPandasExec.scala| 2 +-
 .../test/org/apache/spark/sql/JavaDataFrameReaderWriterSuite.java | 2 +-
 .../java/org/apache/spark/streaming/JavaMapWithStateSuite.java| 4 ++--
 17 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/appveyor.yml b/appveyor.yml
index 8fb090c..b0e946c 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -55,7 +55,7 @@ build_script:
 
 environment:
   NOT_CRAN: true
-  # See SPARK-27848. Currently installing some dependent packagess causes
+  # See SPARK-27848. Currently installing some dependent packages causes
   # "(converted from warning) unable to identify current timezone 'C':" for an 
unknown reason.
   # This environment variable works around to test SparkR against a higher version.
   R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
diff --git a/common/network-common/src/test/java/org/apache/spark/network/ChunkFetchIntegrationSuite.java b/common/network-common/src/test/java/org/apache/spark/network/ChunkFetchIntegrationSuite.java
index 5999b62..a818fe4 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/ChunkFetchIntegrationSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/ChunkFetchIntegrationSuite.java
@@ -151,9 +151,9 @@ public class ChunkFetchIntegrationSuite {
   clientFactory.createClient(TestUtils.getLocalHost(), server.getPort())) {
   final Semaphore sem = new Semaphore(0);
 
-      res.successChunks = Collections.synchronizedSet(new HashSet<Integer>());
-      res.failedChunks = Collections.synchronizedSet(new HashSet<Integer>());
-      res.buffers = Collections.synchronizedList(new LinkedList<ManagedBuffer>());
+      res.successChunks = Collections.synchronizedSet(new HashSet<>());
+      res.failedChunks = Collections.synchronizedSet(new HashSet<>());
+      res.buffers = Collections.synchronizedList(new LinkedList<>());
 
   ChunkReceivedCallback callback = new ChunkReceivedCallback() {
 @Override
diff --git a/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java b/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
index 117f1e4..498dc51 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/RpcIntegrationSuite.java
@@ -175,8 +175,8 @@ public class RpcIntegrationSuite {
 final Semaphore sem = new Semaphore(0);
 
 final RpcResult res = new RpcResult();
-    res.successMessages = Collections.synchronizedSet(new HashSet<String>());
-    res.errorMessages = Collections.synchronizedSet(new HashSet<String>());
+    res.successMessages = Collections.synchronizedSet(new HashSet<>());
+    res.errorMessages = Collections.synchronizedSet(new HashSet<>());
 
 RpcResponseCallback callback = new RpcResponseCallback() {
   @Override
@@ -208,8 +208,8 @@ public class RpcIntegrationSuite {
    TransportClient client = clientFactory.createClient(TestUtils.getLocalHost(), server.getPort());
 final Semaphore