This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 920fb673b26 [MINOR] Fix typos
920fb673b26 is described below

commit 920fb673b264c0bdcad0426020dedf57d8b11cc7
Author: shuoer86 <129674997+shuoe...@users.noreply.github.com>
AuthorDate: Sat Oct 21 16:37:27 2023 -0500

    [MINOR] Fix typos
    
    Closes #43434 from shuoer86/master.
    
    Authored-by: shuoer86 <129674997+shuoe...@users.noreply.github.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 binder/postBuild                                                    | 4 ++--
 .../scala/org/apache/spark/sql/connect/service/SessionHolder.scala  | 2 +-
 .../spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala  | 2 +-
 core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala | 2 +-
 core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala | 6 +++---
 .../main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala    | 2 +-
 .../scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala   | 2 +-
 docs/sql-ref-syntax-ddl-declare-variable.md                         | 2 +-
 8 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/binder/postBuild b/binder/postBuild
index 70ae23b3937..b6bdf72324c 100644
--- a/binder/postBuild
+++ b/binder/postBuild
@@ -38,7 +38,7 @@ else
  pip install plotly "pandas<2.0.0" "pyspark[sql,ml,mllib,pandas_on_spark]$SPECIFIER$VERSION"
 fi
 
-# Set 'PYARROW_IGNORE_TIMEZONE' to surpress warnings from PyArrow.
+# Set 'PYARROW_IGNORE_TIMEZONE' to suppress warnings from PyArrow.
 echo "export PYARROW_IGNORE_TIMEZONE=1" >> ~/.profile
 
 # Add sbin to PATH to run `start-connect-server.sh`.
@@ -50,7 +50,7 @@ echo "export SPARK_HOME=${SPARK_HOME}" >> ~/.profile
 SPARK_VERSION=$(python -c "import pyspark; print(pyspark.__version__)")
 echo "export SPARK_VERSION=${SPARK_VERSION}" >> ~/.profile
 
-# Surpress warnings from Spark jobs, and UI progress bar.
+# Suppress warnings from Spark jobs, and UI progress bar.
 mkdir -p ~/.ipython/profile_default/startup
 echo """from pyspark.sql import SparkSession
 SparkSession.builder.config('spark.ui.showConsoleProgress', 'false').getOrCreate().sparkContext.setLogLevel('FATAL')
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
index 27f471233f1..dcced21f371 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SessionHolder.scala
@@ -77,7 +77,7 @@ case class SessionHolder(userId: String, sessionId: String, session: SparkSessio
   private[service] def addExecuteHolder(executeHolder: ExecuteHolder): Unit = {
    val oldExecute = executions.putIfAbsent(executeHolder.operationId, executeHolder)
     if (oldExecute != null) {
-      // the existance of this should alrady be checked by SparkConnectExecutionManager
+      // the existence of this should alrady be checked by SparkConnectExecutionManager
       throw new IllegalStateException(
         s"ExecuteHolder with opId=${executeHolder.operationId} already 
exists!")
     }
diff --git a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
index ea9ae3ed9d9..e1de6b04d21 100644
--- a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
+++ b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
@@ -226,7 +226,7 @@ class SparkConnectPluginRegistrySuite extends SharedSparkSession with SparkConne
     }
   }
 
-  test("Emtpy registries are really empty and work") {
+  test("Empty registries are really empty and work") {
     assert(SparkConnectPluginRegistry.loadRelationPlugins().isEmpty)
     assert(SparkConnectPluginRegistry.loadExpressionPlugins().isEmpty)
     assert(SparkConnectPluginRegistry.loadCommandPlugins().isEmpty)
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
index f80190c96e8..73e72b7f1df 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockInfoManager.scala
@@ -259,7 +259,7 @@ private[storage] class BlockInfoManager(trackingCacheVisibility: Boolean = false
   }
 
   /**
-   * Apply function `f` on the [[BlockInfo]] object and the aquisition [[Condition]] for `blockId`.
+   * Apply function `f` on the [[BlockInfo]] object and the acquisition [[Condition]] for `blockId`.
   * Function `f` will be executed while holding the lock for the [[BlockInfo]] object. If `blockId`
    * was not registered, an error will be thrown.
    */
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 1e8289287cb..6118631e549 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -79,9 +79,9 @@ private[spark] class DiskBlockManager(
   private val shutdownHook = addShutdownHook()
 
  // If either of these features are enabled, we must change permissions on block manager
-  // directories and files to accomodate the shuffle service deleting files in a secure environment.
-  // Parent directories are assumed to be restrictive to prevent unauthorized users from accessing
-  // or modifying world readable files.
+  // directories and files to accommodate the shuffle service deleting files in a secure
+  // environment. Parent directories are assumed to be restrictive to prevent unauthorized users
+  // from accessing or modifying world readable files.
  private val permissionChangingRequired = conf.get(config.SHUFFLE_SERVICE_ENABLED) && (
     conf.get(config.SHUFFLE_SERVICE_REMOVE_SHUFFLE_ENABLED) ||
     conf.get(config.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala
index 5349361e014..7b6da1147c5 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/TaskThreadDumpPage.scala
@@ -102,7 +102,7 @@ private[spark] class TaskThreadDumpPage(
         </div>
       </div>
     }.getOrElse{
-      Text(s"Task $taskId finished or some error occured during dumping 
thread")
+      Text(s"Task $taskId finished or some error occurred during dumping 
thread")
     }
    UIUtils.headerSparkPage(request, s"Thread dump for task $taskId", content, parent)
   }
diff --git a/core/src/test/scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala b/core/src/test/scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala
index f717299a1ed..4ada18e8d93 100644
--- a/core/src/test/scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala
+++ b/core/src/test/scala/org/apache/spark/status/AutoCleanupLiveUIDirSuite.scala
@@ -36,7 +36,7 @@ class AutoCleanupLiveUIDirSuite extends SparkFunSuite {
           _ + _
         }
         .collect()
-      // `baseUIDir` should exists and not emtpy before SparkContext stop.
+      // `baseUIDir` should exists and not empty before SparkContext stop.
       assert(baseUIDir.exists())
       val subDirs = baseUIDir.listFiles()
       assert(subDirs.nonEmpty)
diff --git a/docs/sql-ref-syntax-ddl-declare-variable.md b/docs/sql-ref-syntax-ddl-declare-variable.md
index eea6222646f..f4daeb25579 100644
--- a/docs/sql-ref-syntax-ddl-declare-variable.md
+++ b/docs/sql-ref-syntax-ddl-declare-variable.md
@@ -69,7 +69,7 @@ DECLARE [ OR REPLACE ] [ VARIABLE ]
 ### Examples
 
 ```sql
--- The dense form of declaring a variabel with default
+-- The dense form of declaring a variable with default
 DECLARE five = 5;
 
 -- STRING variable initialialized to `NULL`


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
