This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0a9bc446163 [MINOR][CONNECT] Fix some typos in connect server module
0a9bc446163 is described below

commit 0a9bc446163e0d3b06c8bf774d728d3770ac405b
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Wed Aug 2 10:52:32 2023 +0800

    [MINOR][CONNECT] Fix some typos in connect server module
    
    ### What changes were proposed in this pull request?
    This PR just fixes some typos in the connect server module
    
    ### Why are the changes needed?
    Fix typos
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    Closes #42259 from LuciferYang/connect-server-typo.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .../src/main/scala/org/apache/spark/sql/connect/dsl/package.scala     | 2 +-
 .../org/apache/spark/sql/connect/execution/ExecuteThreadRunner.scala  | 4 ++--
 .../spark/sql/connect/planner/StreamingForeachBatchHelper.scala       | 4 ++--
 .../scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala    | 2 +-
 .../spark/sql/connect/service/SparkConnectInterceptorRegistry.scala   | 2 +-
 .../org/apache/spark/sql/connect/service/SparkConnectService.scala    | 4 ++--
 .../spark/sql/connect/service/SparkConnectStreamingQueryCache.scala   | 2 +-
 .../org/apache/spark/sql/connect/ui/SparkConnectServerListener.scala  | 2 +-
 8 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
index 25d722cf58d..86c38277c1b 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/dsl/package.scala
@@ -1056,7 +1056,7 @@ package object dsl {
       def randomSplit(weights: Array[Double], seed: Long): Array[Relation] = {
         require(
           weights.forall(_ >= 0),
-          s"Weights must be nonnegative, but got ${weights.mkString("[", ",", "]")}")
+          s"Weights must be non-negative, but got ${weights.mkString("[", ",", "]")}")
         require(
           weights.sum > 0,
          s"Sum of weights must be positive, but got ${weights.mkString("[", ",", "]")}")
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/ExecuteThreadRunner.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/ExecuteThreadRunner.scala
index ec078216c21..662288177dc 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/ExecuteThreadRunner.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/execution/ExecuteThreadRunner.scala
@@ -38,7 +38,7 @@ import org.apache.spark.util.Utils
 private[connect] class ExecuteThreadRunner(executeHolder: ExecuteHolder) extends Logging {
 
   // The newly created thread will inherit all InheritableThreadLocals used by Spark,
-  // e.g. SparkContext.localProperties. If considering implementing a threadpool,
+  // e.g. SparkContext.localProperties. If considering implementing a thread-pool,
   // forwarding of thread locals needs to be taken into account.
   private var executionThread: Thread = new ExecutionThread()
 
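The comment fixed above points at a real JVM subtlety: InheritableThreadLocal values are copied into a child thread only when the thread is created, not when work is submitted to it, which is why a thread pool would need explicit forwarding. A minimal JDK-only sketch (InheritableDemo is an illustrative name, not Spark code):

    import java.util.concurrent.Executors

    object InheritableDemo {
      val local = new InheritableThreadLocal[String] {
        override protected def initialValue(): String = "unset"
      }

      def main(args: Array[String]): Unit = {
        val pool = Executors.newFixedThreadPool(1)
        pool.submit(new Runnable { def run(): Unit = () }).get() // force pool thread creation now

        local.set("from-parent")

        val t = new Thread(new Runnable { def run(): Unit = println(local.get()) })
        t.start(); t.join() // prints "from-parent": value inherited at thread creation

        pool.submit(new Runnable { def run(): Unit = println(local.get()) }).get()
        // prints "unset": the pool thread predates set(), so a pool would need
        // to forward thread-local values explicitly with each task
        pool.shutdown()
      }
    }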
@@ -166,7 +166,7 @@ private[connect] class ExecuteThreadRunner(executeHolder: ExecuteHolder) extends
         executeHolder.responseObserver.onNext(createResultComplete())
       }
       synchronized {
-        // Prevent interrupt after onCompleted, and throwing error to an alredy closed stream.
+        // Prevent interrupt after onCompleted, and throwing error to an already closed stream.
         completed = true
         executeHolder.responseObserver.onCompleted()
       }
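The guarantee this comment describes is a small ordering protocol: once onCompleted has been sent, a racing interrupt must become a no-op so nothing is written to the closed stream. A hedged sketch of that idiom (Completer and its callback are illustrative names, not the actual ExecuteThreadRunner members):

    // Once complete() runs, interrupt() can no longer fire: both paths are
    // guarded by the same lock and the completed flag.
    class Completer(thread: Thread, onCompleted: () => Unit) {
      private var completed = false

      def complete(): Unit = synchronized {
        completed = true // prevent later interrupts from hitting a closed stream
        onCompleted()
      }

      def interrupt(): Unit = synchronized {
        if (!completed) thread.interrupt() // no-op after completion
      }
    }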
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
index 3b9ae483cf1..998faf327d0 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/StreamingForeachBatchHelper.scala
@@ -113,8 +113,8 @@ object StreamingForeachBatchHelper extends Logging {
   }
 
   // TODO(SPARK-44433): Improve termination of Processes
-  //   The goal is that when a query is terminated, the python process asociated with foreachBatch
-  //   should be terminated. One way to do that is by registering stremaing query listener:
+  //   The goal is that when a query is terminated, the python process associated with foreachBatch
+  //   should be terminated. One way to do that is by registering streaming query listener:
   //   After pythonForeachBatchWrapper() is invoked by the SparkConnectPlanner.
   //   At that time, we don't have the streaming queries yet.
   //   Planner should call back into this helper with the query id when it starts it immediately
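For reference, the streaming-query-listener route this TODO mentions would build on the public StreamingQueryListener API, roughly as sketched below; the cleanups map is an assumption for illustration, not the SPARK-44433 design:

    import java.util.UUID
    import java.util.concurrent.ConcurrentHashMap

    import org.apache.spark.sql.streaming.StreamingQueryListener
    import org.apache.spark.sql.streaming.StreamingQueryListener._

    // Runs a registered cleanup (e.g. stopping a worker process) when the
    // matching query terminates. Illustrative only.
    class CleanupListener(cleanups: ConcurrentHashMap[UUID, () => Unit])
        extends StreamingQueryListener {
      override def onQueryStarted(event: QueryStartedEvent): Unit = ()
      override def onQueryProgress(event: QueryProgressEvent): Unit = ()
      override def onQueryTerminated(event: QueryTerminatedEvent): Unit =
        Option(cleanups.remove(event.id)).foreach(cleanup => cleanup())
    }

Such a listener would be registered once via spark.streams.addListener(...), with entries added to the map as query ids become known.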
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
index 3c7cfbb19e6..a49c0a8bacf 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/ExecuteHolder.scala
@@ -51,7 +51,7 @@ private[connect] class ExecuteHolder(
 
   /**
    * Tag that is set for this execution on SparkContext, via SparkContext.addJobTag. Used
-   * (internally) for cancallation of the Spark Jobs ran by this execution.
+   * (internally) for cancellation of the Spark Jobs ran by this execution.
    */
   val jobTag = ExecuteJobTag(sessionHolder.userId, sessionHolder.sessionId, operationId)
 
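The job tag in this doc comment rests on public SparkContext API available since Spark 3.5 (addJobTag, removeJobTag, cancelJobsWithTag). A minimal sketch with an illustrative tag value:

    import org.apache.spark.sql.SparkSession

    object JobTagDemo {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().master("local[2]").appName("job-tags").getOrCreate()
        val sc = spark.sparkContext

        sc.addJobTag("demo-execution") // jobs from this thread now carry the tag
        try {
          spark.range(0, 1000000).count() // this job is tagged "demo-execution"
        } finally {
          sc.removeJobTag("demo-execution")
        }

        // From another thread this cancels all running jobs carrying the tag
        // (a no-op here, since the count above already finished):
        sc.cancelJobsWithTag("demo-execution")
        spark.stop()
      }
    }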
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala
index cddd4b97663..b8df0ed21d1 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectInterceptorRegistry.scala
@@ -35,7 +35,7 @@ object SparkConnectInterceptorRegistry {
 
   // Contains the list of configured interceptors.
   private lazy val interceptorChain: Seq[InterceptorBuilder] = Seq(
-    // Adding a new interceptor at compile time works like the eaxmple below with the dummy
+    // Adding a new interceptor at compile time works like the example below with the dummy
     // interceptor:
     // interceptor[DummyInterceptor](classOf[DummyInterceptor])
   )
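For context, each entry in this chain builds an io.grpc.ServerInterceptor. A sketch of what a dummy interceptor of the kind the comment names could look like (this DummyInterceptor is illustrative, not a class in the repository):

    import io.grpc.{Metadata, ServerCall, ServerCallHandler, ServerInterceptor}

    class DummyInterceptor extends ServerInterceptor {
      override def interceptCall[ReqT, RespT](
          call: ServerCall[ReqT, RespT],
          headers: Metadata,
          next: ServerCallHandler[ReqT, RespT]): ServerCall.Listener[ReqT] = {
        // Observe the call, then pass it through unchanged.
        println(s"intercepted: ${call.getMethodDescriptor.getFullMethodName}")
        next.startCall(call, headers)
      }
    }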
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectService.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectService.scala
index b4c6572ff8e..f5971363d57 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectService.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectService.scala
@@ -270,7 +270,7 @@ object SparkConnectService extends Logging {
   // For testing purpose, it's package level private.
   private[connect] def localPort: Int = {
     assert(server != null)
-    // Return the actual local port being used. This can be different from the csonfigured port
+    // Return the actual local port being used. This can be different from the configured port
     // when the server binds to the port 0 as an example.
     server.getPort
   }
@@ -375,7 +375,7 @@ object SparkConnectService extends Logging {
   }
 
   /**
-   * Starts the GRPC Serivce.
+   * Starts the GRPC Service.
    */
   private def startGRPCService(): Unit = {
     val debugMode = SparkEnv.get.conf.getBoolean("spark.connect.grpc.debug.enabled", true)
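The port-0 remark in the hunk above is plain socket semantics: binding to port 0 asks the OS for a free ephemeral port, so the configured and actual ports differ and the actual one must be read back (which is what server.getPort does for the gRPC server). A JDK-only demonstration:

    import java.net.ServerSocket

    object PortZeroDemo {
      def main(args: Array[String]): Unit = {
        val socket = new ServerSocket(0) // configured port: 0
        println(s"actual local port: ${socket.getLocalPort}") // e.g. 54321
        socket.close()
      }
    }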
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamingQueryCache.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamingQueryCache.scala
index 10f765dff7f..7527b7477b5 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamingQueryCache.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectStreamingQueryCache.scala
@@ -161,7 +161,7 @@ private[connect] class SparkConnectStreamingQueryCache(
 
   /**
    * Periodic maintenance task to do the following:
-   *   - Update status of query if it is inactive. Sets an expiery time for such queries
+   *   - Update status of query if it is inactive. Sets an expiry time for such queries
    *   - Drop expired queries from the cache.
    *   - Poll sessions associated with the cached queries in order keep them alive in connect
    *     service' mapping (by invoking `sessionKeepAliveFn`).
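The maintenance task described in that doc comment follows a common cache pattern: a scheduled loop stamps inactive entries with an expiry time and evicts them once it passes. A rough sketch under illustrative names (Entry and the TTL handling are assumptions, not the actual cache implementation):

    import java.util.concurrent.{Executors, TimeUnit}
    import scala.collection.concurrent.TrieMap

    object CacheMaintenance {
      final case class Entry(active: Boolean, expiresAtMs: Option[Long])

      val cache = TrieMap.empty[String, Entry]
      private val scheduler = Executors.newSingleThreadScheduledExecutor()

      def start(ttlMs: Long, periodMs: Long): Unit = {
        val task: Runnable = () => {
          val now = System.currentTimeMillis()
          cache.foreach {
            case (id, e) if !e.active && e.expiresAtMs.isEmpty =>
              cache.update(id, e.copy(expiresAtMs = Some(now + ttlMs))) // mark for expiry
            case (id, e) if e.expiresAtMs.exists(_ < now) =>
              cache.remove(id) // drop expired entries
            case _ => // active or not yet expired; session keep-alive polling would go here
          }
        }
        scheduler.scheduleAtFixedRate(task, periodMs, periodMs, TimeUnit.MILLISECONDS)
      }
    }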
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/ui/SparkConnectServerListener.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/ui/SparkConnectServerListener.scala
index 90f9afebcb6..ec6079a8914 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/ui/SparkConnectServerListener.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/ui/SparkConnectServerListener.scala
@@ -205,7 +205,7 @@ private[connect] class SparkConnectServerListener(
         executionData.state = ExecutionState.READY
         updateLiveStore(executionData)
       case None =>
-        logWarning(s"onOperationReadyForExectuion called with unknown operation id: ${e.jobTag}")
+        logWarning(s"onOperationReadyForExecution called with unknown operation id: ${e.jobTag}")
     }
   }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
