This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git
The following commit(s) were added to refs/heads/main by this push:
new 6e53880 [SPARK-52321] Add
`SessionClosed/SqlConfNotFound/ParseSyntaxError` to `SparkConnectError`
6e53880 is described below
commit 6e53880c299f48f633badb06bf4b2a62a79aa3f2
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon May 26 22:54:45 2025 -0700
[SPARK-52321] Add `SessionClosed/SqlConfNotFound/ParseSyntaxError` to
`SparkConnectError`
### What changes were proposed in this pull request?
This PR aims to add `SessionClosed`, `SqlConfNotFound`, and `ParseSyntaxError`
to `SparkConnectError`.
### Why are the changes needed?
To allow users to catch these exceptions easily instead of matching
`internalError` against string patterns.
### Does this PR introduce _any_ user-facing change?
Yes, but these are more specific exceptions than before.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #180 from dongjoon-hyun/SPARK-52321.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
Sources/SparkConnect/DataFrame.swift | 2 ++
Sources/SparkConnect/SparkConnectClient.swift | 4 ++++
Sources/SparkConnect/SparkConnectError.swift | 3 +++
Tests/SparkConnectTests/CatalogTests.swift | 4 ++--
Tests/SparkConnectTests/RuntimeConfTests.swift | 4 ++--
Tests/SparkConnectTests/SparkSessionTests.swift | 2 +-
6 files changed, 14 insertions(+), 5 deletions(-)
diff --git a/Sources/SparkConnect/DataFrame.swift
b/Sources/SparkConnect/DataFrame.swift
index fa1eb48..db720ba 100644
--- a/Sources/SparkConnect/DataFrame.swift
+++ b/Sources/SparkConnect/DataFrame.swift
@@ -311,6 +311,8 @@ public actor DataFrame: Sendable {
throw SparkConnectError.TableOrViewNotFound
case let m where m.contains("UNRESOLVED_COLUMN.WITH_SUGGESTION"):
throw SparkConnectError.ColumnNotFound
+ case let m where m.contains("PARSE_SYNTAX_ERROR"):
+ throw SparkConnectError.ParseSyntaxError
default:
throw error
}
diff --git a/Sources/SparkConnect/SparkConnectClient.swift
b/Sources/SparkConnect/SparkConnectClient.swift
index c9fc5b0..f742973 100644
--- a/Sources/SparkConnect/SparkConnectClient.swift
+++ b/Sources/SparkConnect/SparkConnectClient.swift
@@ -131,6 +131,10 @@ public actor SparkConnectClient {
return try await f(client)
} catch let error as RPCError where error.code == .internalError {
switch error.message {
+ case let m where m.contains("INVALID_HANDLE.SESSION_CLOSED"):
+ throw SparkConnectError.SessionClosed
+ case let m where m.contains("SQL_CONF_NOT_FOUND"):
+ throw SparkConnectError.SqlConfNotFound
case let m where m.contains("TABLE_OR_VIEW_ALREADY_EXISTS"):
throw SparkConnectError.TableOrViewAlreadyExists
case let m where m.contains("TABLE_OR_VIEW_NOT_FOUND"):
diff --git a/Sources/SparkConnect/SparkConnectError.swift
b/Sources/SparkConnect/SparkConnectError.swift
index f235859..890e16c 100644
--- a/Sources/SparkConnect/SparkConnectError.swift
+++ b/Sources/SparkConnect/SparkConnectError.swift
@@ -25,7 +25,10 @@ public enum SparkConnectError: Error {
case InvalidSessionID
case InvalidType
case InvalidViewName
+ case ParseSyntaxError
case SchemaNotFound
+ case SessionClosed
+ case SqlConfNotFound
case TableOrViewAlreadyExists
case TableOrViewNotFound
case UnsupportedOperation
diff --git a/Tests/SparkConnectTests/CatalogTests.swift
b/Tests/SparkConnectTests/CatalogTests.swift
index b63fd35..0888fdd 100644
--- a/Tests/SparkConnectTests/CatalogTests.swift
+++ b/Tests/SparkConnectTests/CatalogTests.swift
@@ -144,7 +144,7 @@ struct CatalogTests {
})
#expect(try await spark.catalog.tableExists(tableName) == false)
- try await #require(throws: Error.self) {
+ try await #require(throws: SparkConnectError.ParseSyntaxError) {
try await spark.catalog.tableExists("invalid table name")
}
await spark.stop()
@@ -190,7 +190,7 @@ struct CatalogTests {
#expect(try await spark.catalog.functionExists("base64"))
#expect(try await spark.catalog.functionExists("non_exist_function") ==
false)
- try await #require(throws: Error.self) {
+ try await #require(throws: SparkConnectError.ParseSyntaxError) {
try await spark.catalog.functionExists("invalid function name")
}
await spark.stop()
diff --git a/Tests/SparkConnectTests/RuntimeConfTests.swift
b/Tests/SparkConnectTests/RuntimeConfTests.swift
index 53f3813..9ef48e1 100644
--- a/Tests/SparkConnectTests/RuntimeConfTests.swift
+++ b/Tests/SparkConnectTests/RuntimeConfTests.swift
@@ -35,7 +35,7 @@ struct RuntimeConfTests {
#expect(try await !conf.get("spark.app.name").isEmpty)
- try await #require(throws: Error.self) {
+ try await #require(throws: SparkConnectError.SqlConfNotFound) {
try await conf.get("spark.test.non-exist")
}
@@ -86,7 +86,7 @@ struct RuntimeConfTests {
#expect(try await conf.get("spark.test.key1") == "value1")
try await conf.unset("spark.test.key1")
- try await #require(throws: Error.self) {
+ try await #require(throws: SparkConnectError.SqlConfNotFound) {
try await conf.get("spark.test.key1")
}
diff --git a/Tests/SparkConnectTests/SparkSessionTests.swift
b/Tests/SparkConnectTests/SparkSessionTests.swift
index 50eb95c..ab3af28 100644
--- a/Tests/SparkConnectTests/SparkSessionTests.swift
+++ b/Tests/SparkConnectTests/SparkSessionTests.swift
@@ -72,7 +72,7 @@ struct SparkSessionTests {
let sessionID = spark1.sessionID
await spark1.stop()
let remote = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ??
"sc://localhost"
- try await #require(throws: Error.self) {
+ try await #require(throws: SparkConnectError.SessionClosed) {
try await
SparkSession.builder.remote("\(remote)/;session_id=\(sessionID)").getOrCreate()
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]