This is an automated email from the ASF dual-hosted git repository.
curth pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-adbc.git
The following commit(s) were added to refs/heads/main by this push:
new f9b26cbd2 feat(csharp/src/Drivers/Databricks): Integrate
OAuthClientCredentialsProvider with Databricks Driver (#2762)
f9b26cbd2 is described below
commit f9b26cbd2456b77ceca57a0e3c0b998e024ff90c
Author: Todd Meng <[email protected]>
AuthorDate: Tue May 6 04:25:43 2025 -0700
feat(csharp/src/Drivers/Databricks): Integrate
OAuthClientCredentialsProvider with Databricks Driver (#2762)
Integrates Client Credentials M2M Auth flow with rest of driver.
Layering: DatabricksConnection/SparkConnection -> TProtocol ->
TTransport -> OAuthDelegatingHandler -> HttpClient
- Each `DatabricksConnection` or `SparkConnection` instance creates an
`OAuthClientCredentialsProvider`, which manages access token retrieval
and expiration.
- This provider is used by an `OAuthDelegatingHandler`, a custom
`HttpMessageHandler` that overrides `SendAsync` to inject a valid OAuth
token into each HTTP request header.
- The `OAuthDelegatingHandler` is then attached to an `HttpClient`,
which is used by the Thrift `TTransport` and `TProtocol` stack for
communication.
This setup ensures that every outgoing Thrift-over-HTTP request from the
Connection includes a valid OAuth token.
This should be thread-safe; each connection will have its own
`OAuthClientCredentialsProvider` managing a single non-expired access
token. The `OAuthClientCredentialsProvider` has Semaphore locking logic
to prevent race conditions from populating the cache with stale tokens
if multiple threads share a connection.
Includes new parameters: client_id, client_secret, and auth_flow == 1.
If using OAuth and auth_flow == 1, the driver parses the required
client_id and client_secret parameters.
To test, add to DATABRICKS_TEST_CONFIG_FILE environment variable json
```
"uri": "...",
"auth_type": "OAuth",
"grant_type": "client_credentials",
"client_id": "...",
"client_secret": "dos...",
```
Follow-up: add tests for token expiration logic
---
csharp/src/Drivers/Apache/Spark/README.md | 2 +-
.../Drivers/Apache/Spark/SparkHttpConnection.cs | 27 +++---
.../Databricks/Auth/DatabricksOAuthGrantType.cs | 68 +++++++++++++++
.../Auth/OAuthClientCredentialsProvider.cs | 28 ++-----
.../Databricks/Auth/OAuthDelegatingHandler.cs | 43 ++++++++++
.../src/Drivers/Databricks/DatabricksConnection.cs | 98 +++++++++++++++++++++-
.../src/Drivers/Databricks/DatabricksParameters.cs | 48 ++++++++++-
csharp/src/Drivers/Databricks/readme.md | 18 +++-
.../Auth/OAuthClientCredentialsProviderTests.cs | 17 ++--
.../Databricks/DatabricksTestConfiguration.cs | 7 +-
.../Databricks/DatabricksTestEnvironment.cs | 12 +++
11 files changed, 319 insertions(+), 49 deletions(-)
diff --git a/csharp/src/Drivers/Apache/Spark/README.md
b/csharp/src/Drivers/Apache/Spark/README.md
index 1c5855bc2..2caa7892d 100644
--- a/csharp/src/Drivers/Apache/Spark/README.md
+++ b/csharp/src/Drivers/Apache/Spark/README.md
@@ -27,7 +27,7 @@ but can also be passed in the call to `AdbcDatabase.Connect`.
| Property | Description | Default |
| :--- | :--- | :--- |
| `adbc.spark.type` | (Required) Indicates the Spark server type.
Currently only `http` (future: `standard`) | |
-| `adbc.spark.auth_type` | An indicator of the intended type of
authentication. Allowed values: `none`, `username_only`, `basic`, and `token`.
This property is optional. The authentication type can be inferred from
`token`, `username`, and `password`. If a `token` value is provided, token
authentication is used. Otherwise, if both `username` and `password` values are
provided, basic authentication is used. | |
+| `adbc.spark.auth_type` | An indicator of the intended type of
authentication. Allowed values: `none`, `username_only`, `basic`, `token`, and
`auth_type`. This property is optional. The authentication type can be inferred
from `token`, `username`, and `password`. If a `token` value is provided, token
authentication is used. Otherwise, if both `username` and `password` values are
provided, basic authentication is used. If `auth_type` is provided, token
authentication is used by default, [...]
| `adbc.spark.host` | Host name for the data source. Do not include
scheme or port number. Example: `sparkserver.region.cloudapp.azure.com` | |
| `adbc.spark.port` | The port number the data source listens on for a
new connections. | `443` |
| `adbc.spark.path` | The URI path on the data source server. Example:
`sql/protocolv1/o/0123456789123456/01234-0123456-source` | |
diff --git a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
index f17782ebc..627f5fb49 100644
--- a/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
+++ b/csharp/src/Drivers/Apache/Spark/SparkHttpConnection.cs
@@ -50,7 +50,6 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
Properties.TryGetValue(AdbcOptions.Username, out string? username);
Properties.TryGetValue(AdbcOptions.Password, out string? password);
Properties.TryGetValue(SparkParameters.AuthType, out string?
authType);
- Properties.TryGetValue(SparkParameters.AccessToken, out string?
access_token);
if (!SparkAuthTypeParser.TryParse(authType, out SparkAuthType
authTypeValue))
{
throw new
ArgumentOutOfRangeException(SparkParameters.AuthType, authType, $"Unsupported
{SparkParameters.AuthType} value.");
@@ -85,16 +84,22 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
break;
case SparkAuthType.OAuth:
- if (string.IsNullOrWhiteSpace(access_token))
- throw new ArgumentException(
- $"Parameter '{SparkParameters.AuthType}' is set to
'{SparkAuthTypeConstants.OAuth}' but parameter '{SparkParameters.AccessToken}'
is not set. Please provide a value for '{SparkParameters.AccessToken}'.",
- nameof(Properties));
+ ValidateOAuthParameters();
break;
default:
throw new
ArgumentOutOfRangeException(SparkParameters.AuthType, authType, $"Unsupported
{SparkParameters.AuthType} value.");
}
}
+ protected virtual void ValidateOAuthParameters()
+ {
+ Properties.TryGetValue(SparkParameters.AccessToken, out string?
access_token);
+ if (string.IsNullOrWhiteSpace(access_token))
+ throw new ArgumentException(
+ $"Parameter '{SparkParameters.AuthType}' is set to
'{SparkAuthTypeConstants.OAuth}' but parameter '{SparkParameters.AccessToken}'
is not set. Please provide a value for '{SparkParameters.AccessToken}'.",
+ nameof(Properties));
+ }
+
protected override void ValidateConnection()
{
// HostName or Uri is required parameter
@@ -157,14 +162,10 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
{
throw new
ArgumentOutOfRangeException(SparkParameters.AuthType, authType, $"Unsupported
{SparkParameters.AuthType} value.");
}
- Properties.TryGetValue(SparkParameters.Token, out string? token);
- Properties.TryGetValue(SparkParameters.AccessToken, out string?
access_token);
- Properties.TryGetValue(AdbcOptions.Username, out string? username);
- Properties.TryGetValue(AdbcOptions.Password, out string? password);
Properties.TryGetValue(AdbcOptions.Uri, out string? uri);
Uri baseAddress = GetBaseAddress(uri, hostName, path, port,
SparkParameters.HostName, TlsOptions.IsTlsEnabled);
- AuthenticationHeaderValue? authenticationHeaderValue =
GetAuthenticationHeaderValue(authTypeValue, token, username, password,
access_token);
+ AuthenticationHeaderValue? authenticationHeaderValue =
GetAuthenticationHeaderValue(authTypeValue);
HttpClient httpClient = new(CreateHttpHandler());
httpClient.BaseAddress = baseAddress;
@@ -185,8 +186,12 @@ namespace Apache.Arrow.Adbc.Drivers.Apache.Spark
return transport;
}
- private static AuthenticationHeaderValue?
GetAuthenticationHeaderValue(SparkAuthType authType, string? token, string?
username, string? password, string? access_token)
+ protected virtual AuthenticationHeaderValue?
GetAuthenticationHeaderValue(SparkAuthType authType)
{
+ Properties.TryGetValue(SparkParameters.Token, out string? token);
+ Properties.TryGetValue(SparkParameters.AccessToken, out string?
access_token);
+ Properties.TryGetValue(AdbcOptions.Username, out string? username);
+ Properties.TryGetValue(AdbcOptions.Password, out string? password);
if (!string.IsNullOrEmpty(token) && (authType ==
SparkAuthType.Empty || authType == SparkAuthType.Token))
{
return new
AuthenticationHeaderValue(BearerAuthenticationScheme, token);
diff --git a/csharp/src/Drivers/Databricks/Auth/DatabricksOAuthGrantType.cs
b/csharp/src/Drivers/Databricks/Auth/DatabricksOAuthGrantType.cs
new file mode 100644
index 000000000..29f92c810
--- /dev/null
+++ b/csharp/src/Drivers/Databricks/Auth/DatabricksOAuthGrantType.cs
@@ -0,0 +1,68 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
+{
+ /// <summary>
+ /// Enum representing the OAuth grant types supported by the Databricks
driver.
+ /// </summary>
+ internal enum DatabricksOAuthGrantType
+ {
+ /// <summary>
+ /// Use a pre-generated Databricks personal access token for
authentication. Default value.
+ /// </summary>
+ AccessToken,
+
+ /// <summary>
+ /// Use OAuth client credentials flow for m2m authentication.
+ /// </summary>
+ ClientCredentials,
+ }
+
+ /// <summary>
+ /// Parser for converting string grant type values to the
DatabricksOAuthGrantType enum.
+ /// </summary>
+ internal static class DatabricksOAuthGrantTypeParser
+ {
+ /// <summary>
+ /// Tries to parse a string grant type value to the corresponding
DatabricksOAuthGrantType enum value. If
+ /// the grant type is not supported, the default value is returned.
+ /// </summary>
+ /// <param name="grantType">The string grant type value to
parse.</param>
+ /// <param name="grantTypeValue">The parsed DatabricksOAuthGrantType
enum value.</param>
+ /// <returns>True if the parsing was successful, false
otherwise.</returns>
+ internal static bool TryParse(string? grantType, out
DatabricksOAuthGrantType grantTypeValue)
+ {
+ switch (grantType?.Trim().ToLowerInvariant())
+ {
+ case null:
+ case "":
+ grantTypeValue = DatabricksOAuthGrantType.AccessToken;
+ return true;
+ case DatabricksConstants.OAuthGrantTypes.AccessToken:
+ grantTypeValue = DatabricksOAuthGrantType.AccessToken;
+ return true;
+ case DatabricksConstants.OAuthGrantTypes.ClientCredentials:
+ grantTypeValue =
DatabricksOAuthGrantType.ClientCredentials;
+ return true;
+ default:
+ grantTypeValue = DatabricksOAuthGrantType.AccessToken;
+ return false;
+ }
+ }
+ }
+}
diff --git
a/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
b/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
index 8fa797586..840f3f0eb 100644
--- a/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
+++ b/csharp/src/Drivers/Databricks/Auth/OAuthClientCredentialsProvider.cs
@@ -117,7 +117,7 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
var requestContent = new FormUrlEncodedContent(new[]
{
new KeyValuePair<string, string>("grant_type",
"client_credentials"),
- new KeyValuePair<string, string>("scope", "all-apis")
+ new KeyValuePair<string, string>("scope", "sql")
});
var request = new HttpRequestMessage(HttpMethod.Post,
_tokenEndpoint)
@@ -168,8 +168,14 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
};
}
- private async Task<string> GetAccessTokenAsync(CancellationToken
cancellationToken = default)
+ public async Task<string> GetAccessTokenAsync(CancellationToken
cancellationToken = default)
{
+ // First try to get cached token without acquiring lock
+ if (GetValidCachedToken() is string cachedToken)
+ {
+ return cachedToken;
+ }
+
await _tokenLock.WaitAsync(cancellationToken);
try
@@ -188,24 +194,6 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
}
}
-
- /// <summary>
- /// Gets an OAuth access token using the client credentials grant type.
- /// </summary>
- /// <param name="cancellationToken">A cancellation token to cancel the
operation.</param>
- /// <returns>The access token.</returns>
- public string GetAccessToken(CancellationToken cancellationToken =
default)
- {
- // First try to get cached token without acquiring lock
- if (GetValidCachedToken() is string cachedToken)
- {
- return cachedToken;
- }
-
- return
GetAccessTokenAsync(cancellationToken).GetAwaiter().GetResult();
- }
-
-
public void Dispose()
{
_tokenLock.Dispose();
diff --git a/csharp/src/Drivers/Databricks/Auth/OAuthDelegatingHandler.cs
b/csharp/src/Drivers/Databricks/Auth/OAuthDelegatingHandler.cs
new file mode 100644
index 000000000..df13284ff
--- /dev/null
+++ b/csharp/src/Drivers/Databricks/Auth/OAuthDelegatingHandler.cs
@@ -0,0 +1,43 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Apache.Arrow.Adbc.Drivers.Databricks.Auth
+{
+ internal class OAuthDelegatingHandler : DelegatingHandler
+ {
+ private readonly OAuthClientCredentialsProvider _tokenProvider;
+
+ public OAuthDelegatingHandler(HttpMessageHandler innerHandler,
OAuthClientCredentialsProvider tokenProvider)
+ : base(innerHandler)
+ {
+ _tokenProvider = tokenProvider ?? throw new
ArgumentNullException(nameof(tokenProvider));
+ }
+
+ protected override async Task<HttpResponseMessage>
SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+ string accessToken = await
_tokenProvider.GetAccessTokenAsync(cancellationToken);
+ request.Headers.Authorization = new
System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", accessToken);
+ HttpResponseMessage response = await base.SendAsync(request,
cancellationToken);
+ return response;
+ }
+ }
+}
diff --git a/csharp/src/Drivers/Databricks/DatabricksConnection.cs
b/csharp/src/Drivers/Databricks/DatabricksConnection.cs
index cd7fc02a0..9951b0440 100644
--- a/csharp/src/Drivers/Databricks/DatabricksConnection.cs
+++ b/csharp/src/Drivers/Databricks/DatabricksConnection.cs
@@ -20,10 +20,12 @@ using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net.Http;
+using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Apache.Arrow.Adbc.Drivers.Apache;
using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Drivers.Databricks.Auth;
using Apache.Arrow.Adbc.Drivers.Databricks.CloudFetch;
using Apache.Arrow.Ipc;
using Apache.Hive.Service.Rpc.Thrift;
@@ -161,11 +163,48 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
protected override HttpMessageHandler CreateHttpHandler()
{
- var baseHandler = base.CreateHttpHandler();
+ HttpMessageHandler baseHandler = base.CreateHttpHandler();
if (TemporarilyUnavailableRetry)
{
- return new RetryHttpHandler(baseHandler,
TemporarilyUnavailableRetryTimeout);
+ // Add OAuth handler if OAuth authentication is being used
+ baseHandler = new RetryHttpHandler(baseHandler,
TemporarilyUnavailableRetryTimeout);
}
+
+ // Add OAuth handler if OAuth authentication is being used
+ if (Properties.TryGetValue(SparkParameters.AuthType, out string?
authType) &&
+ SparkAuthTypeParser.TryParse(authType, out SparkAuthType
authTypeValue) &&
+ authTypeValue == SparkAuthType.OAuth &&
+ Properties.TryGetValue(DatabricksParameters.OAuthGrantType,
out string? grantTypeStr) &&
+ DatabricksOAuthGrantTypeParser.TryParse(grantTypeStr, out
DatabricksOAuthGrantType grantType) &&
+ grantType == DatabricksOAuthGrantType.ClientCredentials)
+ {
+ // Note: We assume that properties have already been validated
+ if (Properties.TryGetValue(SparkParameters.HostName, out
string? host) && !string.IsNullOrEmpty(host))
+ {
+ // Use hostname directly if provided
+ }
+ else if (Properties.TryGetValue(AdbcOptions.Uri, out string?
uri) && !string.IsNullOrEmpty(uri))
+ {
+ // Extract hostname from URI if URI is provided
+ if (Uri.TryCreate(uri, UriKind.Absolute, out Uri?
parsedUri))
+ {
+ host = parsedUri.Host;
+ }
+ }
+
+ Properties.TryGetValue(DatabricksParameters.OAuthClientId, out
string? clientId);
+ Properties.TryGetValue(DatabricksParameters.OAuthClientSecret,
out string? clientSecret);
+
+ var tokenProvider = new OAuthClientCredentialsProvider(
+ clientId!,
+ clientSecret!,
+ host!,
+ timeoutMinutes: 1
+ );
+
+ return new OAuthDelegatingHandler(baseHandler, tokenProvider);
+ }
+
return baseHandler;
}
@@ -373,5 +412,60 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
Task.FromResult(response.DirectResults.ResultSet.Results);
protected internal override Task<TRowSet>
GetRowSetAsync(TGetPrimaryKeysResp response, CancellationToken
cancellationToken = default) =>
Task.FromResult(response.DirectResults.ResultSet.Results);
+
+ protected override AuthenticationHeaderValue?
GetAuthenticationHeaderValue(SparkAuthType authType)
+ {
+ if (authType == SparkAuthType.OAuth)
+ {
+ Properties.TryGetValue(DatabricksParameters.OAuthGrantType,
out string? grantTypeStr);
+ if (DatabricksOAuthGrantTypeParser.TryParse(grantTypeStr, out
DatabricksOAuthGrantType grantType) &&
+ grantType == DatabricksOAuthGrantType.ClientCredentials)
+ {
+ // Return null for client credentials flow since OAuth
handler will handle authentication
+ return null;
+ }
+ }
+ return base.GetAuthenticationHeaderValue(authType);
+ }
+
+ protected override void ValidateOAuthParameters()
+ {
+ Properties.TryGetValue(DatabricksParameters.OAuthGrantType, out
string? grantTypeStr);
+ DatabricksOAuthGrantType grantType;
+
+ if (!DatabricksOAuthGrantTypeParser.TryParse(grantTypeStr, out
grantType))
+ {
+ throw new ArgumentOutOfRangeException(
+ DatabricksParameters.OAuthGrantType,
+ grantTypeStr,
+ $"Unsupported {DatabricksParameters.OAuthGrantType} value.
Refer to the Databricks documentation for valid values."
+ );
+ }
+
+ // If we have a valid grant type, validate the required parameters
+ if (grantType == DatabricksOAuthGrantType.ClientCredentials)
+ {
+ Properties.TryGetValue(DatabricksParameters.OAuthClientId, out
string? clientId);
+ Properties.TryGetValue(DatabricksParameters.OAuthClientSecret,
out string? clientSecret);
+
+ if (string.IsNullOrEmpty(clientId))
+ {
+ throw new ArgumentException(
+ $"Parameter '{DatabricksParameters.OAuthGrantType}' is
set to '{DatabricksConstants.OAuthGrantTypes.ClientCredentials}' but parameter
'{DatabricksParameters.OAuthClientId}' is not set. Please provide a value for
'{DatabricksParameters.OAuthClientId}'.",
+ nameof(Properties));
+ }
+ if (string.IsNullOrEmpty(clientSecret))
+ {
+ throw new ArgumentException(
+ $"Parameter '{DatabricksParameters.OAuthGrantType}' is
set to '{DatabricksConstants.OAuthGrantTypes.ClientCredentials}' but parameter
'{DatabricksParameters.OAuthClientSecret}' is not set. Please provide a value
for '{DatabricksParameters.OAuthClientSecret}'.",
+ nameof(Properties));
+ }
+ }
+ else
+ {
+ // For other auth flows, use default OAuth validation
+ base.ValidateOAuthParameters();
+ }
+ }
}
}
diff --git a/csharp/src/Drivers/Databricks/DatabricksParameters.cs
b/csharp/src/Drivers/Databricks/DatabricksParameters.cs
index 7c6e9a69f..940837b66 100644
--- a/csharp/src/Drivers/Databricks/DatabricksParameters.cs
+++ b/csharp/src/Drivers/Databricks/DatabricksParameters.cs
@@ -16,6 +16,7 @@
*/
using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using System.Collections.Generic;
namespace Apache.Arrow.Adbc.Drivers.Databricks
{
@@ -116,6 +117,30 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
/// Default value is true if not specified.
/// </summary>
public const string CloudFetchPrefetchEnabled =
"adbc.databricks.cloudfetch.prefetch_enabled";
+
+ /// <summary>
+ /// The OAuth grant type to use for authentication.
+ /// Supported values:
+ /// - "access_token": Use a pre-generated Databricks personal access
token (default)
+ /// - "client_credentials": Use OAuth client credentials flow for m2m
authentication
+ /// When using "client_credentials", the driver will automatically
handle token acquisition,
+ /// renewal, and authentication with the Databricks service.
+ /// </summary>
+ public const string OAuthGrantType =
"adbc.databricks.oauth.grant_type";
+
+ /// <summary>
+ /// The OAuth client ID for client credentials flow.
+ /// Required when grant_type is "client_credentials".
+ /// This is the client ID you obtained when registering your
application with Databricks.
+ /// </summary>
+ public const string OAuthClientId = "adbc.databricks.oauth.client_id";
+
+ /// <summary>
+ /// The OAuth client secret for client credentials flow.
+ /// Required when grant_type is "client_credentials".
+ /// This is the client secret you obtained when registering your
application with Databricks.
+ /// </summary>
+ public const string OAuthClientSecret =
"adbc.databricks.oauth.client_secret";
}
/// <summary>
@@ -123,6 +148,27 @@ namespace Apache.Arrow.Adbc.Drivers.Databricks
/// </summary>
public class DatabricksConstants
{
-
+ /// <summary>
+ /// OAuth grant type constants
+ /// </summary>
+ public static class OAuthGrantTypes
+ {
+ /// <summary>
+ /// Use a pre-generated Databricks personal access token for
authentication.
+ /// When using this grant type, you must provide the token via the
+ /// adbc.spark.oauth.access_token parameter.
+ /// </summary>
+ public const string AccessToken = "access_token";
+
+ /// <summary>
+ /// Use OAuth client credentials flow for m2m authentication.
+ /// When using this grant type, you must provide:
+ /// - adbc.databricks.oauth.client_id: The OAuth client ID
+ /// - adbc.databricks.oauth.client_secret: The OAuth client secret
+ /// The driver will automatically handle token acquisition,
renewal, and
+ /// authentication with the Databricks service.
+ /// </summary>
+ public const string ClientCredentials = "client_credentials";
+ }
}
}
diff --git a/csharp/src/Drivers/Databricks/readme.md
b/csharp/src/Drivers/Databricks/readme.md
index b794255b0..c47b7a67c 100644
--- a/csharp/src/Drivers/Databricks/readme.md
+++ b/csharp/src/Drivers/Databricks/readme.md
@@ -21,9 +21,21 @@
The Databricks ADBC driver is built on top of the Spark ADBC driver and
inherits many of it's [properties](../Apache/Spark/readme.md).
-The Databricks ADBC driver supports token-based authentiation using the
-[Databricks personal access
token](https://docs.databricks.com/en/dev-tools/auth/pat.html).
-Basic (username and password) authenication is not supported, at this time.
+The Databricks ADBC driver supports the following authentication methods:
+
+1. **Token-based authentication** using a [Databricks personal access
token](https://docs.databricks.com/en/dev-tools/auth/pat.html)
+ - Set `adbc.spark.auth_type` to `oauth`
+ - Set `adbc.databricks.oauth.grant_type` to `access_token` (this is the
default if not specified)
+ - Set `adbc.spark.oauth.access_token` to your Databricks personal access
token
+
+2. **OAuth Client Credentials Flow** for m2m authentication
+ - Set `adbc.spark.auth_type` to `oauth`
+ - Set `adbc.databricks.oauth.grant_type` to `client_credentials`
+ - Set `adbc.databricks.oauth.client_id` to your OAuth client ID
+ - Set `adbc.databricks.oauth.client_secret` to your OAuth client secret
+ - The driver will automatically handle token acquisition, renewal, and
authentication with the Databricks service
+
+Basic (username and password) authentication is not supported at this time.
## Data Types
diff --git
a/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
b/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
index de9b01684..676f2b6c1 100644
--- a/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
+++ b/csharp/test/Drivers/Databricks/Auth/OAuthClientCredentialsProviderTests.cs
@@ -42,19 +42,19 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks.Auth
}
[SkippableFact]
- public void GetAccessToken_WithValidCredentials_ReturnsToken()
+ public async Task GetAccessToken_WithValidCredentials_ReturnsToken()
{
Skip.IfNot(!string.IsNullOrEmpty(TestConfiguration.OAuthClientId),
"OAuth credentials not configured");
var service = CreateService();
- var token = service.GetAccessToken();
+ var token = await service.GetAccessTokenAsync();
Assert.NotNull(token);
Assert.NotEmpty(token);
}
[SkippableFact]
- public void
GetAccessToken_WithCancellation_ThrowsOperationCanceledException()
+ public async Task
GetAccessToken_WithCancellation_ThrowsOperationCanceledException()
{
Skip.IfNot(!string.IsNullOrEmpty(TestConfiguration.OAuthClientId),
"OAuth credentials not configured");
@@ -62,19 +62,18 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks.Auth
using var cts = new CancellationTokenSource();
cts.Cancel();
- var ex = Assert.ThrowsAny<OperationCanceledException>(() =>
- service.GetAccessToken(cts.Token));
- Assert.IsType<TaskCanceledException>(ex);
+ await Assert.ThrowsAnyAsync<OperationCanceledException>(async () =>
+ await service.GetAccessTokenAsync(cts.Token));
}
[SkippableFact]
- public void GetAccessToken_MultipleCalls_ReusesCachedToken()
+ public async Task GetAccessToken_MultipleCalls_ReusesCachedToken()
{
Skip.IfNot(!string.IsNullOrEmpty(TestConfiguration.OAuthClientId),
"OAuth credentials not configured");
var service = CreateService();
- var token1 = service.GetAccessToken();
- var token2 = service.GetAccessToken();
+ var token1 = await service.GetAccessTokenAsync();
+ var token2 = await service.GetAccessTokenAsync();
Assert.Equal(token1, token2);
}
diff --git a/csharp/test/Drivers/Databricks/DatabricksTestConfiguration.cs
b/csharp/test/Drivers/Databricks/DatabricksTestConfiguration.cs
index 0f0366c20..5690604fc 100644
--- a/csharp/test/Drivers/Databricks/DatabricksTestConfiguration.cs
+++ b/csharp/test/Drivers/Databricks/DatabricksTestConfiguration.cs
@@ -22,10 +22,13 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks
{
public class DatabricksTestConfiguration : SparkTestConfiguration
{
- [JsonPropertyName("oauth_client_id"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ [JsonPropertyName("grant_type"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ public string OAuthGrantType { get; set; } = string.Empty;
+
+ [JsonPropertyName("client_id"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
public string OAuthClientId { get; set; } = string.Empty;
- [JsonPropertyName("oauth_client_secret"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
+ [JsonPropertyName("client_secret"), JsonIgnore(Condition =
JsonIgnoreCondition.WhenWritingDefault)]
public string OAuthClientSecret { get; set; } = string.Empty;
}
}
diff --git a/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
b/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
index a84817ed2..0a11556fd 100644
--- a/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
+++ b/csharp/test/Drivers/Databricks/DatabricksTestEnvironment.cs
@@ -93,6 +93,18 @@ namespace Apache.Arrow.Adbc.Tests.Drivers.Databricks
{
parameters.Add(SparkParameters.AuthType,
testConfiguration.AuthType!);
}
+ if (!string.IsNullOrEmpty(testConfiguration.OAuthGrantType))
+ {
+ parameters.Add(DatabricksParameters.OAuthGrantType,
testConfiguration.OAuthGrantType!);
+ }
+ if (!string.IsNullOrEmpty(testConfiguration.OAuthClientId))
+ {
+ parameters.Add(DatabricksParameters.OAuthClientId,
testConfiguration.OAuthClientId!);
+ }
+ if (!string.IsNullOrEmpty(testConfiguration.OAuthClientSecret))
+ {
+ parameters.Add(DatabricksParameters.OAuthClientSecret,
testConfiguration.OAuthClientSecret!);
+ }
if (!string.IsNullOrEmpty(testConfiguration.Type))
{
parameters.Add(SparkParameters.Type, testConfiguration.Type!);