lidavidm commented on code in PR #3325:
URL: https://github.com/apache/arrow-adbc/pull/3325#discussion_r2450059908


##########
go/adbc/go.mod:
##########
@@ -59,6 +60,7 @@ require (
        github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.4.0 // indirect
        github.com/BurntSushi/toml v1.4.0 // indirect
        github.com/andybalholm/brotli v1.2.0 // indirect
+       github.com/apache/arrow/go/v12 v12.0.1 // indirect

Review Comment:
   ...Does databricks not use the latest arrow-go? 🙁 
   
   Any chance that can be fixed?



##########
go/adbc/driver/databricks/connection.go:
##########
@@ -0,0 +1,241 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
import (
	"context"
	"database/sql"
	"errors"
	"fmt"
	"strings"

	"github.com/apache/arrow-adbc/go/adbc"
	"github.com/apache/arrow-adbc/go/adbc/driver/internal/driverbase"
	_ "github.com/databricks/databricks-sql-go"
)
+
// connectionImpl is the ADBC connection implementation, backed by a
// database/sql connection obtained from the databricks-sql-go driver.
type connectionImpl struct {
	driverbase.ConnectionImplBase

	// Catalog/schema most recently set on this connection; empty means
	// the server-side default is in effect.
	catalog  string
	dbSchema string

	// Underlying database/sql connection; nil once Close has been called.
	conn *sql.Conn
}
+
+func (c *connectionImpl) Close() error {
+       if c.conn == nil {
+               return adbc.Error{Code: adbc.StatusInvalidState}
+       }
+       defer func() {
+               c.conn = nil
+       }()
+       return c.conn.Close()
+}
+
+func (c *connectionImpl) NewStatement() (adbc.Statement, error) {
+       return &statementImpl{
+               conn: c,
+       }, nil
+}
+
+func (c *connectionImpl) SetAutocommit(autocommit bool) error {
+       // Databricks SQL doesn't support explicit transaction control in the 
same way
+       // as traditional databases. Most operations are implicitly committed.
+       if !autocommit {
+               return adbc.Error{
+                       Code: adbc.StatusNotImplemented,
+                       Msg:  fmt.Sprintf("disabling autocommit is not 
supported"),
+               }
+       } else {
+               return nil
+       }
+}
+
// CurrentNamespacer interface implementation.

// GetCurrentCatalog returns the catalog most recently set on this
// connection (empty if never set).
func (c *connectionImpl) GetCurrentCatalog() (string, error) {
	return c.catalog, nil
}
+
// GetCurrentDbSchema returns the schema most recently set on this
// connection (empty if never set).
func (c *connectionImpl) GetCurrentDbSchema() (string, error) {
	return c.dbSchema, nil
}
+
+func (c *connectionImpl) SetCurrentCatalog(catalog string) error {
+       // Use the database to execute USE CATALOG
+       if c.conn != nil && catalog != "" {
+               _, err := c.conn.ExecContext(context.Background(), "USE CATALOG 
`%s`", catalog)
+               if err != nil {
+                       return adbc.Error{
+                               Code: adbc.StatusInternal,
+                               Msg:  fmt.Sprintf("failed to set catalog: %v", 
err),
+                       }
+               }
+       }
+       c.catalog = catalog
+       return nil
+}
+
+func (c *connectionImpl) SetCurrentDbSchema(schema string) error {
+       // Use the database to execute USE SCHEMA
+       if c.conn != nil && schema != "" {
+               _, err := c.conn.ExecContext(context.Background(), "USE SCHEMA 
`%s`", schema)
+               if err != nil {
+                       return adbc.Error{
+                               Code: adbc.StatusInternal,
+                               Msg:  fmt.Sprintf("failed to set schema: %v", 
err),
+                       }
+               }
+       }
+       c.dbSchema = schema
+       return nil
+}
+
+// TableTypeLister interface implementation
+func (c *connectionImpl) ListTableTypes(ctx context.Context) ([]string, error) 
{
+       // Databricks supports these table types
+       return []string{"TABLE", "VIEW", "EXTERNAL_TABLE", "MANAGED_TABLE", 
"STREAMING_TABLE", "MATERIALIZED_VIEW"}, nil
+}
+
// Transaction methods (Databricks has limited transaction support).

// Commit is a no-op: operations are implicitly committed, so there is
// nothing to flush here.
func (c *connectionImpl) Commit(ctx context.Context) error {
	return nil
}
+
+func (c *connectionImpl) Rollback(ctx context.Context) error {
+       // Databricks SQL doesn't support explicit transactions in the 
traditional sense.
+       // Most operations are auto-committed. We'll track state but not 
perform any operation.
+       return adbc.Error{
+               Code: adbc.StatusNotImplemented,
+               Msg:  fmt.Sprintf("rollback is not supported"),
+       }
+}
+
+// DbObjectsEnumerator interface implementation
+func (c *connectionImpl) GetCatalogs(ctx context.Context, catalogFilter 
*string) ([]string, error) {
+       query := "SHOW CATALOGS"
+       if catalogFilter != nil {
+               query += fmt.Sprintf(" LIKE '%s'", *catalogFilter)
+       }
+
+       rows, err := c.conn.QueryContext(ctx, query)
+       if err != nil {
+               return nil, adbc.Error{
+                       Code: adbc.StatusInternal,
+                       Msg:  fmt.Sprintf("failed to query catalogs: %v", err),
+               }
+       }
+       defer func() {
+               if closeErr := rows.Close(); closeErr != nil {
+                       err = errors.Join(err, closeErr)
+               }
+       }()

Review Comment:
   The return value still needs to be named (e.g. `(result []string, err error)`) — otherwise the `err = errors.Join(err, closeErr)` in the deferred function has no effect on what the caller sees.



##########
go/adbc/driver/databricks/driver.go:
##########
@@ -0,0 +1,123 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+// Package databricks is an ADBC Driver Implementation for Databricks
+// SQL using databricks-sql-go as the underlying SQL driver.
+//
+// It can be used to register a driver for database/sql by importing
+// github.com/apache/arrow-adbc/go/adbc/sqldriver and running:
+//
+//     sql.Register("databricks", sqldriver.Driver{databricks.Driver{}})
+//
+// You can then open a databricks connection with the database/sql
+// standard package by using:
+//
+//     db, err := sql.Open("databricks", 
"token=<token>&hostname=<hostname>&port=<port>&httpPath=<path>")
+package databricks
+
+import (
+       "context"
+       "runtime/debug"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-adbc/go/adbc/driver/internal/driverbase"
+       "github.com/apache/arrow-go/v18/arrow/memory"
+)
+
const (
	// Connection options (accepted via SetOption / option maps).
	OptionServerHostname = "adbc.databricks.server_hostname"
	OptionHTTPPath       = "adbc.databricks.http_path"
	OptionAccessToken    = "adbc.databricks.access_token"
	OptionPort           = "adbc.databricks.port"
	OptionCatalog        = "adbc.databricks.catalog"
	OptionSchema         = "adbc.databricks.schema"

	// Query execution options.
	OptionQueryTimeout        = "adbc.databricks.query.timeout"
	OptionMaxRows             = "adbc.databricks.query.max_rows"
	OptionQueryRetryCount     = "adbc.databricks.query.retry_count"
	OptionDownloadThreadCount = "adbc.databricks.download_thread_count"

	// TLS/SSL options.
	OptionSSLMode     = "adbc.databricks.ssl_mode"
	OptionSSLRootCert = "adbc.databricks.ssl_root_cert"

	// OAuth options (reserved for future expansion; not all may be wired
	// up yet — see the driver implementation).
	OptionOAuthClientID     = "adbc.databricks.oauth.client_id"
	OptionOAuthClientSecret = "adbc.databricks.oauth.client_secret"
	OptionOAuthRefreshToken = "adbc.databricks.oauth.refresh_token"

	// Defaults applied when the corresponding option is not provided.
	DefaultPort    = 443
	DefaultSSLMode = "require"
)

Review Comment:
   Given this is a brand new driver...can we just have the options be 
"databricks." and not "adbc.databricks"? The "adbc." isn't really necessary 
everywhere (I realize all the current drivers make this mistake 😅) 



##########
go/adbc/driver/databricks/statement.go:
##########
@@ -0,0 +1,231 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package databricks
+
+import (
+       "context"
+       "database/sql"
+       "database/sql/driver"
+       "errors"
+       "fmt"
+
+       "github.com/apache/arrow-adbc/go/adbc"
+       "github.com/apache/arrow-go/v18/arrow"
+       "github.com/apache/arrow-go/v18/arrow/array"
+
+       dbsqlrows "github.com/databricks/databricks-sql-go/rows"
+)
+
// statementImpl implements adbc.Statement on top of the owning
// connection's database/sql handle.
type statementImpl struct {
	conn     *connectionImpl // owning connection; nil once the statement is closed
	query    string          // SQL text to execute
	prepared *sql.Stmt       // prepared-statement handle, non-nil only after preparation
}
+
+func (s *statementImpl) Close() error {
+       if s.conn == nil {
+               return adbc.Error{
+                       Msg:  "statement already closed",
+                       Code: adbc.StatusInvalidState,
+               }
+       }
+       if s.prepared != nil {
+               return s.prepared.Close()
+       }
+       s.conn = nil
+       return nil

Review Comment:
   If there's a prepared statement, we won't `nil` out `s.conn` - I don't think 
that's what's intended



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to