This is an automated email from the ASF dual-hosted git repository.

lidavidm pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/arrow-adbc.git


The following commit(s) were added to refs/heads/main by this push:
     new a15939f  [C][Python] Implement DBAPI 2.0 (PEP 249) interface (#56)
a15939f is described below

commit a15939f1d2bc4e4941bef9344b7360b9d730b607
Author: David Li <[email protected]>
AuthorDate: Tue Aug 9 12:41:48 2022 -0400

    [C][Python] Implement DBAPI 2.0 (PEP 249) interface (#56)
    
    * [Python] Implement DBAPI 2.0 (PEP 249) interface
    
    * [C++] Fix doubles in SQLite3 driver
    
    * [Python] Remove hypothesis file
    
    * [Python] Add more docstrings, tests
    
    * [Python] Remove redundancy
    
    * [Python] Update lockfiles
    
    * [C][Python] Add and test some DBAPI extensions
    
    * [C] Fix SQLite3 driver
---
 .gitignore                                         |   1 +
 .isort.cfg                                         |   1 +
 c/drivers/sqlite/sqlite.cc                         | 139 +++---
 .../adbc_driver_manager/_lib.pyx                   |  19 +-
 .../adbc_driver_manager/dbapi.py                   | 507 +++++++++++++++++++++
 .../adbc_driver_manager/tests/test_dbapi.py        | 144 ++++++
 .../adbc_driver_manager/tests/test_lowlevel.py     |   3 +-
 python/adbc_driver_manager/poetry.lock             | 124 +++--
 python/adbc_driver_manager/pyproject.toml          |   6 +-
 python/adbc_driver_manager/requirements-dev.txt    |  65 +--
 10 files changed, 874 insertions(+), 135 deletions(-)

diff --git a/.gitignore b/.gitignore
index ad32dd2..2e7ee88 100644
--- a/.gitignore
+++ b/.gitignore
@@ -72,6 +72,7 @@ site/
 
 # Python
 dist/
+.hypothesis/
 
 # R files
 **/.Rproj.user
diff --git a/.isort.cfg b/.isort.cfg
index dc371b7..c614890 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -16,4 +16,5 @@
 # under the License.
 
 [settings]
+known_first_party = adbc_driver_manager
 profile = black
diff --git a/c/drivers/sqlite/sqlite.cc b/c/drivers/sqlite/sqlite.cc
index 81372d7..729160e 100644
--- a/c/drivers/sqlite/sqlite.cc
+++ b/c/drivers/sqlite/sqlite.cc
@@ -361,6 +361,12 @@ AdbcStatusCode BindParameters(sqlite3_stmt* stmt, const 
arrow::RecordBatch& data
       *rc = sqlite3_bind_null(stmt, col_index);
     } else {
       switch (column->type()->id()) {
+        case arrow::Type::DOUBLE: {
+          *rc = sqlite3_bind_double(
+              stmt, col_index,
+              static_cast<const arrow::DoubleArray&>(*column).Value(row));
+          break;
+        }
         case arrow::Type::INT64: {
           *rc = sqlite3_bind_int64(
               stmt, col_index, static_cast<const 
arrow::Int64Array&>(*column).Value(row));
@@ -395,7 +401,8 @@ class SqliteStatementReader : public 
arrow::RecordBatchReader {
         schema_(nullptr),
         next_parameters_(nullptr),
         bind_index_(0),
-        done_(false) {}
+        done_(false),
+        rc_(SQLITE_OK) {}
 
   AdbcStatusCode Init(struct AdbcError* error) {
     // TODO: this crashes if the statement is closed while the reader
@@ -405,23 +412,21 @@ class SqliteStatementReader : public 
arrow::RecordBatchReader {
 
     sqlite3* db = connection_->db();
     Status status;
-    int rc = SQLITE_OK;
     if (bind_parameters_) {
       status = bind_parameters_->ReadNext(&next_parameters_);
       ADBC_RETURN_NOT_OK(FromArrowStatus(status, error));
-      ADBC_RETURN_NOT_OK(BindNext(&rc, error));
-      ADBC_RETURN_NOT_OK(CheckRc(db, stmt_, rc, "sqlite3_bind", error));
+      ADBC_RETURN_NOT_OK(BindNext(&rc_, error));
+      ADBC_RETURN_NOT_OK(CheckRc(db, stmt_, rc_, "sqlite3_bind", error));
     }
     // XXX: with parameters, inferring the schema from the first
     // argument is inaccurate (what if one is null?). Is there a way
     // to hint to SQLite the real type?
 
-    rc = sqlite3_step(stmt_);
-    if (rc == SQLITE_ERROR) {
-      return CheckRc(db, stmt_, rc, "sqlite3_step", error);
+    rc_ = sqlite3_step(stmt_);
+    if (rc_ == SQLITE_ERROR) {
+      return CheckRc(db, stmt_, rc_, "sqlite3_step", error);
     }
     schema_ = StatementToSchema(stmt_);
-    done_ = rc != SQLITE_ROW;
     return ADBC_STATUS_OK;
   }
 
@@ -451,57 +456,63 @@ class SqliteStatementReader : public 
arrow::RecordBatchReader {
 
     sqlite3* db = connection_->db();
 
-    // The statement was stepped once at the start, so step at the end of the 
loop
     int64_t num_rows = 0;
     for (int64_t row = 0; row < kBatchSize; row++) {
-      for (int col = 0; col < schema_->num_fields(); col++) {
-        const auto& field = schema_->field(col);
-        switch (field->type()->id()) {
-          case arrow::Type::DOUBLE: {
-            // TODO: handle null values
-            const sqlite3_int64 value = sqlite3_column_double(stmt_, col);
-            ARROW_RETURN_NOT_OK(
-                
dynamic_cast<arrow::DoubleBuilder*>(builders[col].get())->Append(value));
-            break;
-          }
-          case arrow::Type::INT64: {
-            // TODO: handle null values
-            const sqlite3_int64 value = sqlite3_column_int64(stmt_, col);
-            ARROW_RETURN_NOT_OK(
-                
dynamic_cast<arrow::Int64Builder*>(builders[col].get())->Append(value));
-            break;
-          }
-          case arrow::Type::NA: {
-            // TODO: handle null values
-            ARROW_RETURN_NOT_OK(
-                
dynamic_cast<arrow::NullBuilder*>(builders[col].get())->AppendNull());
-            break;
-          }
-          case arrow::Type::STRING: {
-            const char* value =
-                reinterpret_cast<const char*>(sqlite3_column_text(stmt_, col));
-            if (!value) {
-              // TODO: check field nullability
-              ARROW_RETURN_NOT_OK(
-                  
dynamic_cast<arrow::StringBuilder*>(builders[col].get())->AppendNull());
-            } else {
-              const arrow::util::string_view view(value, std::strlen(value));
-              
ARROW_RETURN_NOT_OK(dynamic_cast<arrow::StringBuilder*>(builders[col].get())
+      if (rc_ != SQLITE_DONE) {
+        for (int col = 0; col < schema_->num_fields(); col++) {
+          const auto& field = schema_->field(col);
+          switch (field->type()->id()) {
+            case arrow::Type::DOUBLE: {
+              // TODO: handle null values
+              const double value = sqlite3_column_double(stmt_, col);
+              
ARROW_RETURN_NOT_OK(dynamic_cast<arrow::DoubleBuilder*>(builders[col].get())
                                       ->Append(value));
+              break;
+            }
+            case arrow::Type::INT64: {
+              // TODO: handle null values
+              const sqlite3_int64 value = sqlite3_column_int64(stmt_, col);
+              ARROW_RETURN_NOT_OK(
+                  
dynamic_cast<arrow::Int64Builder*>(builders[col].get())->Append(value));
+              break;
             }
-            break;
+            case arrow::Type::NA: {
+              // TODO: handle null values
+              ARROW_RETURN_NOT_OK(
+                  
dynamic_cast<arrow::NullBuilder*>(builders[col].get())->AppendNull());
+              break;
+            }
+            case arrow::Type::STRING: {
+              const char* value =
+                  reinterpret_cast<const char*>(sqlite3_column_text(stmt_, 
col));
+              if (!value) {
+                // TODO: check field nullability
+                ARROW_RETURN_NOT_OK(
+                    dynamic_cast<arrow::StringBuilder*>(builders[col].get())
+                        ->AppendNull());
+              } else {
+                const arrow::util::string_view view(value, std::strlen(value));
+                ARROW_RETURN_NOT_OK(
+                    dynamic_cast<arrow::StringBuilder*>(builders[col].get())
+                        ->Append(value));
+              }
+              break;
+            }
+            default:
+              return Status::NotImplemented("[SQLite3] Cannot read field '",
+                                            field->name(), "' of type ",
+                                            field->type()->ToString());
           }
-          default:
-            return Status::NotImplemented("[SQLite3] Cannot read field '", 
field->name(),
-                                          "' of type ", 
field->type()->ToString());
         }
+        num_rows++;
       }
-      num_rows++;
 
-      int status = sqlite3_step(stmt_);
-      if (status == SQLITE_ROW) {
+      if (rc_ == SQLITE_ROW) {
+        rc_ = sqlite3_step(stmt_);
+      }
+      if (rc_ == SQLITE_ROW) {
         continue;
-      } else if (status == SQLITE_DONE) {
+      } else if (rc_ == SQLITE_DONE) {
         if (bind_parameters_ &&
             (!next_parameters_ || bind_index_ >= 
next_parameters_->num_rows())) {
           ARROW_RETURN_NOT_OK(bind_parameters_->ReadNext(&next_parameters_));
@@ -509,28 +520,31 @@ class SqliteStatementReader : public 
arrow::RecordBatchReader {
         }
 
         if (next_parameters_ && bind_index_ < next_parameters_->num_rows()) {
-          status = sqlite3_reset(stmt_);
-          if (status != SQLITE_OK) {
+          rc_ = sqlite3_reset(stmt_);
+          if (rc_ != SQLITE_OK) {
             return Status::IOError("[SQLite3] sqlite3_reset: ", 
sqlite3_errmsg(db));
           }
           struct AdbcError error;
-          ARROW_RETURN_NOT_OK(ToArrowStatus(BindNext(&status, &error), 
&error));
-          status = sqlite3_step(stmt_);
-          if (status == SQLITE_ROW) continue;
-        } else {
-          done_ = true;
-          next_parameters_.reset();
+          ARROW_RETURN_NOT_OK(ToArrowStatus(BindNext(&rc_, &error), &error));
+          rc_ = sqlite3_step(stmt_);
+          if (rc_ != SQLITE_ERROR) continue;
         }
+        done_ = true;
+        next_parameters_.reset();
         break;
       }
       return Status::IOError("[SQLite3] sqlite3_step: ", sqlite3_errmsg(db));
     }
 
-    arrow::ArrayVector arrays(builders.size());
-    for (size_t i = 0; i < builders.size(); i++) {
-      ARROW_RETURN_NOT_OK(builders[i]->Finish(&arrays[i]));
+    if (done_ && num_rows == 0) {
+      *batch = nullptr;
+    } else {
+      arrow::ArrayVector arrays(builders.size());
+      for (size_t i = 0; i < builders.size(); i++) {
+        ARROW_RETURN_NOT_OK(builders[i]->Finish(&arrays[i]));
+      }
+      *batch = arrow::RecordBatch::Make(schema_, num_rows, std::move(arrays));
     }
-    *batch = arrow::RecordBatch::Make(schema_, num_rows, std::move(arrays));
     return Status::OK();
   }
 
@@ -550,6 +564,7 @@ class SqliteStatementReader : public 
arrow::RecordBatchReader {
   std::shared_ptr<arrow::RecordBatch> next_parameters_;
   int64_t bind_index_;
   bool done_;
+  int rc_;
 };
 
 class SqliteStatementImpl {
diff --git a/python/adbc_driver_manager/adbc_driver_manager/_lib.pyx 
b/python/adbc_driver_manager/adbc_driver_manager/_lib.pyx
index 97b1c9b..d34be95 100644
--- a/python/adbc_driver_manager/adbc_driver_manager/_lib.pyx
+++ b/python/adbc_driver_manager/adbc_driver_manager/_lib.pyx
@@ -240,8 +240,15 @@ class AdbcInfoCode(enum.IntEnum):
     DRIVER_ARROW_VERSION = ADBC_INFO_DRIVER_ARROW_VERSION
 
 
+class Warning(UserWarning):
+    """
+    PEP 249-compliant base warning class.
+    """
+
+
 class Error(Exception):
-    """PEP-249 compliant base exception class.
+    """
+    PEP 249-compliant base exception class.
 
     Attributes
     ----------
@@ -289,7 +296,13 @@ class ProgrammingError(DatabaseError):
 
 
 class NotSupportedError(DatabaseError):
-    pass
+    def __init__(self, message, *, vendor_code=None, sqlstate=None):
+        super().__init__(
+            message,
+            status_code=AdbcStatusCode.NOT_IMPLEMENTED,
+            vendor_code=vendor_code,
+            sqlstate=sqlstate,
+        )
 
 
 INGEST_OPTION_TARGET_TABLE = ADBC_INGEST_OPTION_TARGET_TABLE.decode("utf-8")
@@ -330,7 +343,7 @@ cdef void check_error(CAdbcStatusCode status, CAdbcError* 
error) except *:
                     ADBC_STATUS_UNAUTHORIZED):
         klass = ProgrammingError
     elif status == ADBC_STATUS_NOT_IMPLEMENTED:
-        klass = NotSupportedError
+        raise NotSupportedError(message)
     raise klass(message, status_code=status)
 
 
diff --git a/python/adbc_driver_manager/adbc_driver_manager/dbapi.py 
b/python/adbc_driver_manager/adbc_driver_manager/dbapi.py
new file mode 100644
index 0000000..dc63c03
--- /dev/null
+++ b/python/adbc_driver_manager/adbc_driver_manager/dbapi.py
@@ -0,0 +1,507 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""
+PEP 249 (DB-API 2.0) API wrapper for the ADBC Driver Manager.
+"""
+
+import datetime
+import functools
+import time
+import typing
+import warnings
+from typing import Any, Dict, List, Optional
+
+import pyarrow
+
+from . import _lib
+
+if typing.TYPE_CHECKING:
+    from typing import Self
+
+# ----------------------------------------------------------
+# Globals
+
+#: The DB-API API level (2.0).
+apilevel = "2.0"
+#: The thread safety level (connections may not be shared).
+threadsafety = 1
+#: The parameter style (qmark). This is hardcoded, but actually
+#: depends on the driver.
+paramstyle = "qmark"
+
+Warning = _lib.Warning
+Error = _lib.Error
+InterfaceError = _lib.InterfaceError
+DatabaseError = _lib.DatabaseError
+DataError = _lib.DataError
+OperationalError = _lib.OperationalError
+IntegrityError = _lib.IntegrityError
+InternalError = _lib.InternalError
+ProgrammingError = _lib.ProgrammingError
+NotSupportedError = _lib.NotSupportedError
+
+# ----------------------------------------------------------
+# Types
+
+Date = datetime.date
+Time = datetime.time
+Timestamp = datetime.datetime
+
+
+def DateFromTicks(ticks):
+    # Standard implementations from PEP 249 itself
+    return Date(*time.localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+    return Time(*time.localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+    return Timestamp(*time.localtime(ticks)[:6])
+
+
+class _TypeSet(frozenset):
+    """A set of PyArrow type IDs that compares equal to subsets of self."""
+
+    def __eq__(self, other: Any) -> bool:
+        if isinstance(other, _TypeSet):
+            return not (other - self)
+        elif isinstance(other, pyarrow.DataType):
+            return other.id in self
+        return False
+
+
+STRING = _TypeSet([pyarrow.string().id, pyarrow.large_string().id])
+BINARY = _TypeSet({pyarrow.binary().id, pyarrow.large_binary().id})
+NUMBER = _TypeSet(
+    [
+        pyarrow.int8().id,
+        pyarrow.int16().id,
+        pyarrow.int32().id,
+        pyarrow.int64().id,
+        pyarrow.uint8().id,
+        pyarrow.uint16().id,
+        pyarrow.uint32().id,
+        pyarrow.uint64().id,
+        pyarrow.float32().id,
+        pyarrow.float64().id,
+    ]
+)
+DATETIME = _TypeSet(
+    [
+        pyarrow.date32().id,
+        pyarrow.date64().id,
+        pyarrow.time32("s").id,
+        pyarrow.time64("ns").id,
+        pyarrow.timestamp("s").id,
+    ]
+)
+ROWID = _TypeSet([pyarrow.int64().id])
+
+# ----------------------------------------------------------
+# Functions
+
+
+def connect(
+    *,
+    driver: str,
+    entrypoint: str,
+    db_kwargs: Optional[Dict[str, str]] = None,
+    conn_kwargs: Optional[Dict[str, str]] = None
+) -> "Connection":
+    """
+    Connect to a database via ADBC.
+
+    Parameters
+    ----------
+    driver
+        The driver name. For example, "adbc_driver_sqlite" will
+        attempt to load libadbc_driver_sqlite.so on Unix-like systems,
+        and adbc_driver_sqlite.dll on Windows.
+    entrypoint
+        The driver-specific entrypoint.
+    db_kwargs
+        Key-value parameters to pass to the driver to initialize the
+        database.
+    conn_kwargs
+        Key-value parameters to pass to the driver to initialize the
+        connection.
+    """
+    db = None
+    conn = None
+
+    if db_kwargs is None:
+        db_kwargs = {}
+    if conn_kwargs is None:
+        conn_kwargs = {}
+
+    try:
+        db = _lib.AdbcDatabase(driver=driver, entrypoint=entrypoint, 
**db_kwargs)
+        conn = _lib.AdbcConnection(db, **conn_kwargs)
+        return Connection(db, conn)
+    except Exception:
+        if conn:
+            conn.close()
+        if db:
+            db.close()
+        raise
+
+
+# ----------------------------------------------------------
+# Classes
+
+
+class _Closeable:
+    def __enter__(self) -> "Self":
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
+        self.close()
+
+
+class Connection(_Closeable):
+    """
+    A DB-API 2.0 (PEP 249) connection.
+
+    Do not create this object directly; use connect().
+    """
+
+    # Optional extension: expose exception classes on Connection
+    Warning = _lib.Warning
+    Error = _lib.Error
+    InterfaceError = _lib.InterfaceError
+    DatabaseError = _lib.DatabaseError
+    DataError = _lib.DataError
+    OperationalError = _lib.OperationalError
+    IntegrityError = _lib.IntegrityError
+    InternalError = _lib.InternalError
+    ProgrammingError = _lib.ProgrammingError
+    NotSupportedError = _lib.NotSupportedError
+
+    def __init__(self, db: _lib.AdbcDatabase, conn: _lib.AdbcConnection) -> 
None:
+        self._db = db
+        self._conn = conn
+
+        try:
+            self._conn.set_autocommit(False)
+        except _lib.NotSupportedError:
+            self._commit_supported = False
+            warnings.warn(
+                "Cannot disable autocommit; conn will not be DB-API 2.0 
compliant",
+                category=Warning,
+            )
+        else:
+            self._commit_supported = True
+
+    def close(self) -> None:
+        """Close the connection."""
+        self._conn.close()
+        self._db.close()
+
+    def commit(self) -> None:
+        """Explicitly commit."""
+        if self._commit_supported:
+            self._conn.commit()
+
+    def rollback(self) -> None:
+        """Explicitly rollback."""
+        if self._commit_supported:
+            self._conn.rollback()
+
+    def cursor(self) -> "Cursor":
+        """Create a new cursor for querying the database."""
+        return Cursor(self)
+
+
+class Cursor(_Closeable):
+    """
+    A DB-API 2.0 (PEP 249) cursor.
+
+    Do not create this object directly; use Connection.cursor().
+    """
+
+    def __init__(self, conn: Connection) -> None:
+        self._conn = conn
+        self._stmt = _lib.AdbcStatement(conn._conn)
+        self._last_query: Optional[str] = None
+        self._results: Optional["_RowIterator"] = None
+        self._arraysize = 1
+
+    @property
+    def arraysize(self) -> int:
+        """The number of rows to fetch at a time with fetchmany()."""
+        return self._arraysize
+
+    @arraysize.setter
+    def arraysize(self, size: int) -> None:
+        self._arraysize = size
+
+    @property
+    def connection(self) -> Connection:
+        """
+        Get the connection associated with this cursor.
+
+        This is an optional DB-API extension.
+        """
+        return self._conn
+
+    @property
+    def description(self) -> Optional[List[tuple]]:
+        """The schema of the result set."""
+        if self._results is None:
+            return None
+        return self._results.description
+
+    @property
+    def rowcount(self):
+        """
+        Get the row count of the result set.
+
+        This is always -1 since ADBC returns results as a stream.
+        """
+        return -1
+
+    @property
+    def rownumber(self):
+        if self._results is not None:
+            return self._results.rownumber
+        return None
+
+    def callproc(self, procname, parameters):
+        raise NotSupportedError("Cursor.callproc")
+
+    def close(self):
+        """Close the cursor and free resources."""
+        if self._results is not None:
+            self._results.close()
+        self._stmt.close()
+
+    def execute(self, operation, parameters=None) -> None:
+        """Execute a query."""
+        self._results = None
+        if operation != self._last_query:
+            self._last_query = operation
+            self._stmt.set_sql_query(operation)
+            self._stmt.prepare()
+
+        if parameters:
+            rb = pyarrow.record_batch(
+                [
+                    [
+                        param_value,
+                    ]
+                    for param_value in parameters
+                ],
+                names=[str(i) for i in range(len(parameters))],
+            )
+            arr_handle = _lib.ArrowArrayHandle()
+            sch_handle = _lib.ArrowSchemaHandle()
+            rb._export_to_c(arr_handle.address, sch_handle.address)
+            self._stmt.bind(arr_handle, sch_handle)
+
+        self._stmt.execute()
+        handle = self._stmt.get_stream()
+        self._results = _RowIterator(
+            pyarrow.RecordBatchReader._import_from_c(handle.address)
+        )
+
+    def executemany(self, operation, seq_of_parameters):
+        self._results = None
+        if operation != self._last_query:
+            self._last_query = operation
+            self._stmt.set_sql_query(operation)
+            self._stmt.prepare()
+
+        if seq_of_parameters:
+            rb = pyarrow.record_batch(
+                [
+                    pyarrow.array([row[col_idx] for row in seq_of_parameters])
+                    for col_idx in range(len(seq_of_parameters[0]))
+                ],
+                names=[str(i) for i in range(len(seq_of_parameters[0]))],
+            )
+        else:
+            rb = pyarrow.record_batch([])
+
+        arr_handle = _lib.ArrowArrayHandle()
+        sch_handle = _lib.ArrowSchemaHandle()
+        rb._export_to_c(arr_handle.address, sch_handle.address)
+        self._stmt.bind(arr_handle, sch_handle)
+        self._stmt.execute()
+        # XXX: must step through results to fully execute query
+        handle = self._stmt.get_stream()
+        reader = pyarrow.RecordBatchReader._import_from_c(handle.address)
+        reader.read_all()
+
+    def fetchone(self) -> tuple:
+        """Fetch one row of the result."""
+        if self._results is None:
+            raise ProgrammingError(
+                "Cannot fetchone() before execute()",
+                status_code=_lib.AdbcStatusCode.INVALID_STATE,
+            )
+        return self._results.fetchone()
+
+    def fetchmany(self, size: Optional[int] = None) -> List[tuple]:
+        """Fetch some rows of the result."""
+        if self._results is None:
+            raise ProgrammingError(
+                "Cannot fetchmany() before execute()",
+                status_code=_lib.AdbcStatusCode.INVALID_STATE,
+            )
+        if size is None:
+            size = self.arraysize
+        return self._results.fetchmany(size)
+
+    def fetchall(self) -> List[tuple]:
+        """Fetch all rows of the result."""
+        if self._results is None:
+            raise ProgrammingError(
+                "Cannot fetchall() before execute()",
+                status_code=_lib.AdbcStatusCode.INVALID_STATE,
+            )
+        return self._results.fetchall()
+
+    def fetchallarrow(self) -> pyarrow.Table:
+        """
+        Fetch all rows of the result as a PyArrow Table.
+
+        This implements a similar API as turbodbc.
+        """
+        return self.fetch_arrow_table()
+
+    def fetch_arrow_table(self) -> pyarrow.Table:
+        """
+        Fetch all rows of the result as a PyArrow Table.
+
+        This implements a similar API as DuckDB.
+        """
+        if self._results is None:
+            raise ProgrammingError(
+                "Cannot fetch_df() before execute()",
+                status_code=_lib.AdbcStatusCode.INVALID_STATE,
+            )
+        return self._results.fetch_arrow_table()
+
+    def fetch_df(self):
+        """
+        Fetch all rows of the result as a Pandas DataFrame.
+
+        This implements a similar API as DuckDB.
+        """
+        if self._results is None:
+            raise ProgrammingError(
+                "Cannot fetch_df() before execute()",
+                status_code=_lib.AdbcStatusCode.INVALID_STATE,
+            )
+        return self._results.fetch_df()
+
+    def next(self):
+        """Fetch the next row, or raise StopIteration."""
+        row = self.fetchone()
+        if row is None:
+            raise StopIteration
+        return row
+
+    def nextset(self):
+        raise NotSupportedError("Cursor.nextset")
+
+    def setinputsizes(self, sizes):
+        # Not used
+        pass
+
+    def setoutputsize(self, size, column=None):
+        # Not used
+        pass
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        return self.next()
+
+
+class _RowIterator(_Closeable):
+    """Track state needed to iterate over the result set."""
+
+    def __init__(self, reader: pyarrow.RecordBatchReader) -> None:
+        self._reader = reader
+        self._current_batch = None
+        self._next_row = 0
+        self._finished = False
+        self.rownumber = 0
+
+    def close(self) -> None:
+        self._reader.close()
+
+    @property
+    def description(self) -> List[tuple]:
+        return [
+            (field.name, field.type, None, None, None, None, None)
+            for field in self._reader.schema
+        ]
+
+    def fetchone(self):
+        if self._current_batch is None or self._next_row >= 
len(self._current_batch):
+            try:
+                self._current_batch = self._reader.read_next_batch()
+                self._next_row = 0
+            except StopIteration:
+                self._current_batch = None
+                self._finished = True
+
+        if self._finished:
+            return None
+
+        row = tuple(
+            _convert_value(arr, row=self._next_row)
+            for arr in self._current_batch.columns
+        )
+        self._next_row += 1
+        self.rownumber += 1
+        return row
+
+    def fetchmany(self, size: int):
+        rows = []
+        for _ in range(size):
+            row = self.fetchone()
+            if row is None:
+                break
+            rows.append(row)
+        return rows
+
+    def fetchall(self):
+        rows = []
+        while True:
+            row = self.fetchone()
+            if row is None:
+                break
+            rows.append(row)
+        return rows
+
+    def fetch_arrow_table(self):
+        return self._reader.read_all()
+
+    def fetch_df(self):
+        return self._reader.read_pandas()
+
+
[email protected]
+def _convert_value(arr: pyarrow.Array, *, row: int) -> Any:
+    return arr[row].as_py()
diff --git a/python/adbc_driver_manager/adbc_driver_manager/tests/test_dbapi.py 
b/python/adbc_driver_manager/adbc_driver_manager/tests/test_dbapi.py
new file mode 100644
index 0000000..f088021
--- /dev/null
+++ b/python/adbc_driver_manager/adbc_driver_manager/tests/test_dbapi.py
@@ -0,0 +1,144 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+import pandas
+import pyarrow
+import pytest
+from pandas.testing import assert_frame_equal
+
+from adbc_driver_manager import dbapi
+
+
[email protected]
+def sqlite():
+    """Dynamically load the SQLite driver."""
+    with dbapi.connect(
+        driver="adbc_driver_sqlite",
+        entrypoint="AdbcSqliteDriverInit",
+    ) as conn:
+        yield conn
+
+
+def test_type_objects():
+    assert dbapi.NUMBER == pyarrow.int64()
+    assert pyarrow.int64() == dbapi.NUMBER
+
+    assert dbapi.STRING == pyarrow.string()
+    assert pyarrow.string() == dbapi.STRING
+
+    assert dbapi.STRING != dbapi.NUMBER
+    assert dbapi.NUMBER != dbapi.DATETIME
+    assert dbapi.NUMBER == dbapi.ROWID
+
+
+def test_attrs(sqlite):
+    assert sqlite.Warning == dbapi.Warning
+    assert sqlite.Error == dbapi.Error
+    assert sqlite.InterfaceError == dbapi.InterfaceError
+    assert sqlite.DatabaseError == dbapi.DatabaseError
+    assert sqlite.DataError == dbapi.DataError
+    assert sqlite.OperationalError == dbapi.OperationalError
+    assert sqlite.IntegrityError == dbapi.IntegrityError
+    assert sqlite.InternalError == dbapi.InternalError
+    assert sqlite.ProgrammingError == dbapi.ProgrammingError
+    assert sqlite.NotSupportedError == dbapi.NotSupportedError
+
+    with sqlite.cursor() as cur:
+        assert cur.arraysize == 1
+        assert cur.connection is sqlite
+        assert cur.description is None
+        assert cur.rowcount == -1
+
+
+def test_query_fetch_py(sqlite):
+    with sqlite.cursor() as cur:
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert cur.description == [
+            ("1", dbapi.NUMBER, None, None, None, None, None),
+            ('"foo"', dbapi.STRING, None, None, None, None, None),
+            ("2.0", dbapi.NUMBER, None, None, None, None, None),
+        ]
+        assert cur.rownumber == 0
+        assert cur.fetchone() == (1, "foo", 2.0)
+        assert cur.rownumber == 1
+        assert cur.fetchone() is None
+
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert cur.fetchmany() == [(1, "foo", 2.0)]
+        assert cur.fetchmany() == []
+
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert cur.fetchall() == [(1, "foo", 2.0)]
+        assert cur.fetchall() == []
+
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert list(cur) == [(1, "foo", 2.0)]
+
+
+def test_query_fetch_arrow(sqlite):
+    with sqlite.cursor() as cur:
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert cur.fetch_arrow_table() == pyarrow.table(
+            {
+                "1": [1],
+                '"foo"': ["foo"],
+                "2.0": [2.0],
+            }
+        )
+
+
+def test_query_fetch_df(sqlite):
+    with sqlite.cursor() as cur:
+        cur.execute('SELECT 1, "foo", 2.0')
+        assert_frame_equal(
+            cur.fetch_df(),
+            pandas.DataFrame(
+                {
+                    "1": [1],
+                    '"foo"': ["foo"],
+                    "2.0": [2.0],
+                }
+            ),
+        )
+
+
+def test_query_parameters(sqlite):
+    with sqlite.cursor() as cur:
+        cur.execute("SELECT ? + 1, ?", (1.0, 2))
+        assert cur.fetchall() == [(2.0, 2)]
+
+
+def test_executemany(sqlite):
+    with sqlite.cursor() as cur:
+        cur.execute("CREATE TABLE foo (a, b)")
+        cur.executemany(
+            "INSERT INTO foo VALUES (?, ?)",
+            [
+                (1, 2),
+                (3, 4),
+                (5, 6),
+            ],
+        )
+        cur.execute("SELECT COUNT(*) FROM foo")
+        assert cur.fetchone() == (3,)
+        cur.execute("SELECT * FROM foo ORDER BY a ASC")
+        assert cur.rownumber == 0
+        assert next(cur) == (1, 2)
+        assert cur.rownumber == 1
+        assert next(cur) == (3, 4)
+        assert cur.rownumber == 2
+        assert next(cur) == (5, 6)
diff --git 
a/python/adbc_driver_manager/adbc_driver_manager/tests/test_lowlevel.py 
b/python/adbc_driver_manager/adbc_driver_manager/tests/test_lowlevel.py
index e112353..623a260 100644
--- a/python/adbc_driver_manager/adbc_driver_manager/tests/test_lowlevel.py
+++ b/python/adbc_driver_manager/adbc_driver_manager/tests/test_lowlevel.py
@@ -15,10 +15,11 @@
 # specific language governing permissions and limitations
 # under the License.
 
-import adbc_driver_manager
 import pyarrow
 import pytest
 
+import adbc_driver_manager
+
 
 @pytest.fixture
 def sqlite():
diff --git a/python/adbc_driver_manager/poetry.lock 
b/python/adbc_driver_manager/poetry.lock
index 9d84c40..b1b6e23 100644
--- a/python/adbc_driver_manager/poetry.lock
+++ b/python/adbc_driver_manager/poetry.lock
@@ -48,7 +48,7 @@ python-versions = "*"
 name = "numpy"
 version = "1.23.1"
 description = "NumPy is the fundamental package for array computing with 
Python."
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.8"
 
@@ -63,6 +63,27 @@ python-versions = ">=3.6"
 [package.dependencies]
 pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
 
+[[package]]
+name = "pandas"
+version = "1.4.3"
+description = "Powerful data structures for data analysis, time series, and 
statistics"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+
+[package.dependencies]
+numpy = [
+    {version = ">=1.18.5", markers = "platform_machine != \"aarch64\" and 
platform_machine != \"arm64\" and python_version < \"3.10\""},
+    {version = ">=1.19.2", markers = "platform_machine == \"aarch64\" and 
python_version < \"3.10\""},
+    {version = ">=1.20.0", markers = "platform_machine == \"arm64\" and 
python_version < \"3.10\""},
+    {version = ">=1.21.0", markers = "python_version >= \"3.10\""},
+]
+python-dateutil = ">=2.8.1"
+pytz = ">=2020.1"
+
+[package.extras]
+test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"]
+
 [[package]]
 name = "pluggy"
 version = "1.0.0"
@@ -85,9 +106,9 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, 
!=3.3.*, !=3.4.*"
 
 [[package]]
 name = "pyarrow"
-version = "8.0.0"
+version = "9.0.0"
 description = "Python library for Apache Arrow"
-category = "dev"
+category = "main"
 optional = false
 python-versions = ">=3.7"
 
@@ -126,6 +147,33 @@ tomli = ">=1.0.0"
 [package.extras]
 testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments 
(>=2.7.2)", "requests", "xmlschema"]
 
+[[package]]
+name = "python-dateutil"
+version = "2.8.2"
+description = "Extensions to the standard Python datetime module"
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+
+[package.dependencies]
+six = ">=1.5"
+
+[[package]]
+name = "pytz"
+version = "2022.1"
+description = "World timezone definitions, modern and historical"
+category = "main"
+optional = false
+python-versions = "*"
+
+[[package]]
+name = "six"
+version = "1.16.0"
+description = "Python 2 and 3 compatibility utilities"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
+
 [[package]]
 name = "tomli"
 version = "2.0.1"
@@ -137,7 +185,7 @@ python-versions = ">=3.7"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.8"
-content-hash = 
"04688db6645eb1df814b2f381e14bef8b236b86fa408e50937e87bda6a97f9c5"
+content-hash = 
"6929ad1880dbb184988a7019b239c2f93bd6f20cd186a12ae64c96335bc945e7"
 
 [metadata.files]
 atomicwrites = [
@@ -225,6 +273,29 @@ packaging = [
     {file = "packaging-21.3-py3-none-any.whl", hash = 
"sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
     {file = "packaging-21.3.tar.gz", hash = 
"sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
 ]
+pandas = [
+    {file = "pandas-1.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:d51674ed8e2551ef7773820ef5dab9322be0828629f2cbf8d1fc31a0c4fed640"},
+    {file = "pandas-1.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:16ad23db55efcc93fa878f7837267973b61ea85d244fc5ff0ccbcfa5638706c5"},
+    {file = "pandas-1.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:958a0588149190c22cdebbc0797e01972950c927a11a900fe6c2296f207b1d6f"},
+    {file = 
"pandas-1.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:e48fbb64165cda451c06a0f9e4c7a16b534fcabd32546d531b3c240ce2844112"},
+    {file = 
"pandas-1.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:6f803320c9da732cc79210d7e8cc5c8019aad512589c910c66529eb1b1818230"},
+    {file = "pandas-1.4.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:2893e923472a5e090c2d5e8db83e8f907364ec048572084c7d10ef93546be6d1"},
+    {file = "pandas-1.4.3-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:24ea75f47bbd5574675dae21d51779a4948715416413b30614c1e8b480909f81"},
+    {file = "pandas-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d5ebc990bd34f4ac3c73a2724c2dcc9ee7bf1ce6cf08e87bb25c6ad33507e318"},
+    {file = "pandas-1.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:d6c0106415ff1a10c326c49bc5dd9ea8b9897a6ca0c8688eb9c30ddec49535ef"},
+    {file = 
"pandas-1.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:78b00429161ccb0da252229bcda8010b445c4bf924e721265bec5a6e96a92e92"},
+    {file = 
"pandas-1.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6dfbf16b1ea4f4d0ee11084d9c026340514d1d30270eaa82a9f1297b6c8ecbf0"},
+    {file = "pandas-1.4.3-cp38-cp38-win32.whl", hash = 
"sha256:48350592665ea3cbcd07efc8c12ff12d89be09cd47231c7925e3b8afada9d50d"},
+    {file = "pandas-1.4.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:605d572126eb4ab2eadf5c59d5d69f0608df2bf7bcad5c5880a47a20a0699e3e"},
+    {file = "pandas-1.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:a3924692160e3d847e18702bb048dc38e0e13411d2b503fecb1adf0fcf950ba4"},
+    {file = "pandas-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:07238a58d7cbc8a004855ade7b75bbd22c0db4b0ffccc721556bab8a095515f6"},
+    {file = "pandas-1.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:755679c49460bd0d2f837ab99f0a26948e68fa0718b7e42afbabd074d945bf84"},
+    {file = 
"pandas-1.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:41fc406e374590a3d492325b889a2686b31e7a7780bec83db2512988550dadbf"},
+    {file = 
"pandas-1.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1d9382f72a4f0e93909feece6fef5500e838ce1c355a581b3d8f259839f2ea76"},
+    {file = "pandas-1.4.3-cp39-cp39-win32.whl", hash = 
"sha256:0daf876dba6c622154b2e6741f29e87161f844e64f84801554f879d27ba63c0d"},
+    {file = "pandas-1.4.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:721a3dd2f06ef942f83a819c0f3f6a648b2830b191a72bbe9451bcd49c3bd42e"},
+    {file = "pandas-1.4.3.tar.gz", hash = 
"sha256:2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c"},
+]
 pluggy = [
     {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = 
"sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
     {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
@@ -233,38 +304,7 @@ py = [
     {file = "py-1.11.0-py2.py3-none-any.whl", hash = 
"sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
     {file = "py-1.11.0.tar.gz", hash = 
"sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
 ]
-pyarrow = [
-    {file = "pyarrow-8.0.0-cp310-cp310-macosx_10_13_universal2.whl", hash = 
"sha256:d5ef4372559b191cafe7db8932801eee252bfc35e983304e7d60b6954576a071"},
-    {file = "pyarrow-8.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = 
"sha256:863be6bad6c53797129610930794a3e797cb7d41c0a30e6794a2ac0e42ce41b8"},
-    {file = "pyarrow-8.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:69b043a3fce064ebd9fbae6abc30e885680296e5bd5e6f7353e6a87966cf2ad7"},
-    {file = "pyarrow-8.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:51e58778fcb8829fca37fbfaea7f208d5ce7ea89ea133dd13d8ce745278ee6f0"},
-    {file = 
"pyarrow-8.0.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", 
hash = 
"sha256:15511ce2f50343f3fd5e9f7c30e4d004da9134e9597e93e9c96c3985928cbe82"},
-    {file = 
"pyarrow-8.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:ea132067ec712d1b1116a841db1c95861508862b21eddbcafefbce8e4b96b867"},
-    {file = 
"pyarrow-8.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = 
"sha256:deb400df8f19a90b662babceb6dd12daddda6bb357c216e558b207c0770c7654"},
-    {file = "pyarrow-8.0.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:3bd201af6e01f475f02be88cf1f6ee9856ab98c11d8bbb6f58347c58cd07be00"},
-    {file = "pyarrow-8.0.0-cp37-cp37m-macosx_10_13_x86_64.whl", hash = 
"sha256:78a6ac39cd793582998dac88ab5c1c1dd1e6503df6672f064f33a21937ec1d8d"},
-    {file = "pyarrow-8.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:d6f1e1040413651819074ef5b500835c6c42e6c446532a1ddef8bc5054e8dba5"},
-    {file = 
"pyarrow-8.0.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash 
= "sha256:98c13b2e28a91b0fbf24b483df54a8d7814c074c2623ecef40dce1fa52f6539b"},
-    {file = 
"pyarrow-8.0.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:c9c97c8e288847e091dfbcdf8ce51160e638346f51919a9e74fe038b2e8aee62"},
-    {file = 
"pyarrow-8.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:edad25522ad509e534400d6ab98cf1872d30c31bc5e947712bfd57def7af15bb"},
-    {file = "pyarrow-8.0.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:ece333706a94c1221ced8b299042f85fd88b5db802d71be70024433ddf3aecab"},
-    {file = "pyarrow-8.0.0-cp38-cp38-macosx_10_13_x86_64.whl", hash = 
"sha256:95c7822eb37663e073da9892f3499fe28e84f3464711a3e555e0c5463fd53a19"},
-    {file = "pyarrow-8.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:25a5f7c7f36df520b0b7363ba9f51c3070799d4b05d587c60c0adaba57763479"},
-    {file = "pyarrow-8.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:ce64bc1da3109ef5ab9e4c60316945a7239c798098a631358e9ab39f6e5529e9"},
-    {file = 
"pyarrow-8.0.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash 
= "sha256:541e7845ce5f27a861eb5b88ee165d931943347eec17b9ff1e308663531c9647"},
-    {file = 
"pyarrow-8.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:8cd86e04a899bef43e25184f4b934584861d787cf7519851a8c031803d45c6d8"},
-    {file = 
"pyarrow-8.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:ba2b7aa7efb59156b87987a06f5241932914e4d5bbb74a465306b00a6c808849"},
-    {file = "pyarrow-8.0.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:42b7982301a9ccd06e1dd4fabd2e8e5df74b93ce4c6b87b81eb9e2d86dc79871"},
-    {file = "pyarrow-8.0.0-cp39-cp39-macosx_10_13_universal2.whl", hash = 
"sha256:1dd482ccb07c96188947ad94d7536ab696afde23ad172df8e18944ec79f55055"},
-    {file = "pyarrow-8.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = 
"sha256:81b87b782a1366279411f7b235deab07c8c016e13f9af9f7c7b0ee564fedcc8f"},
-    {file = "pyarrow-8.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:03a10daad957970e914920b793f6a49416699e791f4c827927fd4e4d892a5d16"},
-    {file = "pyarrow-8.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:65c7f4cc2be195e3db09296d31a654bb6d8786deebcab00f0e2455fd109d7456"},
-    {file = 
"pyarrow-8.0.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash 
= "sha256:3fee786259d986f8c046100ced54d63b0c8c9f7cdb7d1bbe07dc69e0f928141c"},
-    {file = 
"pyarrow-8.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = 
"sha256:6ea2c54e6b5ecd64e8299d2abb40770fe83a718f5ddc3825ddd5cd28e352cce1"},
-    {file = 
"pyarrow-8.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:8392b9a1e837230090fe916415ed4c3433b2ddb1a798e3f6438303c70fbabcfc"},
-    {file = "pyarrow-8.0.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:cb06cacc19f3b426681f2f6803cc06ff481e7fe5b3a533b406bc5b2138843d4f"},
-    {file = "pyarrow-8.0.0.tar.gz", hash = 
"sha256:4a18a211ed888f1ac0b0ebcb99e2d9a3e913a481120ee9b1fe33d3fedb945d4e"},
-]
+pyarrow = []
 pyparsing = [
     {file = "pyparsing-3.0.9-py3-none-any.whl", hash = 
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"},
     {file = "pyparsing-3.0.9.tar.gz", hash = 
"sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"},
@@ -273,6 +313,18 @@ pytest = [
     {file = "pytest-7.1.2-py3-none-any.whl", hash = 
"sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"},
     {file = "pytest-7.1.2.tar.gz", hash = 
"sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"},
 ]
+python-dateutil = [
+    {file = "python-dateutil-2.8.2.tar.gz", hash = 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
+    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = 
"sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+]
+pytz = [
+    {file = "pytz-2022.1-py2.py3-none-any.whl", hash = 
"sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"},
+    {file = "pytz-2022.1.tar.gz", hash = 
"sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"},
+]
+six = [
+    {file = "six-1.16.0-py2.py3-none-any.whl", hash = 
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+    {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
+]
 tomli = [
     {file = "tomli-2.0.1-py3-none-any.whl", hash = 
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
     {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
diff --git a/python/adbc_driver_manager/pyproject.toml 
b/python/adbc_driver_manager/pyproject.toml
index bfcd42c..116a925 100644
--- a/python/adbc_driver_manager/pyproject.toml
+++ b/python/adbc_driver_manager/pyproject.toml
@@ -21,16 +21,20 @@ version = "0.0.1-alpha.1"
 description = ""
 authors = ["David Li <[email protected]>"]
 license = "Apache-2.0"
+homepage = "https://arrow.apache.org"
+repository = "https://github.com/apache/arrow-adbc"
 
 [tool.poetry.build]
 script = "build.py"
 
 [tool.poetry.dependencies]
+pandas = { version = ">=1.2,<2", optional = true }
+pyarrow = ">=8.0.0"
 python = ">=3.8"
 
 [tool.poetry.dev-dependencies]
 Cython = "^0.29.32"
-pyarrow = "^8.0.0"
+pandas = ">=1.2"
 pytest = "^7.1.2"
 setuptools = "^63.4.0"
 
diff --git a/python/adbc_driver_manager/requirements-dev.txt 
b/python/adbc_driver_manager/requirements-dev.txt
index d23ca51..87c566a 100644
--- a/python/adbc_driver_manager/requirements-dev.txt
+++ b/python/adbc_driver_manager/requirements-dev.txt
@@ -50,7 +50,7 @@ cython==0.29.32; (python_version >= "2.6" and 
python_full_version < "3.0.0") or
 iniconfig==1.1.1; python_version >= "3.7" \
     
--hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
     
--hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32
-numpy==1.23.1; python_version >= "3.8" \
+numpy==1.23.1 \
     
--hash=sha256:b15c3f1ed08df4980e02cc79ee058b788a3d0bef2fb3c9ca90bb8cbd5b8a3a04 \
     
--hash=sha256:9ce242162015b7e88092dccd0e854548c0926b75c7924a3495e02c6067aba1f5 \
     
--hash=sha256:e0d7447679ae9a7124385ccf0ea990bb85bb869cef217e2ea6c844b6a6855073 \
@@ -76,49 +76,50 @@ numpy==1.23.1; python_version >= "3.8" \
 packaging==21.3; python_version >= "3.7" \
     
--hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 \
     
--hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb
+pandas==1.4.3; python_version >= "3.8" \
+    
--hash=sha256:d51674ed8e2551ef7773820ef5dab9322be0828629f2cbf8d1fc31a0c4fed640 \
+    
--hash=sha256:16ad23db55efcc93fa878f7837267973b61ea85d244fc5ff0ccbcfa5638706c5 \
+    
--hash=sha256:958a0588149190c22cdebbc0797e01972950c927a11a900fe6c2296f207b1d6f \
+    
--hash=sha256:e48fbb64165cda451c06a0f9e4c7a16b534fcabd32546d531b3c240ce2844112 \
+    
--hash=sha256:6f803320c9da732cc79210d7e8cc5c8019aad512589c910c66529eb1b1818230 \
+    
--hash=sha256:2893e923472a5e090c2d5e8db83e8f907364ec048572084c7d10ef93546be6d1 \
+    
--hash=sha256:24ea75f47bbd5574675dae21d51779a4948715416413b30614c1e8b480909f81 \
+    
--hash=sha256:d5ebc990bd34f4ac3c73a2724c2dcc9ee7bf1ce6cf08e87bb25c6ad33507e318 \
+    
--hash=sha256:d6c0106415ff1a10c326c49bc5dd9ea8b9897a6ca0c8688eb9c30ddec49535ef \
+    
--hash=sha256:78b00429161ccb0da252229bcda8010b445c4bf924e721265bec5a6e96a92e92 \
+    
--hash=sha256:6dfbf16b1ea4f4d0ee11084d9c026340514d1d30270eaa82a9f1297b6c8ecbf0 \
+    
--hash=sha256:48350592665ea3cbcd07efc8c12ff12d89be09cd47231c7925e3b8afada9d50d \
+    
--hash=sha256:605d572126eb4ab2eadf5c59d5d69f0608df2bf7bcad5c5880a47a20a0699e3e \
+    
--hash=sha256:a3924692160e3d847e18702bb048dc38e0e13411d2b503fecb1adf0fcf950ba4 \
+    
--hash=sha256:07238a58d7cbc8a004855ade7b75bbd22c0db4b0ffccc721556bab8a095515f6 \
+    
--hash=sha256:755679c49460bd0d2f837ab99f0a26948e68fa0718b7e42afbabd074d945bf84 \
+    
--hash=sha256:41fc406e374590a3d492325b889a2686b31e7a7780bec83db2512988550dadbf \
+    
--hash=sha256:1d9382f72a4f0e93909feece6fef5500e838ce1c355a581b3d8f259839f2ea76 \
+    
--hash=sha256:0daf876dba6c622154b2e6741f29e87161f844e64f84801554f879d27ba63c0d \
+    
--hash=sha256:721a3dd2f06ef942f83a819c0f3f6a648b2830b191a72bbe9451bcd49c3bd42e \
+    
--hash=sha256:2ff7788468e75917574f080cd4681b27e1a7bf36461fe968b49a87b5a54d007c
 pluggy==1.0.0; python_version >= "3.7" \
     
--hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 \
     
--hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159
 py==1.11.0; python_version >= "3.7" and python_full_version < "3.0.0" or 
python_full_version >= "3.5.0" and python_version >= "3.7" \
     
--hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 \
     
--hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719
-pyarrow==8.0.0; python_version >= "3.7" \
-    
--hash=sha256:d5ef4372559b191cafe7db8932801eee252bfc35e983304e7d60b6954576a071 \
-    
--hash=sha256:863be6bad6c53797129610930794a3e797cb7d41c0a30e6794a2ac0e42ce41b8 \
-    
--hash=sha256:69b043a3fce064ebd9fbae6abc30e885680296e5bd5e6f7353e6a87966cf2ad7 \
-    
--hash=sha256:51e58778fcb8829fca37fbfaea7f208d5ce7ea89ea133dd13d8ce745278ee6f0 \
-    
--hash=sha256:15511ce2f50343f3fd5e9f7c30e4d004da9134e9597e93e9c96c3985928cbe82 \
-    
--hash=sha256:ea132067ec712d1b1116a841db1c95861508862b21eddbcafefbce8e4b96b867 \
-    
--hash=sha256:deb400df8f19a90b662babceb6dd12daddda6bb357c216e558b207c0770c7654 \
-    
--hash=sha256:3bd201af6e01f475f02be88cf1f6ee9856ab98c11d8bbb6f58347c58cd07be00 \
-    
--hash=sha256:78a6ac39cd793582998dac88ab5c1c1dd1e6503df6672f064f33a21937ec1d8d \
-    
--hash=sha256:d6f1e1040413651819074ef5b500835c6c42e6c446532a1ddef8bc5054e8dba5 \
-    
--hash=sha256:98c13b2e28a91b0fbf24b483df54a8d7814c074c2623ecef40dce1fa52f6539b \
-    
--hash=sha256:c9c97c8e288847e091dfbcdf8ce51160e638346f51919a9e74fe038b2e8aee62 \
-    
--hash=sha256:edad25522ad509e534400d6ab98cf1872d30c31bc5e947712bfd57def7af15bb \
-    
--hash=sha256:ece333706a94c1221ced8b299042f85fd88b5db802d71be70024433ddf3aecab \
-    
--hash=sha256:95c7822eb37663e073da9892f3499fe28e84f3464711a3e555e0c5463fd53a19 \
-    
--hash=sha256:25a5f7c7f36df520b0b7363ba9f51c3070799d4b05d587c60c0adaba57763479 \
-    
--hash=sha256:ce64bc1da3109ef5ab9e4c60316945a7239c798098a631358e9ab39f6e5529e9 \
-    
--hash=sha256:541e7845ce5f27a861eb5b88ee165d931943347eec17b9ff1e308663531c9647 \
-    
--hash=sha256:8cd86e04a899bef43e25184f4b934584861d787cf7519851a8c031803d45c6d8 \
-    
--hash=sha256:ba2b7aa7efb59156b87987a06f5241932914e4d5bbb74a465306b00a6c808849 \
-    
--hash=sha256:42b7982301a9ccd06e1dd4fabd2e8e5df74b93ce4c6b87b81eb9e2d86dc79871 \
-    
--hash=sha256:1dd482ccb07c96188947ad94d7536ab696afde23ad172df8e18944ec79f55055 \
-    
--hash=sha256:81b87b782a1366279411f7b235deab07c8c016e13f9af9f7c7b0ee564fedcc8f \
-    
--hash=sha256:03a10daad957970e914920b793f6a49416699e791f4c827927fd4e4d892a5d16 \
-    
--hash=sha256:65c7f4cc2be195e3db09296d31a654bb6d8786deebcab00f0e2455fd109d7456 \
-    
--hash=sha256:3fee786259d986f8c046100ced54d63b0c8c9f7cdb7d1bbe07dc69e0f928141c \
-    
--hash=sha256:6ea2c54e6b5ecd64e8299d2abb40770fe83a718f5ddc3825ddd5cd28e352cce1 \
-    
--hash=sha256:8392b9a1e837230090fe916415ed4c3433b2ddb1a798e3f6438303c70fbabcfc \
-    
--hash=sha256:cb06cacc19f3b426681f2f6803cc06ff481e7fe5b3a533b406bc5b2138843d4f \
-    
--hash=sha256:4a18a211ed888f1ac0b0ebcb99e2d9a3e913a481120ee9b1fe33d3fedb945d4e
+pyarrow==9.0.0; python_version >= "3.7"
 pyparsing==3.0.9; python_full_version >= "3.6.8" and python_version >= "3.7" \
     
--hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc \
     
--hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb
 pytest==7.1.2; python_version >= "3.7" \
     
--hash=sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c \
     
--hash=sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45
+python-dateutil==2.8.2; python_version >= "3.8" and python_full_version < 
"3.0.0" or python_full_version >= "3.3.0" and python_version >= "3.8" \
+    
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
+    
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
+pytz==2022.1; python_version >= "3.8" \
+    
--hash=sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c \
+    
--hash=sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7
+six==1.16.0; python_version >= "3.8" and python_full_version < "3.0.0" or 
python_full_version >= "3.3.0" and python_version >= "3.8" \
+    
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 \
+    
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926
 tomli==2.0.1; python_version >= "3.7" \
     
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
     
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f

Reply via email to