CurtHagenlocher commented on code in PR #2275:
URL: https://github.com/apache/arrow-adbc/pull/2275#discussion_r1817013380


##########
csharp/test/Drivers/Apache/Spark/ClientTests.cs:
##########
@@ -0,0 +1,227 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Tests.Xunit;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
+{
+    /// <summary>
+    /// Class for testing the ADBC Client using the BigQuery ADBC driver.
+    /// </summary>
+    /// <remarks>
+    /// Tests are ordered to ensure data is created for the other
+    /// queries to run.
+    /// </remarks>
+    [TestCaseOrderer("Apache.Arrow.Adbc.Tests.Xunit.TestOrderer", "Apache.Arrow.Adbc.Tests")]
+    public class ClientTests : TestBase<SparkTestConfiguration, SparkTestEnvironment>
+    {
+        public ClientTests(ITestOutputHelper? outputHelper) : base(outputHelper, new SparkTestEnvironment.Factory())
+        {
+            Skip.IfNot(Utils.CanExecuteTestConfig(TestConfigVariable));
+        }
+
+        /// <summary>
+        /// Validates if the client can execute updates.
+        /// </summary>
+        [SkippableFact, Order(1)]
+        public void CanClientExecuteUpdate()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection())
+            {
+                adbcConnection.Open();
+
+                string[] queries = GetQueries();
+                int affectedRows = ValidateAffectedRows ? 1 : -1;
+
+                List<int> expectedResults = TestEnvironment.ServerType != SparkServerType.Databricks
+                    ? [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        //1,  // UPDATE
+                        //1,  // DELETE
+                    ]
+                    : [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // UPDATE
+                        affectedRows,  // DELETE
+                    ];
+
+

Review Comment:
   nit: remove extra blank line



##########
csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs:
##########
@@ -68,7 +68,7 @@ public HiveServer2Reader(
             HiveServer2Statement statement,
             Schema schema,
             DataTypeConversion dataTypeConversion,
-            CancellationToken cancellationToken = default)
+            CancellationToken _ = default)

Review Comment:
   Should the `CancellationToken` parameter just be removed? This isn't a public API.
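   
   For illustration, a minimal sketch of that cleanup (hypothetical; assumes no internal call site actually passes a token, which the `= default` suggests):
   
   ```csharp
   // Drop the unused parameter entirely rather than renaming it to a discard;
   // any call site that passed a token would drop the trailing argument too.
   public HiveServer2Reader(
       HiveServer2Statement statement,
       Schema schema,
       DataTypeConversion dataTypeConversion)
   {
       // ...constructor body unchanged...
   }
   ```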



##########
csharp/test/Drivers/Apache/Impala/ImpalaTestEnvironment.cs:
##########
@@ -73,5 +73,6 @@ public override Dictionary<string, string> GetDriverParameters(ApacheTestConfigu
 
         public override string GetInsertStatement(string tableName, string columnName, string? value) =>
             string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, columnName, value ?? "NULL");
+        public override SampleDataBuilder GetSampleDataBuilder() => throw new NotImplementedException();

Review Comment:
   nit: insert blank line for consistency?



##########
csharp/src/Drivers/Apache/Hive2/HiveServer2Reader.cs:
##########
@@ -88,22 +88,20 @@ public HiveServer2Reader(
             // Await the fetch response
             TFetchResultsResp response = await FetchNext(_statement, cancellationToken);
 
-            // Build the current batch
-            RecordBatch result = CreateBatch(response, out int fetchedRows);
-
-            if ((_statement.BatchSize > 0 && fetchedRows < _statement.BatchSize) || fetchedRows == 0)
+            int columnCount = GetColumnCount(response);
+            int rowCount = GetRowCount(response, columnCount);
+            if ((_statement.BatchSize > 0 && rowCount < _statement.BatchSize) || rowCount == 0)

Review Comment:
   We can't reliably get "last batch" information from `TFetchResultsResp.HasMoreRows`?
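   
   If it is reliable, a minimal sketch of leaning on it instead (hedged; `IsLastBatch` is a hypothetical helper, not existing driver code):
   
   ```csharp
   // Terminate on the server's paging flag rather than inferring the last
   // batch from a row count smaller than the requested batch size.
   private static bool IsLastBatch(TFetchResultsResp response) =>
       !response.HasMoreRows;
   ```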



##########
csharp/test/Drivers/Apache/Spark/SparkTestEnvironment.cs:
##########
@@ -132,5 +135,151 @@ public override Dictionary<string, string> GetDriverParameters(SparkTestConfigur
 
         public override string GetInsertStatement(string tableName, string columnName, string? value) =>
             string.Format("INSERT INTO {0} ({1}) SELECT {2};", tableName, columnName, value ?? "NULL");
+
+        public override SampleDataBuilder GetSampleDataBuilder()
+        {
+            SampleDataBuilder sampleDataBuilder = new();
+
+            // standard values
+            sampleDataBuilder.Samples.Add(
+                new SampleData()
+                {
+                    Query = "SELECT " +
+                            "CAST(1 as BIGINT) as id, " +
+                            "CAST(2 as INTEGER) as int, " +
+                            "CAST(1.23 as FLOAT) as number_float, " +
+                            "CAST(4.56 as DOUBLE) as number_double, " +
+                            "4.56BD as decimal, " +
+                            "9.9999999999999999999999999999999999999BD as big_decimal, " +
+                            "CAST(True as BOOLEAN) as is_active, " +
+                            "'John Doe' as name, " +
+                            "X'616263313233' as data, " +
+                            "DATE '2023-09-08' as date, " +
+                            "TIMESTAMP '2023-09-08 12:34:56+00:00' as timestamp, " +
+                            "INTERVAL 178956969 YEAR 11 MONTH as interval, " +
+                            "ARRAY(1, 2, 3) as numbers, " +
+                            "STRUCT('John Doe' as name, 30 as age) as person," +
+                            "MAP('name', CAST('Jane Doe' AS STRING), 'age', CAST(29 AS INT)) as map",

Review Comment:
   Is this conversion happening in ADBC code or does it come that way from the server?
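   
   One way to tell would be to dump the Arrow schema the driver reports for this query (a sketch under assumptions: `statement` is an `AdbcStatement` already prepared with the SELECT above, and the result stream is non-null):
   
   ```csharp
   // Print each result column's Arrow type as reported by the driver; comparing
   // this against the raw Thrift schema from the server would show on which
   // side any conversion happens.
   QueryResult result = statement.ExecuteQuery();
   Schema schema = result.Stream!.Schema;
   foreach (Field field in schema.FieldsList)
   {
       Console.WriteLine($"{field.Name}: {field.DataType}");
   }
   ```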



##########
csharp/test/Drivers/Apache/Spark/ClientTests.cs:
##########
@@ -0,0 +1,227 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Tests.Xunit;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
+{
+    /// <summary>
+    /// Class for testing the ADBC Client using the BigQuery ADBC driver.

Review Comment:
   `Spark`



##########
csharp/test/Drivers/Apache/Spark/ClientTests.cs:
##########
@@ -0,0 +1,227 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one or more
+* contributor license agreements.  See the NOTICE file distributed with
+* this work for additional information regarding copyright ownership.
+* The ASF licenses this file to You under the Apache License, Version 2.0
+* (the "License"); you may not use this file except in compliance with
+* the License.  You may obtain a copy of the License at
+*
+*    http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+using System;
+using System.Collections.Generic;
+using Apache.Arrow.Adbc.Drivers.Apache.Spark;
+using Apache.Arrow.Adbc.Tests.Xunit;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace Apache.Arrow.Adbc.Tests.Drivers.Apache.Spark
+{
+    /// <summary>
+    /// Class for testing the ADBC Client using the BigQuery ADBC driver.
+    /// </summary>
+    /// <remarks>
+    /// Tests are ordered to ensure data is created for the other
+    /// queries to run.
+    /// </remarks>
+    [TestCaseOrderer("Apache.Arrow.Adbc.Tests.Xunit.TestOrderer", "Apache.Arrow.Adbc.Tests")]
+    public class ClientTests : TestBase<SparkTestConfiguration, SparkTestEnvironment>
+    {
+        public ClientTests(ITestOutputHelper? outputHelper) : base(outputHelper, new SparkTestEnvironment.Factory())
+        {
+            Skip.IfNot(Utils.CanExecuteTestConfig(TestConfigVariable));
+        }
+
+        /// <summary>
+        /// Validates if the client can execute updates.
+        /// </summary>
+        [SkippableFact, Order(1)]
+        public void CanClientExecuteUpdate()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection())
+            {
+                adbcConnection.Open();
+
+                string[] queries = GetQueries();
+                int affectedRows = ValidateAffectedRows ? 1 : -1;
+
+                List<int> expectedResults = TestEnvironment.ServerType != SparkServerType.Databricks
+                    ? [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        //1,  // UPDATE
+                        //1,  // DELETE
+                    ]
+                    : [
+                        -1, // DROP   TABLE
+                        -1, // CREATE TABLE
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // INSERT
+                        affectedRows,  // UPDATE
+                        affectedRows,  // DELETE
+                    ];
+
+
+                Tests.ClientTests.CanClientExecuteUpdate(adbcConnection, TestConfiguration, queries, expectedResults);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can get the schema.
+        /// </summary>
+        [SkippableFact, Order(2)]
+        public void CanClientGetSchema()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientGetSchema(adbcConnection, TestConfiguration, $"SELECT * FROM {TestConfiguration.Metadata.Table}");
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can connect to a live server and
+        /// parse the results.
+        /// </summary>
+        [SkippableFact, Order(3)]
+        public void CanClientExecuteQuery()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientExecuteQuery(adbcConnection, TestConfiguration);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client can connect to a live server and
+        /// parse the results.
+        /// </summary>
+        [SkippableFact, Order(5)]
+        public void CanClientExecuteEmptyQuery()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection())
+            {
+                Tests.ClientTests.CanClientExecuteQuery(
+                    adbcConnection,
+                    TestConfiguration,
+                    customQuery: $"SELECT * FROM {TestConfiguration.Metadata.Table} WHERE FALSE",
+                    expectedResultsCount: 0);
+            }
+        }
+
+        /// <summary>
+        /// Validates if the client is retrieving and converting values
+        /// to the expected types.
+        /// </summary>
+        [SkippableFact, Order(4)]
+        public void VerifyTypesAndValues()
+        {
+            using (Adbc.Client.AdbcConnection dbConnection = GetAdbcConnection())
+            {
+                SampleDataBuilder sampleDataBuilder = GetSampleDataBuilder();
+
+                Tests.ClientTests.VerifyTypesAndValues(dbConnection, sampleDataBuilder);
+            }
+        }
+
+        [SkippableFact]
+        public void VerifySchemaTablesWithNoConstraints()
+        {
+            using (Adbc.Client.AdbcConnection adbcConnection = GetAdbcConnection(includeTableConstraints: false))
+            {
+                adbcConnection.Open();
+
+                string schema = "Tables";
+
+                var tables = adbcConnection.GetSchema(schema);
+
+                Assert.True(tables.Rows.Count > 0, $"No tables were found in the schema '{schema}'");
+            }
+        }
+
+

Review Comment:
   nit: remove extra blank line


