This is an automated email from the ASF dual-hosted git repository.

dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new d716d292ea0 HIVE-27060: Exception in add partitions with SQL Server when number of parameters exceed 2100 (Venu Reddy, reviewed by Zhihua Deng)
d716d292ea0 is described below

commit d716d292ea04c449ba872a9cc01884532297d035
Author: Venu Reddy <35334869+venureddy2...@users.noreply.github.com>
AuthorDate: Fri Feb 10 14:10:15 2023 +0530

    HIVE-27060: Exception in add partitions with SQL Server when number of parameters exceed 2100 (Venu Reddy, reviewed by Zhihua Deng)
    
    Closes #4044
---
 .../org/apache/hadoop/hive/metastore/DirectSqlInsertPart.java  | 10 ++++++++--
 .../org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java   |  6 +-----
 2 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DirectSqlInsertPart.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DirectSqlInsertPart.java
index 182768402f6..be17470edd6 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DirectSqlInsertPart.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/DirectSqlInsertPart.java
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.metastore;
 
 import static org.apache.commons.lang3.StringUtils.repeat;
-import static org.apache.hadoop.hive.metastore.Batchable.NO_BATCHING;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -83,7 +82,14 @@ class DirectSqlInsertPart {
     if (rowCount == 0 || columnCount == 0) {
       return;
     }
-    int maxRowsInBatch = (batchSize == NO_BATCHING) ? rowCount : batchSize;
+    int maxRowsInBatch = batchSize > 0 ? batchSize : rowCount;
+    if (dbType.isSQLSERVER()) {
+      // SQL Server supports a maximum of 2100 parameters in a request. Adjust the maxRowsInBatch accordingly
+      int maxAllowedRows = (2100 - columnCount) / columnCount;
+      if (maxRowsInBatch > maxAllowedRows) {
+        maxRowsInBatch = maxAllowedRows;
+      }
+    }
     int maxBatches = rowCount / maxRowsInBatch;
     int last = rowCount % maxRowsInBatch;
     String rowFormat = "(" + repeat(",?", columnCount).substring(1) + ")";
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
index a39da29cbb8..5789b535deb 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java
@@ -195,16 +195,12 @@ class MetaStoreDirectSql {
 
     this.dbType = dbType;
     int batchSize = MetastoreConf.getIntVar(conf, ConfVars.DIRECT_SQL_PARTITION_BATCH_SIZE);
+    this.directSqlInsertPart = new DirectSqlInsertPart(pm, dbType, batchSize);
     if (batchSize == DETECT_BATCHING) {
       batchSize = dbType.needsInBatching() ? 1000 : NO_BATCHING;
     }
     this.batchSize = batchSize;
     this.updateStat = new DirectSqlUpdateStat(pm, conf, dbType, batchSize);
-
-    // TODO: Oracle supports to insert more than 1000 rows with a single insert query. Can use NO_BATCHING for oracle db
-    //  too during batch detection(DETECT_BATCHING) for insert queries as future improvement. Currently, used the same
-    //  limit as IN clause/operator limit(i.e., 1000) during batch detection.
-    this.directSqlInsertPart = new DirectSqlInsertPart(pm, dbType, batchSize);
     ImmutableMap.Builder<String, String> fieldNameToTableNameBuilder =
         new ImmutableMap.Builder<>();
 

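For reference, the core of the fix is the per-batch row cap in DirectSqlInsertPart: SQL Server rejects requests carrying more than 2100 parameters, so a multi-row INSERT with columnCount "?" placeholders per row may hold at most (2100 - columnCount) / columnCount rows. Below is a minimal standalone sketch of that calculation, not Hive code: the class name, method signature, and the sample numbers in main() are illustrative, and only the 2100 limit and the cap formula mirror the patch above.

// Minimal standalone sketch of the row cap introduced in DirectSqlInsertPart above.
// Hypothetical class/method names; only the 2100-parameter limit and the formula
// come from the patch.
public class SqlServerBatchSizeSketch {

  // SQL Server rejects requests carrying more than 2100 parameters.
  private static final int SQL_SERVER_MAX_PARAMS = 2100;

  /**
   * Rows to place in one multi-row INSERT, given columnCount "?" placeholders per row.
   * A non-positive configured batch size means "insert all rows in one statement".
   */
  static int maxRowsInBatch(int configuredBatchSize, int rowCount, int columnCount, boolean isSqlServer) {
    int maxRowsInBatch = configuredBatchSize > 0 ? configuredBatchSize : rowCount;
    if (isSqlServer) {
      // Cap so that maxRowsInBatch * columnCount stays below the 2100-parameter ceiling.
      int maxAllowedRows = (SQL_SERVER_MAX_PARAMS - columnCount) / columnCount;
      if (maxRowsInBatch > maxAllowedRows) {
        maxRowsInBatch = maxAllowedRows;
      }
    }
    return maxRowsInBatch;
  }

  public static void main(String[] args) {
    // Illustrative numbers: 5000 rows to insert, 9 parameters per row, configured batch size 1000.
    // Uncapped, one statement would carry 1000 * 9 = 9000 parameters and SQL Server would reject it.
    int columnCount = 9;
    int rows = maxRowsInBatch(1000, 5000, columnCount, true);
    System.out.println("rows per INSERT       = " + rows);                // 232
    System.out.println("parameters per INSERT = " + rows * columnCount);  // 2088, under the 2100 limit
  }
}

This also reflects why the patch constructs DirectSqlInsertPart before the DETECT_BATCHING adjustment in MetaStoreDirectSql: a non-positive configured batch size simply falls back to rowCount, and the SQL Server cap is then applied regardless of how batching was detected.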