This is an automated email from the ASF dual-hosted git repository.

zykkk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git


The following commit(s) were added to refs/heads/master by this push:
     new 10623ad6717 [improvement](jdbc catalog) Optimize connection pool 
caching logic (#28859)
10623ad6717 is described below

commit 10623ad67178421a6299f3351fa2813e7ff646a8
Author: zy-kkk <[email protected]>
AuthorDate: Tue Dec 26 14:12:37 2023 +0800

    [improvement](jdbc catalog) Optimize connection pool caching logic (#28859)
    
    In the old caching logic, we only used jdbcUrl, user, and password as the 
cache key. This may cause a stale connection to still be used after the driver 
jar package is replaced, so we should concatenate all the parameters required 
for the connection pool into the key.
---
 .../src/main/java/org/apache/doris/jdbc/JdbcDataSource.java | 13 +++++++++----
 .../src/main/java/org/apache/doris/jdbc/JdbcExecutor.java   | 10 +++++-----
 2 files changed, 14 insertions(+), 9 deletions(-)

diff --git 
a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
 
b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
index 95b3dac585a..6a6a022d29c 100644
--- 
a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
+++ 
b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
@@ -30,15 +30,20 @@ public class JdbcDataSource {
         return jdbcDataSource;
     }
 
-    public DruidDataSource getSource(String jdbcUrl) {
-        return sourcesMap.get(jdbcUrl);
+    public DruidDataSource getSource(String cacheKey) {
+        return sourcesMap.get(cacheKey);
     }
 
-    public void putSource(String jdbcUrl, DruidDataSource ds) {
-        sourcesMap.put(jdbcUrl, ds);
+    public void putSource(String cacheKey, DruidDataSource ds) {
+        sourcesMap.put(cacheKey, ds);
     }
 
     public Map<String, DruidDataSource> getSourcesMap() {
         return sourcesMap;
     }
+
+    public String createCacheKey(String jdbcUrl, String jdbcUser, String 
jdbcPassword, String jdbcDriverUrl,
+            String jdbcDriverClass) {
+        return jdbcUrl + jdbcUser + jdbcPassword + jdbcDriverUrl + 
jdbcDriverClass;
+    }
 }
diff --git 
a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
 
b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
index 8864afa4938..9f98740aa61 100644
--- 
a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
+++ 
b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcExecutor.java
@@ -281,6 +281,8 @@ public class JdbcExecutor {
 
     private void init(String driverUrl, String sql, int batchSize, String 
driverClass, String jdbcUrl, String jdbcUser,
             String jdbcPassword, TJdbcOperation op, TOdbcTableType tableType) 
throws UdfRuntimeException {
+        String druidDataSourceKey = 
JdbcDataSource.getDataSource().createCacheKey(jdbcUrl, jdbcUser, jdbcPassword,
+                driverUrl, driverClass);
         try {
             if (isNebula()) {
                 batchSizeNum = batchSize;
@@ -290,10 +292,10 @@ public class JdbcExecutor {
             } else {
                 ClassLoader parent = getClass().getClassLoader();
                 ClassLoader classLoader = UdfUtils.getClassLoader(driverUrl, 
parent);
-                druidDataSource = 
JdbcDataSource.getDataSource().getSource(jdbcUrl + jdbcUser + jdbcPassword);
+                druidDataSource = 
JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
                 if (druidDataSource == null) {
                     synchronized (druidDataSourceLock) {
-                        druidDataSource = 
JdbcDataSource.getDataSource().getSource(jdbcUrl + jdbcUser + jdbcPassword);
+                        druidDataSource = 
JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
                         if (druidDataSource == null) {
                             long start = System.currentTimeMillis();
                             DruidDataSource ds = new DruidDataSource();
@@ -312,11 +314,9 @@ public class JdbcExecutor {
                             ds.setTimeBetweenEvictionRunsMillis(maxIdleTime / 
5);
                             ds.setMinEvictableIdleTimeMillis(maxIdleTime);
                             druidDataSource = ds;
-                            // here is a cache of datasource, which using the 
string(jdbcUrl + jdbcUser +
-                            // jdbcPassword) as key.
                             // and the default datasource init = 1, min = 1, 
max = 100, if one of connection idle
                             // time greater than 10 minutes. then connection 
will be retrieved.
-                            JdbcDataSource.getDataSource().putSource(jdbcUrl + 
jdbcUser + jdbcPassword, ds);
+                            
JdbcDataSource.getDataSource().putSource(druidDataSourceKey, ds);
                             LOG.info("init datasource [" + (jdbcUrl + 
jdbcUser) + "] cost: " + (
                                     System.currentTimeMillis() - start) + " 
ms");
                         }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to