This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new 679b3fbe66 [ZEPPELIN-5493] Remove prefix in jdbc interpreter (#4463)
679b3fbe66 is described below

commit 679b3fbe666f0b7533f2bf8bea0d7dcfbfc2ac49
Author: Guanhua Li <guanhua...@foxmail.com>
AuthorDate: Fri Oct 28 16:05:11 2022 +0800

    [ZEPPELIN-5493] Remove prefix in jdbc interpreter (#4463)
    
    * Remove prefix in jdbc interpreter
    
    * update docs
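    
    Illustrative before/after of the user-facing change (a sketch based on the
    docs updates below; the driver/url values are placeholders, not taken from
    this commit):
    
        # Before: per-prefix keys, selected with %jdbc(myprefix) or %hive(myprefix)
        myprefix.driver = org.h2.Driver
        myprefix.url    = jdbc:h2:mem:test
    
        # After: only the default.* keys are read and plain %jdbc is used;
        # a %jdbc(myprefix) paragraph now falls back to the default config.
        default.driver  = org.h2.Driver
        default.url     = jdbc:h2:mem:test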
---
 docs/interpreter/hive.md                           |  45 +-------
 docs/interpreter/jdbc.md                           |   8 +-
 .../org/apache/zeppelin/jdbc/JDBCInterpreter.java  | 127 ++++++++++-----------
 .../zeppelin/jdbc/JDBCUserConfigurations.java      |  63 +++++-----
 .../apache/zeppelin/jdbc/JDBCInterpreterTest.java  | 120 ++++---------------
 5 files changed, 122 insertions(+), 241 deletions(-)
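
Minimal Java sketch of how the simplified JDBCUserConfigurations is used after
this change (method names follow the refactored class in the diff below; the
property values, the standalone main() wrapper, and having the zeppelin-jdbc
module on the classpath are illustrative assumptions, not code from this commit):

    import java.util.Properties;
    import org.apache.zeppelin.jdbc.JDBCUserConfigurations;

    public class JdbcConfigSketch {
      public static void main(String[] args) {
        // One Properties object per user now, instead of a dbPrefix -> Properties map.
        Properties props = new Properties();
        props.setProperty("url", "jdbc:h2:mem:test");   // placeholder value
        props.setProperty("driver", "org.h2.Driver");   // placeholder value

        JDBCUserConfigurations conf = new JDBCUserConfigurations();
        conf.setProperty(props);                 // replaces setPropertyMap(dbPrefix, props)
        Properties stored = conf.getProperty();  // replaces getPropertyMap(dbPrefix)
        System.out.println(stored.getProperty("driver"));
      }
    }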

diff --git a/docs/interpreter/hive.md b/docs/interpreter/hive.md
index 467e494857..94e49ce6a4 100644
--- a/docs/interpreter/hive.md
+++ b/docs/interpreter/hive.md
@@ -25,7 +25,7 @@ limitations under the License.
 
 ## Important Notice
 
-Hive Interpreter will be deprecated and merged into JDBC Interpreter. 
+Hive Interpreter has been deprecated and merged into JDBC Interpreter. 
 You can use Hive Interpreter by using JDBC Interpreter with same functionality. 
 See the example below of settings and dependencies.
 
@@ -36,19 +36,19 @@ See the example below of settings and dependencies.
     <th>Value</th>
   </tr>
   <tr>
-    <td>hive.driver</td>
+    <td>default.driver</td>
     <td>org.apache.hive.jdbc.HiveDriver</td>
   </tr>
   <tr>
-    <td>hive.url</td>
+    <td>default.url</td>
     <td>jdbc:hive2://localhost:10000</td>
   </tr>
   <tr>
-    <td>hive.user</td>
+    <td>default.user</td>
     <td>hiveUser</td>
   </tr>
   <tr>
-    <td>hive.password</td>
+    <td>default.password</td>
     <td>hivePassword</td>
   </tr>
 </table>
@@ -102,31 +102,6 @@ See the example below of settings and dependencies.
     <td></td>
     <td><b>( Optional ) </b>Other properties used by the driver</td>
   </tr>
-  <tr>
-    <td>${prefix}.driver</td>
-    <td></td>
-    <td>Driver class path of <code>%hive(${prefix})</code> </td>
-  </tr>
-  <tr>
-    <td>${prefix}.url</td>
-    <td></td>
-    <td>Url of <code>%hive(${prefix})</code> </td>
-  </tr>
-  <tr>
-    <td>${prefix}.user</td>
-    <td></td>
-    <td><b>( Optional ) </b>Username of the connection of <code>%hive(${prefix})</code> </td>
-  </tr>
-  <tr>
-    <td>${prefix}.password</td>
-    <td></td>
-    <td><b>( Optional ) </b>Password of the connection of <code>%hive(${prefix})</code> </td>
-  </tr>
-  <tr>
-    <td>${prefix}.xxx</td>
-    <td></td>
-    <td><b>( Optional ) </b>Other properties used by the driver of <code>%hive(${prefix})</code> </td>
-  </tr>
   <tr>
     <td>zeppelin.jdbc.hive.timeout.threshold</td>
     <td>60000</td>
@@ -144,8 +119,6 @@ See the example below of settings and dependencies.
   </tr>
 </table>
 
-This interpreter provides multiple configuration with `${prefix}`. User can set a multiple connection properties by this prefix. It can be used like `%hive(${prefix})`.
-
 ## Overview
 
 The [Apache Hive](https://hive.apache.org/) ™ data warehouse software facilitates querying and managing large datasets 
@@ -162,14 +135,6 @@ Basically, you can use
 select * from my_table;
 ```
 
-or
-
-```sql
-%hive(etl)
--- 'etl' is a ${prefix}
-select * from my_table;
-```
-
 You can also run multiple queries up to 10 by default. Changing these settings is not implemented yet.
 
 ### Apply Zeppelin Dynamic Forms
diff --git a/docs/interpreter/jdbc.md b/docs/interpreter/jdbc.md
index a94f7bde4d..62d62039eb 100644
--- a/docs/interpreter/jdbc.md
+++ b/docs/interpreter/jdbc.md
@@ -722,16 +722,16 @@ See [User Impersonation in interpreter](../usage/interpreter/user_impersonation.
     <th>Value</th>
   </tr>
   <tr>
-    <td>hive.driver</td>
+    <td>default.driver</td>
     <td>org.apache.hive.jdbc.HiveDriver</td>
   </tr>
   <tr>
-    <td>hive.url</td>
+    <td>default.url</td>
     <td>jdbc:hive2://hive-server-host:2181/;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2</td>
   </tr>
   <tr>
-    <td>hive.proxy.user.property</td>
-    <td>hive.server2.proxy.user</td>
+    <td>default.proxy.user.property</td>
+    <td>default.server2.proxy.user</td>
   </tr>
   <tr>
     <td>zeppelin.jdbc.auth.type</td>
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
index 705ad19192..702beb0795 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
@@ -326,7 +326,7 @@ public class JDBCInterpreter extends KerberosInterpreter {
   private void initConnectionPoolMap() {
     for (String key : jdbcUserConfigurationsMap.keySet()) {
       try {
-        closeDBPool(key, DEFAULT_KEY);
+        closeDBPool(key);
       } catch (SQLException e) {
         LOGGER.error("Error while closing database pool.", e);
       }
@@ -356,10 +356,9 @@ public class JDBCInterpreter extends KerberosInterpreter {
    */
   private String getUser(InterpreterContext context) {
     String user = context.getAuthenticationInfo().getUser();
-    String dbPrefix = getDBPrefix(context);
 
-    if ("anonymous".equalsIgnoreCase(user) && 
basePropertiesMap.containsKey(dbPrefix)) {
-      String userInProperty = 
basePropertiesMap.get(dbPrefix).getProperty(USER_KEY);
+    if ("anonymous".equalsIgnoreCase(user) && 
basePropertiesMap.containsKey(DEFAULT_KEY)) {
+      String userInProperty = 
basePropertiesMap.get(DEFAULT_KEY).getProperty(USER_KEY);
       if (StringUtils.isNotBlank(userInProperty)) {
         user = userInProperty;
       }
@@ -375,10 +374,10 @@ public class JDBCInterpreter extends KerberosInterpreter {
     }
   }
 
-  private String getJDBCDriverName(String user, String dbPrefix) {
+  private String getJDBCDriverName(String user) {
     StringBuffer driverName = new StringBuffer();
     driverName.append(DBCP_STRING);
-    driverName.append(dbPrefix);
+    driverName.append(DEFAULT_KEY);
     driverName.append(user);
     return driverName.toString();
   }
@@ -409,37 +408,37 @@ public class JDBCInterpreter extends KerberosInterpreter {
     return jdbcUserConfigurations;
   }
 
-  private void closeDBPool(String user, String propertyKey) throws SQLException {
-    PoolingDriver poolingDriver = getJDBCConfiguration(user).removeDBDriverPool(propertyKey);
+  private void closeDBPool(String user) throws SQLException {
+    PoolingDriver poolingDriver = getJDBCConfiguration(user).removeDBDriverPool();
     if (poolingDriver != null) {
-      poolingDriver.closePool(propertyKey + user);
+      poolingDriver.closePool(DEFAULT_KEY + user);
     }
   }
 
-  private void setUserProperty(String dbPrefix, InterpreterContext context)
+  private void setUserProperty(InterpreterContext context)
       throws SQLException, IOException, InterpreterException {
 
     String user = getUser(context);
     JDBCUserConfigurations jdbcUserConfigurations = getJDBCConfiguration(user);
-    if (basePropertiesMap.get(dbPrefix).containsKey(USER_KEY) &&
-        !basePropertiesMap.get(dbPrefix).getProperty(USER_KEY).isEmpty()) {
-      String password = getPassword(basePropertiesMap.get(dbPrefix));
+    if (basePropertiesMap.get(DEFAULT_KEY).containsKey(USER_KEY) &&
+        !basePropertiesMap.get(DEFAULT_KEY).getProperty(USER_KEY).isEmpty()) {
+      String password = getPassword(basePropertiesMap.get(DEFAULT_KEY));
       if (!isEmpty(password)) {
-        basePropertiesMap.get(dbPrefix).setProperty(PASSWORD_KEY, password);
+        basePropertiesMap.get(DEFAULT_KEY).setProperty(PASSWORD_KEY, password);
       }
     }
-    jdbcUserConfigurations.setPropertyMap(dbPrefix, basePropertiesMap.get(dbPrefix));
-    if (existAccountInBaseProperty(dbPrefix)) {
+    jdbcUserConfigurations.setProperty(basePropertiesMap.get(DEFAULT_KEY));
+    if (existAccountInBaseProperty(DEFAULT_KEY)) {
       return;
     }
 
     UsernamePassword usernamePassword = getUsernamePassword(context,
-            getEntityName(context.getReplName(), dbPrefix));
+            getEntityName(context.getReplName(), DEFAULT_KEY));
     if (usernamePassword != null) {
-      jdbcUserConfigurations.cleanUserProperty(dbPrefix);
-      jdbcUserConfigurations.setUserProperty(dbPrefix, usernamePassword);
+      jdbcUserConfigurations.cleanUserProperty();
+      jdbcUserConfigurations.setUserProperty(usernamePassword);
     } else {
-      closeDBPool(user, dbPrefix);
+      closeDBPool(user);
     }
   }
 
@@ -467,11 +466,11 @@ public class JDBCInterpreter extends KerberosInterpreter {
     connectionPool.setMaxWaitMillis(maxWaitMillis);
   }
 
-  private void createConnectionPool(String url, String user, String dbPrefix,
+  private void createConnectionPool(String url, String user,
       Properties properties) throws SQLException, ClassNotFoundException {
 
-    LOGGER.info("Creating connection pool for url: {}, user: {}, dbPrefix: {}, 
properties: {}",
-            url, user, dbPrefix, properties);
+    LOGGER.info("Creating connection pool for url: {}, user: {}, properties: 
{}",
+            url, user, properties);
 
     /* Remove properties that is not valid properties for presto/trino by checking driver key.
      * - Presto: com.facebook.presto.jdbc.PrestoDriver
@@ -504,43 +503,43 @@ public class JDBCInterpreter extends KerberosInterpreter {
     poolableConnectionFactory.setPool(connectionPool);
     Class.forName(driverClass);
     PoolingDriver driver = new PoolingDriver();
-    driver.registerPool(dbPrefix + user, connectionPool);
-    getJDBCConfiguration(user).saveDBDriverPool(dbPrefix, driver);
+    driver.registerPool(DEFAULT_KEY + user, connectionPool);
+    getJDBCConfiguration(user).saveDBDriverPool(driver);
   }
 
-  private Connection getConnectionFromPool(String url, String user, String dbPrefix,
+  private Connection getConnectionFromPool(String url, String user,
       Properties properties) throws SQLException, ClassNotFoundException {
-    String jdbcDriver = getJDBCDriverName(user, dbPrefix);
+    String jdbcDriver = getJDBCDriverName(user);
 
-    if (!getJDBCConfiguration(user).isConnectionInDBDriverPool(dbPrefix)) {
-      createConnectionPool(url, user, dbPrefix, properties);
+    if (!getJDBCConfiguration(user).isConnectionInDBDriverPool()) {
+      createConnectionPool(url, user, properties);
     }
     return DriverManager.getConnection(jdbcDriver);
   }
 
-  public Connection getConnection(String dbPrefix, InterpreterContext context)
+  public Connection getConnection(InterpreterContext context)
       throws ClassNotFoundException, SQLException, InterpreterException, IOException {
 
-    if (dbPrefix == null || basePropertiesMap.get(dbPrefix) == null) {
-      LOGGER.warn("No such dbPrefix: {}", dbPrefix);
+    if (basePropertiesMap.get(DEFAULT_KEY) == null) {
+      LOGGER.warn("No default config");
       return null;
     }
 
     Connection connection = null;
     String user = getUser(context);
     JDBCUserConfigurations jdbcUserConfigurations = getJDBCConfiguration(user);
-    setUserProperty(dbPrefix, context);
+    setUserProperty(context);
 
-    final Properties properties = jdbcUserConfigurations.getPropertyMap(dbPrefix);
+    final Properties properties = jdbcUserConfigurations.getProperty();
     String url = properties.getProperty(URL_KEY);
-    url = appendProxyUserToURL(url, user, dbPrefix);
+    url = appendProxyUserToURL(url, user);
     String connectionUrl = appendTagsToURL(url, context);
 
     String authType = getProperty("zeppelin.jdbc.auth.type", "SIMPLE")
             .trim().toUpperCase();
     switch (authType) {
       case "SIMPLE":
-        connection = getConnectionFromPool(connectionUrl, user, dbPrefix, properties);
+        connection = getConnectionFromPool(connectionUrl, user, properties);
         break;
       case "KERBEROS":
         LOGGER.debug("Calling createSecureConfiguration(); this will do " +
@@ -550,9 +549,9 @@ public class JDBCInterpreter extends KerberosInterpreter {
         LOGGER.debug("createSecureConfiguration() returned");
         boolean isProxyEnabled = Boolean.parseBoolean(
                 getProperty("zeppelin.jdbc.auth.kerberos.proxy.enable", 
"true"));
-        if (basePropertiesMap.get(dbPrefix).containsKey("proxy.user.property")
+        if 
(basePropertiesMap.get(DEFAULT_KEY).containsKey("proxy.user.property")
                 || !isProxyEnabled) {
-          connection = getConnectionFromPool(connectionUrl, user, dbPrefix, 
properties);
+          connection = getConnectionFromPool(connectionUrl, user, properties);
         } else {
           UserGroupInformation ugi = null;
           try {
@@ -563,11 +562,10 @@ public class JDBCInterpreter extends KerberosInterpreter {
             throw new InterpreterException("Error in getCurrentUser", e);
           }
 
-          final String poolKey = dbPrefix;
           final String finalUser = user;
           try {
             connection = ugi.doAs((PrivilegedExceptionAction<Connection>) () ->
-                    getConnectionFromPool(connectionUrl, finalUser, poolKey, properties));
+                    getConnectionFromPool(connectionUrl, finalUser, properties));
           } catch (Exception e) {
             LOGGER.error("Error in doAs", e);
             throw new InterpreterException("Error in doAs", e);
@@ -579,11 +577,11 @@ public class JDBCInterpreter extends KerberosInterpreter {
     return connection;
   }
 
-  private String appendProxyUserToURL(String url, String user, String propertyKey) {
+  private String appendProxyUserToURL(String url, String user) {
     StringBuilder connectionUrl = new StringBuilder(url);
 
     if (user != null && !user.equals("anonymous") &&
-        basePropertiesMap.get(propertyKey).containsKey("proxy.user.property")) {
+        basePropertiesMap.get(DEFAULT_KEY).containsKey("proxy.user.property")) {
 
       Integer lastIndexOfUrl = connectionUrl.indexOf("?");
       if (lastIndexOfUrl == -1) {
@@ -591,9 +589,9 @@ public class JDBCInterpreter extends KerberosInterpreter {
       }
       LOGGER.info("Using proxy user as: {}", user);
       LOGGER.info("Using proxy property for user as: {}",
-          basePropertiesMap.get(propertyKey).getProperty("proxy.user.property"));
+          basePropertiesMap.get(DEFAULT_KEY).getProperty("proxy.user.property"));
       connectionUrl.insert(lastIndexOfUrl, ";" +
-          basePropertiesMap.get(propertyKey).getProperty("proxy.user.property") + "=" + user + ";");
+          basePropertiesMap.get(DEFAULT_KEY).getProperty("proxy.user.property") + "=" + user + ";");
     } else if (user != null && !user.equals("anonymous") && url.contains("hive")) {
       LOGGER.warn("User impersonation for hive has changed please refer: http://zeppelin.apache" +
           ".org/docs/latest/interpreter/jdbc.html#apache-hive");
@@ -719,7 +717,7 @@ public class JDBCInterpreter extends KerberosInterpreter {
     for (String propertyKey : basePropertiesMap.keySet()) {
       String precode = getProperty(String.format("%s.precode", propertyKey));
       if (StringUtils.isNotBlank(precode)) {
-        interpreterResult = executeSql(propertyKey, precode, interpreterContext);
+        interpreterResult = executeSql(precode, interpreterContext);
         if (interpreterResult.code() != Code.SUCCESS) {
           break;
         }
@@ -735,15 +733,14 @@ public class JDBCInterpreter extends KerberosInterpreter {
   }
 
   /**
-   * Execute the sql statement under this dbPrefix.
+   * Execute the sql statement.
    *
-   * @param dbPrefix
    * @param sql
    * @param context
    * @return
    * @throws InterpreterException
    */
-  private InterpreterResult executeSql(String dbPrefix, String sql,
+  private InterpreterResult executeSql(String sql,
       InterpreterContext context) throws InterpreterException {
     Connection connection = null;
     Statement statement;
@@ -752,13 +749,13 @@ public class JDBCInterpreter extends KerberosInterpreter {
     String user = getUser(context);
 
     try {
-      connection = getConnection(dbPrefix, context);
+      connection = getConnection(context);
     } catch (Exception e) {
       LOGGER.error("Fail to getConnection", e);
       try {
-        closeDBPool(user, dbPrefix);
+        closeDBPool(user);
       } catch (SQLException e1) {
-        LOGGER.error("Cannot close DBPool for user, dbPrefix: " + user + 
dbPrefix, e1);
+        LOGGER.error("Cannot close DBPool for user: " + user , e1);
       }
       if (e instanceof SQLException) {
         return new InterpreterResult(Code.ERROR, e.getMessage());
@@ -767,7 +764,7 @@ public class JDBCInterpreter extends KerberosInterpreter {
       }
     }
     if (connection == null) {
-      return new InterpreterResult(Code.ERROR, "Prefix not found.");
+      return new InterpreterResult(Code.ERROR, "User's connection not found.");
     }
 
     try {
@@ -797,23 +794,23 @@ public class JDBCInterpreter extends KerberosInterpreter {
           getJDBCConfiguration(user).saveStatement(paragraphId, statement);
 
           String statementPrecode =
-              getProperty(String.format(STATEMENT_PRECODE_KEY_TEMPLATE, dbPrefix));
+              getProperty(String.format(STATEMENT_PRECODE_KEY_TEMPLATE, DEFAULT_KEY));
 
           if (StringUtils.isNotBlank(statementPrecode)) {
             statement.execute(statementPrecode);
           }
 
           // start hive monitor thread if it is hive jdbc
-          String jdbcURL = getJDBCConfiguration(user).getPropertyMap(dbPrefix).getProperty(URL_KEY);
+          String jdbcURL = getJDBCConfiguration(user).getProperty().getProperty(URL_KEY);
           String driver =
-                  getJDBCConfiguration(user).getPropertyMap(dbPrefix).getProperty(DRIVER_KEY);
+                  getJDBCConfiguration(user).getProperty().getProperty(DRIVER_KEY);
           if (jdbcURL != null && jdbcURL.startsWith("jdbc:hive2://")
                   && driver != null && driver.equals("org.apache.hive.jdbc.HiveDriver")) {
            HiveUtils.startHiveMonitorThread(statement, context,
                    Boolean.parseBoolean(getProperty("hive.log.display", "true")), this);
           }
           boolean isResultSetAvailable = statement.execute(sqlToExecute);
-          getJDBCConfiguration(user).setConnectionInDBDriverPoolSuccessful(dbPrefix);
+          getJDBCConfiguration(user).setConnectionInDBDriverPoolSuccessful();
           if (isResultSetAvailable) {
             resultSet = statement.getResultSet();
 
@@ -939,11 +936,14 @@ public class JDBCInterpreter extends KerberosInterpreter {
   @Override
   public InterpreterResult internalInterpret(String cmd, InterpreterContext context)
           throws InterpreterException {
+    String dbprefix = getDBPrefix(context);
+    if (!StringUtils.equals(dbprefix, DEFAULT_KEY)) {
+      LOGGER.warn("DBprefix like %jdbc(db=mysql) or %jdbc(mysql) is not 
supported anymore!");
+      LOGGER.warn("JDBC Interpreter would try to use default config.");
+    }
     LOGGER.debug("Run SQL command '{}'", cmd);
-    String dbPrefix = getDBPrefix(context);
-    LOGGER.debug("DBPrefix: {}, SQL command: '{}'", dbPrefix, cmd);
     if (!isRefreshMode(context)) {
-      return executeSql(dbPrefix, cmd, context);
+      return executeSql(cmd, context);
     } else {
       int refreshInterval = Integer.parseInt(context.getLocalProperties().get("refreshInterval"));
       paragraphCancelMap.put(context.getParagraphId(), false);
@@ -954,7 +954,7 @@ public class JDBCInterpreter extends KerberosInterpreter {
       refreshExecutor.scheduleAtFixedRate(() -> {
         context.out.clear(false);
         try {
-          InterpreterResult result = executeSql(dbPrefix, cmd, context);
+          InterpreterResult result = executeSql(cmd, context);
           context.out.flush();
           interpreterResultRef.set(result);
           if (result.code() != Code.SUCCESS) {
@@ -1064,21 +1064,20 @@ public class JDBCInterpreter extends KerberosInterpreter {
   public List<InterpreterCompletion> completion(String buf, int cursor,
       InterpreterContext context) throws InterpreterException {
     List<InterpreterCompletion> candidates = new ArrayList<>();
-    String propertyKey = getDBPrefix(context);
     String sqlCompleterKey =
-        String.format("%s.%s", getUser(context), propertyKey);
+        String.format("%s.%s", getUser(context), DEFAULT_KEY);
     SqlCompleter sqlCompleter = sqlCompletersMap.get(sqlCompleterKey);
 
     Connection connection = null;
     try {
       if (context != null) {
-        connection = getConnection(propertyKey, context);
+        connection = getConnection(context);
       }
     } catch (ClassNotFoundException | SQLException | IOException e) {
       LOGGER.warn("SQLCompleter will created without use connection");
     }
 
-    sqlCompleter = createOrUpdateSqlCompleter(sqlCompleter, connection, propertyKey, buf, cursor);
+    sqlCompleter = createOrUpdateSqlCompleter(sqlCompleter, connection, DEFAULT_KEY, buf, cursor);
     sqlCompletersMap.put(sqlCompleterKey, sqlCompleter);
     sqlCompleter.complete(buf, cursor, candidates);
 
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCUserConfigurations.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCUserConfigurations.java
index 223b85b43d..311fb0bad0 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCUserConfigurations.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCUserConfigurations.java
@@ -15,6 +15,7 @@
 package org.apache.zeppelin.jdbc;
 
 import org.apache.commons.dbcp2.PoolingDriver;
+import org.apache.zeppelin.user.UsernamePassword;
 
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -22,25 +23,17 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
 
-import org.apache.zeppelin.user.UsernamePassword;
-
 /**
  * UserConfigurations for JDBC impersonation.
  */
 public class JDBCUserConfigurations {
   private final Map<String, Statement> paragraphIdStatementMap;
-  // dbPrefix --> PoolingDriver
-  private final Map<String, PoolingDriver> poolingDriverMap;
-  // dbPrefix --> Properties
-  private final HashMap<String, Properties> propertiesMap;
-  // dbPrefix --> Boolean
-  private HashMap<String, Boolean> isSuccessful;
+  private PoolingDriver poolingDriver;
+  private Properties properties;
+  private Boolean isSuccessful;
 
   public JDBCUserConfigurations() {
     paragraphIdStatementMap = new HashMap<>();
-    poolingDriverMap = new HashMap<>();
-    propertiesMap = new HashMap<>();
-    isSuccessful = new HashMap<>();
   }
 
   public void initStatementMap() throws SQLException {
@@ -51,27 +44,26 @@ public class JDBCUserConfigurations {
   }
 
   public void initConnectionPoolMap() throws SQLException {
-    poolingDriverMap.clear();
-    isSuccessful.clear();
+    this.poolingDriver = null;
+    this.isSuccessful = null;
   }
 
-  public void setPropertyMap(String dbPrefix, Properties properties) {
-    Properties p = (Properties) properties.clone();
-    propertiesMap.put(dbPrefix, p);
+  public void setProperty(Properties properties) {
+    this.properties = (Properties) properties.clone();
   }
 
-  public Properties getPropertyMap(String key) {
-    return propertiesMap.get(key);
+  public Properties getProperty() {
+    return this.properties;
   }
 
-  public void cleanUserProperty(String dfPrefix) {
-    propertiesMap.get(dfPrefix).remove("user");
-    propertiesMap.get(dfPrefix).remove("password");
+  public void cleanUserProperty() {
+    this.properties.remove("user");
+    this.properties.remove("password");
   }
 
-  public void setUserProperty(String dbPrefix, UsernamePassword usernamePassword) {
-    propertiesMap.get(dbPrefix).setProperty("user", usernamePassword.getUsername());
-    propertiesMap.get(dbPrefix).setProperty("password", usernamePassword.getPassword());
+  public void setUserProperty(UsernamePassword usernamePassword) {
+    this.properties.setProperty("user", usernamePassword.getUsername());
+    this.properties.setProperty("password", usernamePassword.getPassword());
   }
 
   public void saveStatement(String paragraphId, Statement statement) throws SQLException {
@@ -86,20 +78,23 @@ public class JDBCUserConfigurations {
     paragraphIdStatementMap.remove(paragraphId);
   }
 
-  public void saveDBDriverPool(String dbPrefix, PoolingDriver driver) throws SQLException {
-    poolingDriverMap.put(dbPrefix, driver);
-    isSuccessful.put(dbPrefix, false);
+  public void saveDBDriverPool(PoolingDriver driver) throws SQLException {
+    this.poolingDriver = driver;
+    this.isSuccessful = false;
   }
-  public PoolingDriver removeDBDriverPool(String key) throws SQLException {
-    isSuccessful.remove(key);
-    return poolingDriverMap.remove(key);
+
+  public PoolingDriver removeDBDriverPool() throws SQLException {
+    this.isSuccessful = null;
+    PoolingDriver tmp = poolingDriver;
+    this.poolingDriver = null;
+    return tmp;
   }
 
-  public boolean isConnectionInDBDriverPool(String key) {
-    return poolingDriverMap.containsKey(key);
+  public boolean isConnectionInDBDriverPool() {
+    return this.poolingDriver != null;
   }
 
-  public void setConnectionInDBDriverPoolSuccessful(String dbPrefix) {
-    isSuccessful.put(dbPrefix, true);
+  public void setConnectionInDBDriverPoolSuccessful() {
+    this.isSuccessful = true;
   }
 }
diff --git a/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java b/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
index 114ef7a99b..3d3611c7ad 100644
--- a/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
+++ b/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
@@ -58,7 +58,6 @@ import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_STATEMENT_PRECODE
 import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_URL;
 import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_USER;
 import static org.apache.zeppelin.jdbc.JDBCInterpreter.PRECODE_KEY_TEMPLATE;
-import static org.apache.zeppelin.jdbc.JDBCInterpreter.STATEMENT_PRECODE_KEY_TEMPLATE;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -118,7 +117,6 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
         .build();
   }
 
-
   @Test
   public void testForParsePropertyKey() {
     JDBCInterpreter t = new JDBCInterpreter(new Properties());
@@ -143,8 +141,12 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     assertEquals("hive", t.getDBPrefix(interpreterContext));
   }
 
+  /**
+   * DBprefix like %jdbc(db=mysql) or %jdbc(mysql) is not supported anymore
+   * JDBC Interpreter would try to use default config.
+   */
   @Test
-  public void testForMapPrefix() throws IOException, InterpreterException {
-  public void testDBPrefixProhibited() throws IOException, InterpreterException {
     Properties properties = new Properties();
     properties.setProperty("common.max_count", "1000");
     properties.setProperty("common.max_retry", "3");
@@ -161,12 +163,16 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     InterpreterContext context = InterpreterContext.builder()
         .setAuthenticationInfo(new AuthenticationInfo("testUser"))
         .setLocalProperties(localProperties)
+        .setParagraphId("paragraphId")
+        .setInterpreterOut(new InterpreterOutput())
         .build();
     InterpreterResult interpreterResult = t.interpret(sqlQuery, context);
 
-    // if prefix not found return ERROR and Prefix not found.
-    assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code());
-    assertEquals("Prefix not found.", 
interpreterResult.message().get(0).getData());
+    // The result should be the same as that run with default config
+    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
+    List<InterpreterResultMessage> resultMessages = 
context.out.toInterpreterResultMessage();
+    assertEquals("ID\tNAME\na\ta_name\nb\tb_name\nc\tnull\n",
+            resultMessages.get(0).getData());
   }
 
   @Test
@@ -539,7 +545,7 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
   public void testMultiTenant_1() throws IOException, InterpreterException {
     // user1 %jdbc  select from default db
     // user2 %jdbc  select from default db
-    // user2 %jdbc  select from from hive db
+
     Properties properties = getDBProperty("default", "dbuser", "dbpassword");
     properties.putAll(getDBProperty("hive", "", ""));
 
@@ -557,8 +563,8 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     jdbc.interpret("", context);
 
     JDBCUserConfigurations user1JDBC1Conf = jdbc.getJDBCConfiguration("user1");
-    assertEquals("dbuser", 
user1JDBC1Conf.getPropertyMap("default").get("user"));
-    assertEquals("dbpassword", 
user1JDBC1Conf.getPropertyMap("default").get("password"));
+    assertEquals("dbuser", user1JDBC1Conf.getProperty().get("user"));
+    assertEquals("dbpassword", user1JDBC1Conf.getProperty().get("password"));
 
     // user2 run default
     context = InterpreterContext.builder()
@@ -569,23 +575,9 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     jdbc.interpret("", context);
 
     JDBCUserConfigurations user2JDBC1Conf = jdbc.getJDBCConfiguration("user2");
-    assertEquals("dbuser", 
user2JDBC1Conf.getPropertyMap("default").get("user"));
-    assertEquals("dbpassword", 
user2JDBC1Conf.getPropertyMap("default").get("password"));
-
-    // user2 run hive
-    Map<String, String> localProperties = new HashMap<>();
-    localProperties.put("db", "hive");
-    context = InterpreterContext.builder()
-            .setAuthenticationInfo(user2Credential)
-            .setInterpreterOut(new InterpreterOutput())
-            .setLocalProperties(localProperties)
-            .setReplName("jdbc")
-            .build();
-    jdbc.interpret("", context);
+    assertEquals("dbuser", user2JDBC1Conf.getProperty().get("user"));
+    assertEquals("dbpassword", user2JDBC1Conf.getProperty().get("password"));
 
-    user2JDBC1Conf = jdbc.getJDBCConfiguration("user2");
-    assertEquals("user2Id", user2JDBC1Conf.getPropertyMap("hive").get("user"));
-    assertEquals("user2Pw", 
user2JDBC1Conf.getPropertyMap("hive").get("password"));
 
     jdbc.close();
   }
@@ -609,8 +601,8 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     jdbc.interpret("", context);
 
     JDBCUserConfigurations user1JDBC1Conf = jdbc.getJDBCConfiguration("user1");
-    assertEquals("user1Id", 
user1JDBC1Conf.getPropertyMap("default").get("user"));
-    assertEquals("user1Pw", 
user1JDBC1Conf.getPropertyMap("default").get("password"));
+    assertEquals("user1Id", user1JDBC1Conf.getProperty().get("user"));
+    assertEquals("user1Pw", user1JDBC1Conf.getProperty().get("password"));
 
     // user2 run default
     context = InterpreterContext.builder()
@@ -621,8 +613,8 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     jdbc.interpret("", context);
 
     JDBCUserConfigurations user2JDBC1Conf = jdbc.getJDBCConfiguration("user2");
-    assertEquals("user2Id", 
user2JDBC1Conf.getPropertyMap("default").get("user"));
-    assertEquals("user2Pw", 
user2JDBC1Conf.getPropertyMap("default").get("password"));
+    assertEquals("user2Id", user2JDBC1Conf.getProperty().get("user"));
+    assertEquals("user2Pw", user2JDBC1Conf.getProperty().get("password"));
 
     jdbc.close();
   }
@@ -678,45 +670,6 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
     assertEquals(InterpreterResult.Type.TEXT, interpreterResult.message().get(0).getType());
   }
 
-  @Test
-  public void testPrecodeWithAnotherPrefix() throws IOException,
-          InterpreterException {
-    Properties properties = new Properties();
-    properties.setProperty("anotherPrefix.driver", "org.h2.Driver");
-    properties.setProperty("anotherPrefix.url", getJdbcConnection());
-    properties.setProperty("anotherPrefix.user", "");
-    properties.setProperty("anotherPrefix.password", "");
-    properties.setProperty(String.format(PRECODE_KEY_TEMPLATE, "anotherPrefix"),
-            "create table test_precode_2 (id int); insert into test_precode_2 values (2);");
-    JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
-    jdbcInterpreter.open();
-
-    Map<String, String> localProperties = new HashMap<>();
-    localProperties.put("db", "anotherPrefix");
-    InterpreterContext context = InterpreterContext.builder()
-        .setAuthenticationInfo(new AuthenticationInfo("testUser"))
-        .setInterpreterOut(new InterpreterOutput())
-        .setLocalProperties(localProperties)
-        .build();
-    jdbcInterpreter.executePrecode(context);
-
-    String sqlQuery = "select * from test_precode_2";
-
-    InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);
-
-    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
-    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
-
-    assertEquals(3, resultMessages.size());
-    assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(0).getType());
-    assertEquals("Query executed successfully. Affected rows : 0\n\n",
-            resultMessages.get(0).getData());
-    assertEquals(InterpreterResult.Type.TEXT, resultMessages.get(1).getType());
-    assertEquals("Query executed successfully. Affected rows : 1\n",
-            resultMessages.get(1).getData());
-    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(2).getType());
-    assertEquals("ID\n2\n", resultMessages.get(2).getData());
-  }
 
   @Test
   public void testStatementPrecode() throws IOException, InterpreterException {
@@ -761,37 +714,6 @@ public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
            interpreterResult.message().get(0).getData().contains("Syntax error"));
   }
 
-  @Test
-  public void testStatementPrecodeWithAnotherPrefix() throws IOException,
-          InterpreterException {
-    Properties properties = new Properties();
-    properties.setProperty("anotherPrefix.driver", "org.h2.Driver");
-    properties.setProperty("anotherPrefix.url", getJdbcConnection());
-    properties.setProperty("anotherPrefix.user", "");
-    properties.setProperty("anotherPrefix.password", "");
-    properties.setProperty(String.format(STATEMENT_PRECODE_KEY_TEMPLATE, "anotherPrefix"),
-            "set @v='statementAnotherPrefix'");
-    JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
-    jdbcInterpreter.open();
-
-    Map<String, String> localProperties = new HashMap<>();
-    localProperties.put("db", "anotherPrefix");
-    InterpreterContext context = InterpreterContext.builder()
-        .setAuthenticationInfo(new AuthenticationInfo("testUser"))
-        .setInterpreterOut(new InterpreterOutput())
-        .setLocalProperties(localProperties)
-        .build();
-
-    String sqlQuery = "select @v";
-
-    InterpreterResult interpreterResult = jdbcInterpreter.interpret(sqlQuery, context);
-
-    assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
-    List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
-    assertEquals(InterpreterResult.Type.TABLE, resultMessages.get(0).getType());
-    assertEquals("@V\nstatementAnotherPrefix\n", resultMessages.get(0).getData());
-  }
-
   @Test
   public void testSplitSqlQueryWithComments() throws IOException,
           InterpreterException {

