Repository: incubator-zeppelin
Updated Branches:
refs/heads/master 0cfc84fa6 -> 404846f96
JDBC generic interpreter
You only need to add the JDBC connector jar to the classpath, and the
interpreter adds the particular properties for each database.
In the file zeppelin-daemon.sh add:
ZEPPELIN_CLASSPATH+=":${ZEPPELIN_HOME}/jdbc/jars/jdbc-connector-jar"
Author: beeva-victorgarcia <[email protected]>
Author: Victor <[email protected]>
Author: vgmartinez <[email protected]>
Closes #361 from vgmartinez/jdbc_interpreter and squashes the following commits:
2513c1b [vgmartinez] Merge branch 'master' into jdbc_interpreter
8046692 [vgmartinez] merged with master
e602621 [beeva-victorgarcia] remove spaces
37a4c1a [beeva-victorgarcia] remove dependency
4085849 [beeva-victorgarcia] rebase branch
bd20ac2 [beeva-victorgarcia] remove README.md
2f93406 [beeva-victorgarcia] clean commit
f0ad06d [beeva-victorgarcia] Merge branch 'master' of
https://github.com/apache/incubator-zeppelin
a0e0d54 [beeva-victorgarcia] add some test
fe92f89 [beeva-victorgarcia] add multiple connections for interpreter
a06718c [beeva-victorgarcia] -a
09006f1 [Victor] fix test
462c3b1 [Victor] Merge branch 'master' of
https://github.com/apache/incubator-zeppelin into jdbc_interpreter
a66a5b7 [Victor] add descriptions
710699c [Victor] deleted cassandra/.cache-main from commit
4f28f5a [Victor] change psql to jdbc
53d0a81 [Victor] generic interpreter for jdbc
Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit:
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/404846f9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/404846f9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/404846f9
Branch: refs/heads/master
Commit: 404846f969c6037f35d9d339c7f39b51434e1385
Parents: 0cfc84f
Author: beeva-victorgarcia <[email protected]>
Authored: Sun Jan 17 19:08:31 2016 +0100
Committer: Jongyoul Lee <[email protected]>
Committed: Mon Jan 18 12:25:52 2016 +0900
----------------------------------------------------------------------
bin/zeppelin-daemon.sh | 3 +
conf/zeppelin-site.xml.template | 2 +-
jdbc/pom.xml | 169 ++++++++
.../apache/zeppelin/jdbc/JDBCInterpreter.java | 413 +++++++++++++++++++
.../zeppelin/jdbc/JDBCInterpreterTest.java | 125 ++++++
pom.xml | 2 +-
postgresql/README.md | 9 -
postgresql/pom.xml | 162 --------
.../zeppelin/postgresql/SqlCompleter.java | 250 -----------
postgresql/src/main/resources/ansi.sql.keywords | 1 -
.../postgresql-native-driver-sql.keywords | 1 -
.../postgresql/PostgreSqlInterpreterTest.java | 260 ------------
.../zeppelin/postgresql/SqlCompleterTest.java | 197 ---------
.../zeppelin/conf/ZeppelinConfiguration.java | 4 +-
14 files changed, 714 insertions(+), 884 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/bin/zeppelin-daemon.sh
----------------------------------------------------------------------
diff --git a/bin/zeppelin-daemon.sh b/bin/zeppelin-daemon.sh
index 6e08089..692740b 100755
--- a/bin/zeppelin-daemon.sh
+++ b/bin/zeppelin-daemon.sh
@@ -67,6 +67,9 @@ if [[ -d "${ZEPPELIN_HOME}/zeppelin-server/target/classes"
]]; then
ZEPPELIN_CLASSPATH+=":${ZEPPELIN_HOME}/zeppelin-server/target/classes"
fi
+# Add jdbc connector jar
+# ZEPPELIN_CLASSPATH+=":${ZEPPELIN_HOME}/jdbc/jars/jdbc-connector-jar"
+
addJarInDir "${ZEPPELIN_HOME}"
addJarInDir "${ZEPPELIN_HOME}/lib"
addJarInDir "${ZEPPELIN_HOME}/zeppelin-interpreter/target/lib"
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/conf/zeppelin-site.xml.template
----------------------------------------------------------------------
diff --git a/conf/zeppelin-site.xml.template b/conf/zeppelin-site.xml.template
index a51c732..4fe5a0a 100755
--- a/conf/zeppelin-site.xml.template
+++ b/conf/zeppelin-site.xml.template
@@ -105,7 +105,7 @@
<property>
<name>zeppelin.interpreters</name>
-
<value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter,org.apache.zeppelin.tajo.TajoInterpreter,org.apache.zeppelin.flink.FlinkInterpreter,org.apache.zeppelin.lens.LensInterpreter,org.apache.zeppelin.ignite.IgniteInterpreter,org.apache.zeppelin.ignite.IgniteSqlInterpreter,org.apache.zeppelin.cassandra.CassandraInterpreter,org.apache.zeppelin.geode.GeodeOqlInterpreter,org.apache.zeppelin.postgresql.PostgreSqlInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.kylin.KylinInterpreter,org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,org.apache.zeppelin.scalding.ScaldingInterpreter</value>
+
<value>org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter,org.apache.zeppelin.tajo.TajoInterpreter,org.apache.zeppelin.flink.FlinkInterpreter,org.apache.zeppelin.lens.LensInterpreter,org.apache.zeppelin.ignite.IgniteInterpreter,org.apache.zeppelin.ignite.IgniteSqlInterpreter,org.apache.zeppelin.cassandra.CassandraInterpreter,org.apache.zeppelin.geode.GeodeOqlInterpreter,org.apache.zeppelin.jdbc.JDBCInterpreter,org.apache.zeppelin.phoenix.PhoenixInterpreter,org.apache.zeppelin.kylin.KylinInterpreter,org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,org.apache.zeppelin.scalding.ScaldingInterpreter</value>
<description>Comma separated interpreter configurations. First interpreter
become a default</description>
</property>
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/jdbc/pom.xml
----------------------------------------------------------------------
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
new file mode 100644
index 0000000..a5596f8
--- /dev/null
+++ b/jdbc/pom.xml
@@ -0,0 +1,169 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+ ~ Licensed to the Apache Software Foundation (ASF) under one or more
+ ~ contributor license agreements. See the NOTICE file distributed with
+ ~ this work for additional information regarding copyright ownership.
+ ~ The ASF licenses this file to You under the Apache License, Version 2.0
+ ~ (the "License"); you may not use this file except in compliance with
+ ~ the License. You may obtain a copy of the License at
+ ~
+ ~ http://www.apache.org/licenses/LICENSE-2.0
+ ~
+ ~ Unless required by applicable law or agreed to in writing, software
+ ~ distributed under the License is distributed on an "AS IS" BASIS,
+ ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ~ See the License for the specific language governing permissions and
+ ~ limitations under the License.
+ -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <artifactId>zeppelin</artifactId>
+ <groupId>org.apache.zeppelin</groupId>
+ <version>0.6.0-incubating-SNAPSHOT</version>
+ </parent>
+
+ <groupId>org.apache.zeppelin</groupId>
+ <artifactId>zeppelin-jdbc</artifactId>
+ <packaging>jar</packaging>
+ <version>0.6.0-incubating-SNAPSHOT</version>
+ <name>Zeppelin: JDBC interpreter</name>
+ <url>http://www.apache.org</url>
+
+ <properties>
+ <postgresql.version>9.4-1201-jdbc41</postgresql.version>
+ </properties>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.zeppelin</groupId>
+ <artifactId>zeppelin-interpreter</artifactId>
+ <version>${project.version}</version>
+ <scope>provided</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.postgresql</groupId>
+ <artifactId>postgresql</artifactId>
+ <version>${postgresql.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.google.guava</groupId>
+ <artifactId>guava</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>2.12.1</version>
+ </dependency>
+
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.h2database</groupId>
+ <artifactId>h2</artifactId>
+ <version>1.4.190</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>1.9.5</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>com.mockrunner</groupId>
+ <artifactId>mockrunner-jdbc</artifactId>
+ <version>1.0.8</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-deploy-plugin</artifactId>
+ <version>2.7</version>
+ <configuration>
+ <skip>true</skip>
+ </configuration>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <version>1.3.1</version>
+ <executions>
+ <execution>
+ <id>enforce</id>
+ <phase>none</phase>
+ </execution>
+ </executions>
+ </plugin>
+
+ <plugin>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <version>2.8</version>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+
<outputDirectory>${project.build.directory}/../../interpreter/jdbc</outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ <includeScope>runtime</includeScope>
+ </configuration>
+ </execution>
+ <execution>
+ <id>copy-artifact</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy</goal>
+ </goals>
+ <configuration>
+
<outputDirectory>${project.build.directory}/../../interpreter/jdbc</outputDirectory>
+ <overWriteReleases>false</overWriteReleases>
+ <overWriteSnapshots>false</overWriteSnapshots>
+ <overWriteIfNewer>true</overWriteIfNewer>
+ <includeScope>runtime</includeScope>
+ <artifactItems>
+ <artifactItem>
+ <groupId>${project.groupId}</groupId>
+ <artifactId>${project.artifactId}</artifactId>
+ <version>${project.version}</version>
+ <type>${project.packaging}</type>
+ </artifactItem>
+ </artifactItems>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
----------------------------------------------------------------------
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
new file mode 100644
index 0000000..4b9775e
--- /dev/null
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
@@ -0,0 +1,413 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional
information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
+ * or implied. See the License for the specific language governing permissions
and limitations under
+ * the License.
+ */
+package org.apache.zeppelin.jdbc;
+
+import static org.apache.commons.lang.StringUtils.containsIgnoreCase;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResult.Code;
+import org.apache.zeppelin.scheduler.Scheduler;
+import org.apache.zeppelin.scheduler.SchedulerFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Function;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.common.collect.Sets.SetView;
+
+/**
+ * JDBC interpreter for Zeppelin. This interpreter can also be used for
accessing HAWQ,
+ * GreenplumDB, MariaDB, MySQL, Postgres and Redshift.
+ *
+ * <ul>
+ * <li>{@code default.url} - JDBC URL to connect to.</li>
+ * <li>{@code default.user} - JDBC user name.</li>
+ * <li>{@code default.password} - JDBC password.</li>
+ * <li>{@code default.driver.name} - JDBC driver name.</li>
+ * <li>{@code common.max.result} - Max number of SQL result to display.</li>
+ * </ul>
+ *
+ * <p>
+ * How to use: <br/>
+ * {@code %jdbc.sql} <br/>
+ * {@code
+ * SELECT store_id, count(*)
+ * FROM retail_demo.order_lineitems_pxf
+ * GROUP BY store_id;
+ * }
+ * </p>
+ *
+ */
+public class JDBCInterpreter extends Interpreter {
+
+ private Logger logger = LoggerFactory.getLogger(JDBCInterpreter.class);
+
+ static final String COMMON_KEY = "common";
+ static final String MAX_LINE_KEY = "max_count";
+ static final String MAX_LINE_DEFAULT = "1000";
+
+ static final String DEFAULT_KEY = "default";
+ static final String DRIVER_KEY = "driver";
+ static final String URL_KEY = "url";
+ static final String USER_KEY = "user";
+ static final String PASSWORD_KEY = "password";
+ static final String DOT = ".";
+
+ private static final char WHITESPACE = ' ';
+ private static final char NEWLINE = '\n';
+ private static final char TAB = '\t';
+ private static final String TABLE_MAGIC_TAG = "%table ";
+ private static final String EXPLAIN_PREDICATE = "EXPLAIN ";
+ private static final String UPDATE_COUNT_HEADER = "Update Count";
+
+ static final String COMMON_MAX_LINE = COMMON_KEY + DOT + MAX_LINE_KEY;
+
+ static final String DEFAULT_DRIVER = DEFAULT_KEY + DOT + DRIVER_KEY;
+ static final String DEFAULT_URL = DEFAULT_KEY + DOT + URL_KEY;
+ static final String DEFAULT_USER = DEFAULT_KEY + DOT + USER_KEY;
+ static final String DEFAULT_PASSWORD = DEFAULT_KEY + DOT + PASSWORD_KEY;
+
+ static final String EMPTY_COLUMN_VALUE = "";
+
+ private final HashMap<String, Properties> propertiesMap;
+ private final Map<String, Statement> paragraphIdStatementMap;
+
+ private final Map<String, ArrayList<Connection>>
propertyKeyUnusedConnectionListMap;
+ private final Map<String, Connection> paragraphIdConnectionMap;
+
+ static {
+ Interpreter.register(
+ "sql",
+ "jdbc",
+ JDBCInterpreter.class.getName(),
+ new InterpreterPropertyBuilder()
+ .add(DEFAULT_URL, "jdbc:postgresql://localhost:5432/", "The URL
for JDBC.")
+ .add(DEFAULT_USER, "gpadmin", "The JDBC user name")
+ .add(DEFAULT_PASSWORD, "",
+ "The JDBC user password")
+ .add(DEFAULT_DRIVER, "org.postgresql.Driver", "JDBC Driver Name")
+ .add(COMMON_MAX_LINE, MAX_LINE_DEFAULT,
+ "Max number of SQL result to display.").build());
+ }
+
+ public JDBCInterpreter(Properties property) {
+ super(property);
+ propertiesMap = new HashMap<>();
+ propertyKeyUnusedConnectionListMap = new HashMap<>();
+ paragraphIdStatementMap = new HashMap<>();
+ paragraphIdConnectionMap = new HashMap<>();
+ }
+
+ public HashMap<String, Properties> getPropertiesMap() {
+ return propertiesMap;
+ }
+
+ @Override
+ public void open() {
+ for (String propertyKey : property.stringPropertyNames()) {
+ logger.debug("propertyKey: {}", propertyKey);
+ String[] keyValue = propertyKey.split("\\.", 2);
+ if (2 == keyValue.length) {
+ logger.info("key: {}, value: {}", keyValue[0], keyValue[1]);
+ Properties prefixProperties;
+ if (propertiesMap.containsKey(keyValue[0])) {
+ prefixProperties = propertiesMap.get(keyValue[0]);
+ } else {
+ prefixProperties = new Properties();
+ propertiesMap.put(keyValue[0], prefixProperties);
+ }
+ prefixProperties.put(keyValue[1], property.getProperty(propertyKey));
+ }
+ }
+
+ Set<String> removeKeySet = new HashSet<>();
+ for (String key : propertiesMap.keySet()) {
+ if (!COMMON_KEY.equals(key)) {
+ Properties properties = propertiesMap.get(key);
+ if (!properties.containsKey(DRIVER_KEY) ||
!properties.containsKey(URL_KEY)) {
+ logger.error("{} will be ignored. {}.{} and {}.{} is mandatory.",
+ key, DRIVER_KEY, key, key, URL_KEY);
+ removeKeySet.add(key);
+ }
+ }
+ }
+
+ for (String key : removeKeySet) {
+ propertiesMap.remove(key);
+ }
+
+ logger.debug("propertiesMap: {}", propertiesMap);
+ }
+
+ public Connection getConnection(String propertyKey) throws
ClassNotFoundException, SQLException {
+ Connection connection = null;
+ if (propertyKeyUnusedConnectionListMap.containsKey(propertyKey)) {
+ ArrayList<Connection> connectionList =
propertyKeyUnusedConnectionListMap.get(propertyKey);
+ if (0 != connectionList.size()) {
+ connection =
propertyKeyUnusedConnectionListMap.get(propertyKey).remove(0);
+ if (null != connection && connection.isClosed()) {
+ connection.close();
+ connection = null;
+ }
+ }
+ }
+ if (null == connection) {
+ Properties properties = propertiesMap.get(propertyKey);
+ logger.info(properties.getProperty(DRIVER_KEY));
+ Class.forName(properties.getProperty(DRIVER_KEY));
+ String url = properties.getProperty(URL_KEY);
+ String user = properties.getProperty(USER_KEY);
+ String password = properties.getProperty(PASSWORD_KEY);
+ if (null != user && null != password) {
+ connection = DriverManager.getConnection(url, user, password);
+ } else {
+ connection = DriverManager.getConnection(url, properties);
+ }
+ }
+ return connection;
+ }
+
+ public Statement getStatement(String propertyKey, String paragraphId)
+ throws SQLException, ClassNotFoundException {
+ Connection connection;
+ if (paragraphIdConnectionMap.containsKey(paragraphId)) {
+ connection = paragraphIdConnectionMap.get(paragraphId);
+ } else {
+ connection = getConnection(propertyKey);
+ }
+
+ Statement statement = connection.createStatement();
+ if (isStatementClosed(statement)) {
+ connection = getConnection(propertyKey);
+ statement = connection.createStatement();
+ }
+ paragraphIdConnectionMap.put(paragraphId, connection);
+ paragraphIdStatementMap.put(paragraphId, statement);
+
+ return statement;
+ }
+
+ private boolean isStatementClosed(Statement statement) {
+ try {
+ return statement.isClosed();
+ } catch (Throwable t) {
+ logger.debug("{} doesn't support isClosed method", statement);
+ return false;
+ }
+ }
+
+ @Override
+ public void close() {
+
+ try {
+ for (List<Connection> connectionList :
propertyKeyUnusedConnectionListMap.values()) {
+ for (Connection c : connectionList) {
+ c.close();
+ }
+ }
+
+ for (Statement statement : paragraphIdStatementMap.values()) {
+ statement.close();
+ }
+ paragraphIdStatementMap.clear();
+
+ for (Connection connection : paragraphIdConnectionMap.values()) {
+ connection.close();
+ }
+ paragraphIdConnectionMap.clear();
+
+ } catch (SQLException e) {
+ logger.error("Error while closing...", e);
+ }
+ }
+
+ private InterpreterResult executeSql(String propertyKey, String sql,
+ InterpreterContext interpreterContext) {
+
+ String paragraphId = interpreterContext.getParagraphId();
+
+ try {
+
+ Statement statement = getStatement(propertyKey, paragraphId);
+ statement.setMaxRows(getMaxResult());
+
+ StringBuilder msg = null;
+ boolean isTableType = false;
+
+ if (containsIgnoreCase(sql, EXPLAIN_PREDICATE)) {
+ msg = new StringBuilder();
+ } else {
+ msg = new StringBuilder(TABLE_MAGIC_TAG);
+ isTableType = true;
+ }
+
+ ResultSet resultSet = null;
+ try {
+
+ boolean isResultSetAvailable = statement.execute(sql);
+
+ if (isResultSetAvailable) {
+ resultSet = statement.getResultSet();
+
+ ResultSetMetaData md = resultSet.getMetaData();
+
+ for (int i = 1; i < md.getColumnCount() + 1; i++) {
+ if (i > 1) {
+ msg.append(TAB);
+ }
+ msg.append(replaceReservedChars(isTableType, md.getColumnName(i)));
+ }
+ msg.append(NEWLINE);
+
+ int displayRowCount = 0;
+ while (resultSet.next() && displayRowCount < getMaxResult()) {
+ for (int i = 1; i < md.getColumnCount() + 1; i++) {
+ msg.append(replaceReservedChars(isTableType,
resultSet.getString(i)));
+ if (i != md.getColumnCount()) {
+ msg.append(TAB);
+ }
+ }
+ msg.append(NEWLINE);
+ displayRowCount++;
+ }
+ } else {
+ // Response contains either an update count or there are no results.
+ int updateCount = statement.getUpdateCount();
+ msg.append(UPDATE_COUNT_HEADER).append(NEWLINE);
+ msg.append(updateCount).append(NEWLINE);
+ }
+ } finally {
+ try {
+ if (resultSet != null) {
+ resultSet.close();
+ }
+ statement.close();
+ } finally {
+ statement = null;
+ }
+ }
+
+ return new InterpreterResult(Code.SUCCESS, msg.toString());
+
+ } catch (SQLException ex) {
+ logger.error("Cannot run " + sql, ex);
+ return new InterpreterResult(Code.ERROR, ex.getMessage());
+ } catch (ClassNotFoundException e) {
+ logger.error("Cannot run " + sql, e);
+ return new InterpreterResult(Code.ERROR, e.getMessage());
+ }
+ }
+
+ /**
+ * For %table response replace Tab and Newline characters from the content.
+ */
+ private String replaceReservedChars(boolean isTableResponseType, String str)
{
+ if (str == null) {
+ return EMPTY_COLUMN_VALUE;
+ }
+ return (!isTableResponseType) ? str : str.replace(TAB,
WHITESPACE).replace(NEWLINE, WHITESPACE);
+ }
+
+ @Override
+ public InterpreterResult interpret(String cmd, InterpreterContext
contextInterpreter) {
+ logger.info("Run SQL command '{}'", cmd);
+ String propertyKey = getPropertyKey(cmd);
+
+ if (null != propertyKey) {
+ cmd = cmd.substring(propertyKey.length() + 2);
+ } else {
+ propertyKey = DEFAULT_KEY;
+ }
+
+ cmd = cmd.trim();
+
+ logger.info("PropertyKey: {}, SQL command: '{}'", propertyKey, cmd);
+
+ return executeSql(propertyKey, cmd, contextInterpreter);
+ }
+
+ @Override
+ public void cancel(InterpreterContext context) {
+
+ logger.info("Cancel current query statement.");
+
+ String paragraphId = context.getParagraphId();
+ try {
+ paragraphIdStatementMap.get(paragraphId).cancel();
+ } catch (SQLException e) {
+ logger.error("Error while cancelling...", e);
+ }
+ }
+
+ public String getPropertyKey(String cmd) {
+ int firstLineIndex = cmd.indexOf("\n");
+ if (-1 == firstLineIndex) {
+ firstLineIndex = cmd.length();
+ }
+ int configStartIndex = cmd.indexOf("(");
+ int configLastIndex = cmd.indexOf(")");
+ if (configStartIndex != -1 && configLastIndex != -1
+ && configLastIndex < firstLineIndex && configLastIndex <
firstLineIndex) {
+ return cmd.substring(configStartIndex + 1, configLastIndex);
+ }
+ return null;
+ }
+
+ @Override
+ public FormType getFormType() {
+ return FormType.SIMPLE;
+ }
+
+ @Override
+ public int getProgress(InterpreterContext context) {
+ return 0;
+ }
+
+ @Override
+ public Scheduler getScheduler() {
+ return SchedulerFactory.singleton().createOrGetFIFOScheduler(
+ JDBCInterpreter.class.getName() + this.hashCode());
+ }
+
+ @Override
+ public List<String> completion(String buf, int cursor) {
+ return null;
+ }
+
+ public int getMaxResult() {
+ return Integer.valueOf(
+ propertiesMap.get(COMMON_KEY).getProperty(MAX_LINE_KEY,
MAX_LINE_DEFAULT));
+ }
+}
+
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
----------------------------------------------------------------------
diff --git
a/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
b/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
new file mode 100644
index 0000000..83e0507
--- /dev/null
+++ b/jdbc/src/test/java/org/apache/zeppelin/jdbc/JDBCInterpreterTest.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional
information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
+ * or implied. See the License for the specific language governing permissions
and limitations under
+ * the License.
+ */
+package org.apache.zeppelin.jdbc;
+
+import static java.lang.String.format;
+import static org.junit.Assert.assertEquals;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_KEY;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_DRIVER;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_PASSWORD;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_USER;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_URL;
+import static org.apache.zeppelin.jdbc.JDBCInterpreter.COMMON_MAX_LINE;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Properties;
+
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.jdbc.JDBCInterpreter;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter;
+/**
+ * JDBC interpreter unit tests
+ */
+public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
+
+ static String jdbcConnection;
+
+ private static String getJdbcConnection() throws IOException {
+ if(null == jdbcConnection) {
+ Path tmpDir = Files.createTempDirectory("h2-test-");
+ tmpDir.toFile().deleteOnExit();
+ jdbcConnection = format("jdbc:h2:%s", tmpDir);
+ }
+ return jdbcConnection;
+ }
+
+ @Before
+ public void setUp() throws Exception {
+
+ Class.forName("org.h2.Driver");
+ Connection connection = DriverManager.getConnection(getJdbcConnection());
+ Statement statement = connection.createStatement();
+ statement.execute(
+ "DROP TABLE IF EXISTS test_table; " +
+ "CREATE TABLE test_table(id varchar(255), name varchar(255));");
+ statement.execute(
+ "insert into test_table(id, name) values ('a', 'a_name'),('b',
'b_name');"
+ );
+ }
+
+ @Test
+ public void testDefaultProperties() throws SQLException {
+ JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(new Properties());
+
+ assertEquals("org.postgresql.Driver",
jdbcInterpreter.getProperty(DEFAULT_DRIVER));
+ assertEquals("jdbc:postgresql://localhost:5432/",
jdbcInterpreter.getProperty(DEFAULT_URL));
+ assertEquals("gpadmin", jdbcInterpreter.getProperty(DEFAULT_USER));
+ assertEquals("", jdbcInterpreter.getProperty(DEFAULT_PASSWORD));
+ assertEquals("1000", jdbcInterpreter.getProperty(COMMON_MAX_LINE));
+ }
+
+ @Test
+ public void testSelectQuery() throws SQLException, IOException {
+ Properties properties = new Properties();
+ properties.setProperty("common.max_count", "1000");
+ properties.setProperty("common.max_retry", "3");
+ properties.setProperty("default.driver", "org.h2.Driver");
+ properties.setProperty("default.url", getJdbcConnection());
+ properties.setProperty("default.user", "");
+ properties.setProperty("default.password", "");
+ JDBCInterpreter t = new JDBCInterpreter(properties);
+ t.open();
+
+ String sqlQuery = "select * from test_table";
+
+ InterpreterResult interpreterResult = t.interpret(sqlQuery, new
InterpreterContext("", "1", "","", null,null,null,null));
+
+ assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
+ assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
+ assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n",
interpreterResult.message());
+ }
+
+ @Test
+ public void testSelectQueryMaxResult() throws SQLException, IOException {
+
+ Properties properties = new Properties();
+ properties.setProperty("common.max_count", "1");
+ properties.setProperty("common.max_retry", "3");
+ properties.setProperty("default.driver", "org.h2.Driver");
+ properties.setProperty("default.url", getJdbcConnection());
+ properties.setProperty("default.user", "");
+ properties.setProperty("default.password", "");
+ JDBCInterpreter t = new JDBCInterpreter(properties);
+ t.open();
+
+ String sqlQuery = "select * from test_table";
+
+ InterpreterResult interpreterResult = t.interpret(sqlQuery, new
InterpreterContext("", "1", "","", null,null,null,null));
+
+ assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
+ assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
+ assertEquals("ID\tNAME\na\ta_name\n", interpreterResult.message());
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ead9637..925721e 100755
--- a/pom.xml
+++ b/pom.xml
@@ -93,7 +93,7 @@
<module>shell</module>
<module>hive</module>
<module>phoenix</module>
- <module>postgresql</module>
+ <module>jdbc</module>
<module>tajo</module>
<module>flink</module>
<module>ignite</module>
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/README.md
----------------------------------------------------------------------
diff --git a/postgresql/README.md b/postgresql/README.md
deleted file mode 100644
index 918abb9..0000000
--- a/postgresql/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-## PostgreSQL, HAWQ and Greenplum Interpreter for Apache Zeppelin
-
-This interpreter supports the following SQL engines:
-* [PostgreSQL](http://www.postgresql.org/) - OSS, Object-relational database
management system (ORDBMS)
-* [Apache HAWQ](http://pivotal.io/big-data/pivotal-hawq) - Powerful [Open
Source](https://wiki.apache.org/incubator/HAWQProposal) SQL-On-Hadoop engine.
-* [Greenplum](http://pivotal.io/big-data/pivotal-greenplum-database) - MPP
database built on open source PostgreSQL.
-
-The official documentation: [PostgreSQL,
HAWQ](https://zeppelin.incubator.apache.org/docs/interpreter/postgresql.html)
-
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/pom.xml
----------------------------------------------------------------------
diff --git a/postgresql/pom.xml b/postgresql/pom.xml
deleted file mode 100644
index daf27af..0000000
--- a/postgresql/pom.xml
+++ /dev/null
@@ -1,162 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Licensed to the Apache Software Foundation (ASF) under one or more
- ~ contributor license agreements. See the NOTICE file distributed with
- ~ this work for additional information regarding copyright ownership.
- ~ The ASF licenses this file to You under the Apache License, Version 2.0
- ~ (the "License"); you may not use this file except in compliance with
- ~ the License. You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <artifactId>zeppelin</artifactId>
- <groupId>org.apache.zeppelin</groupId>
- <version>0.6.0-incubating-SNAPSHOT</version>
- </parent>
-
- <groupId>org.apache.zeppelin</groupId>
- <artifactId>zeppelin-postgresql</artifactId>
- <packaging>jar</packaging>
- <version>0.6.0-incubating-SNAPSHOT</version>
- <name>Zeppelin: PostgreSQL interpreter</name>
- <url>http://www.apache.org</url>
-
- <properties>
- <postgresql.version>9.4-1201-jdbc41</postgresql.version>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.apache.zeppelin</groupId>
- <artifactId>zeppelin-interpreter</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </dependency>
-
- <dependency>
- <groupId>org.postgresql</groupId>
- <artifactId>postgresql</artifactId>
- <version>${postgresql.version}</version>
- </dependency>
-
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- </dependency>
-
- <dependency>
- <groupId>jline</groupId>
- <artifactId>jline</artifactId>
- <version>2.12.1</version>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <version>1.9.5</version>
- <scope>test</scope>
- </dependency>
-
- <dependency>
- <groupId>com.mockrunner</groupId>
- <artifactId>mockrunner-jdbc</artifactId>
- <version>1.0.8</version>
- <scope>test</scope>
- </dependency>
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <version>2.7</version>
- <configuration>
- <skip>true</skip>
- </configuration>
- </plugin>
-
- <plugin>
- <artifactId>maven-enforcer-plugin</artifactId>
- <version>1.3.1</version>
- <executions>
- <execution>
- <id>enforce</id>
- <phase>none</phase>
- </execution>
- </executions>
- </plugin>
-
- <plugin>
- <artifactId>maven-dependency-plugin</artifactId>
- <version>2.8</version>
- <executions>
- <execution>
- <id>copy-dependencies</id>
- <phase>package</phase>
- <goals>
- <goal>copy-dependencies</goal>
- </goals>
- <configuration>
-
<outputDirectory>${project.build.directory}/../../interpreter/psql</outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>false</overWriteSnapshots>
- <overWriteIfNewer>true</overWriteIfNewer>
- <includeScope>runtime</includeScope>
- </configuration>
- </execution>
- <execution>
- <id>copy-artifact</id>
- <phase>package</phase>
- <goals>
- <goal>copy</goal>
- </goals>
- <configuration>
-
<outputDirectory>${project.build.directory}/../../interpreter/psql</outputDirectory>
- <overWriteReleases>false</overWriteReleases>
- <overWriteSnapshots>false</overWriteSnapshots>
- <overWriteIfNewer>true</overWriteIfNewer>
- <includeScope>runtime</includeScope>
- <artifactItems>
- <artifactItem>
- <groupId>${project.groupId}</groupId>
- <artifactId>${project.artifactId}</artifactId>
- <version>${project.version}</version>
- <type>${project.packaging}</type>
- </artifactItem>
- </artifactItems>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/src/main/java/org/apache/zeppelin/postgresql/SqlCompleter.java
----------------------------------------------------------------------
diff --git
a/postgresql/src/main/java/org/apache/zeppelin/postgresql/SqlCompleter.java
b/postgresql/src/main/java/org/apache/zeppelin/postgresql/SqlCompleter.java
deleted file mode 100644
index 9d2857f..0000000
--- a/postgresql/src/main/java/org/apache/zeppelin/postgresql/SqlCompleter.java
+++ /dev/null
@@ -1,250 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
- * agreements. See the NOTICE file distributed with this work for additional
information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the
License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
- * or implied. See the License for the specific language governing permissions
and limitations under
- * the License.
- */
-package org.apache.zeppelin.postgresql;
-
-/*
- * This source file is based on code taken from SQLLine 1.0.2 See SQLLine
notice in LICENSE
- */
-import static org.apache.commons.lang.StringUtils.isBlank;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.sql.Connection;
-import java.sql.DatabaseMetaData;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.StringTokenizer;
-import java.util.TreeSet;
-import java.util.regex.Pattern;
-
-import jline.console.completer.ArgumentCompleter.ArgumentList;
-import jline.console.completer.ArgumentCompleter.WhitespaceArgumentDelimiter;
-import jline.console.completer.StringsCompleter;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.Sets;
-import com.google.common.collect.Sets.SetView;
-
-/**
- * SQL auto complete functionality for the PostgreSqlInterpreter.
- */
-public class SqlCompleter extends StringsCompleter {
-
- private static Logger logger = LoggerFactory.getLogger(SqlCompleter.class);
-
- /**
- * Delimiter that can split SQL statement in keyword list
- */
- private WhitespaceArgumentDelimiter sqlDelimiter = new
WhitespaceArgumentDelimiter() {
-
- private Pattern pattern = Pattern.compile("[\\.:;,]");
-
- @Override
- public boolean isDelimiterChar(CharSequence buffer, int pos) {
- return pattern.matcher("" + buffer.charAt(pos)).matches()
- || super.isDelimiterChar(buffer, pos);
- }
- };
-
- private Set<String> modelCompletions = new HashSet<String>();
-
- public SqlCompleter(Set<String> allCompletions, Set<String>
dataModelCompletions) {
- super(allCompletions);
- this.modelCompletions = dataModelCompletions;
- }
-
- @Override
- public int complete(String buffer, int cursor, List<CharSequence>
candidates) {
-
- if (isBlank(buffer) || (cursor > buffer.length() + 1)) {
- return -1;
- }
-
- // The delimiter breaks the buffer into separate words (arguments),
separated by the
- // white spaces.
- ArgumentList argumentList = sqlDelimiter.delimit(buffer, cursor);
- String argument = argumentList.getCursorArgument();
- // cursor in the selected argument
- int argumentPosition = argumentList.getArgumentPosition();
-
- if (isBlank(argument)) {
- int argumentsCount = argumentList.getArguments().length;
- if (argumentsCount <= 0 || ((buffer.length() + 2) < cursor)
- || sqlDelimiter.isDelimiterChar(buffer, cursor - 2)) {
- return -1;
- }
- argument = argumentList.getArguments()[argumentsCount - 1];
- argumentPosition = argument.length();
- }
-
- int complete = super.complete(argument, argumentPosition, candidates);
-
- logger.debug("complete:" + complete + ", size:" + candidates.size());
-
- return complete;
- }
-
- public void updateDataModelMetaData(Connection connection) {
-
- try {
- Set<String> newModelCompletions =
getDataModelMetadataCompletions(connection);
- logger.debug("New model metadata is:" +
Joiner.on(',').join(newModelCompletions));
-
- // Sets.difference(set1, set2) - returned set contains all elements that
are contained by set1
- // and not contained by set2. set2 may also contain elements not present
in set1; these are
- // simply ignored.
- SetView<String> removedCompletions = Sets.difference(modelCompletions,
newModelCompletions);
- logger.debug("Removed Model Completions: " +
Joiner.on(',').join(removedCompletions));
- this.getStrings().removeAll(removedCompletions);
-
- SetView<String> newCompletions = Sets.difference(newModelCompletions,
modelCompletions);
- logger.debug("New Completions: " + Joiner.on(',').join(newCompletions));
- this.getStrings().addAll(newCompletions);
-
- modelCompletions = newModelCompletions;
-
- } catch (SQLException e) {
- logger.error("Failed to update the metadata conmpletions", e);
- }
- }
-
- public static Set<String> getSqlKeywordsCompletions(Connection connection)
throws IOException,
- SQLException {
-
- // Add the default SQL completions
- String keywords =
- new BufferedReader(new InputStreamReader(
-
SqlCompleter.class.getResourceAsStream("/ansi.sql.keywords"))).readLine();
-
- DatabaseMetaData metaData = connection.getMetaData();
-
- // Add the driver specific SQL completions
- String driverSpecificKeywords =
- "/" + metaData.getDriverName().replace(" ", "-").toLowerCase() +
"-sql.keywords";
-
- logger.info("JDBC DriverName:" + driverSpecificKeywords);
-
- if (SqlCompleter.class.getResource(driverSpecificKeywords) != null) {
- String driverKeywords =
- new BufferedReader(new InputStreamReader(
-
SqlCompleter.class.getResourceAsStream(driverSpecificKeywords))).readLine();
- keywords += "," + driverKeywords.toUpperCase();
- }
-
- Set<String> completions = new TreeSet<String>();
-
-
- // Add the keywords from the current JDBC connection
- try {
- keywords += "," + metaData.getSQLKeywords();
- } catch (Exception e) {
- logger.debug("fail to get SQL key words from database metadata: " + e,
e);
- }
- try {
- keywords += "," + metaData.getStringFunctions();
- } catch (Exception e) {
- logger.debug("fail to get string function names from database metadata:
" + e, e);
- }
- try {
- keywords += "," + metaData.getNumericFunctions();
- } catch (Exception e) {
- logger.debug("fail to get numeric function names from database metadata:
" + e, e);
- }
- try {
- keywords += "," + metaData.getSystemFunctions();
- } catch (Exception e) {
- logger.debug("fail to get system function names from database metadata:
" + e, e);
- }
- try {
- keywords += "," + metaData.getTimeDateFunctions();
- } catch (Exception e) {
- logger.debug("fail to get time date function names from database
metadata: " + e, e);
- }
-
- // Also allow lower-case versions of all the keywords
- keywords += "," + keywords.toLowerCase();
-
- StringTokenizer tok = new StringTokenizer(keywords, ", ");
- while (tok.hasMoreTokens()) {
- completions.add(tok.nextToken());
- }
-
- return completions;
- }
-
- public static Set<String> getDataModelMetadataCompletions(Connection
connection)
- throws SQLException {
- Set<String> completions = new TreeSet<String>();
- getColumnNames(connection.getMetaData(), completions);
- getSchemaNames(connection.getMetaData(), completions);
- return completions;
- }
-
- private static void getColumnNames(DatabaseMetaData meta, Set<String> names)
throws SQLException {
-
- try {
- ResultSet columns = meta.getColumns(meta.getConnection().getCatalog(),
null, "%", "%");
- try {
-
- while (columns.next()) {
- // Add the following strings: (1) column name, (2) table name
- String name = columns.getString("TABLE_NAME");
- if (!isBlank(name)) {
- names.add(name);
- names.add(columns.getString("COLUMN_NAME"));
- // names.add(columns.getString("TABLE_NAME") + "." +
columns.getString("COLUMN_NAME"));
- }
- }
- } finally {
- columns.close();
- }
-
- logger.debug(Joiner.on(',').join(names));
- } catch (Throwable t) {
- logger.error("Failed to retrieve the column name", t);
- }
- }
-
- private static void getSchemaNames(DatabaseMetaData meta, Set<String> names)
throws SQLException {
-
- try {
- ResultSet schemas = meta.getSchemas();
- try {
- while (schemas.next()) {
- String schemaName = schemas.getString("TABLE_SCHEM");
- if (!isBlank(schemaName)) {
- names.add(schemaName + ".");
- }
- }
- } finally {
- schemas.close();
- }
- } catch (Throwable t) {
- logger.error("Failed to retrieve the column name", t);
- }
- }
-
- // test purpose only
- WhitespaceArgumentDelimiter getSqlDelimiter() {
- return this.sqlDelimiter;
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/src/main/resources/ansi.sql.keywords
----------------------------------------------------------------------
diff --git a/postgresql/src/main/resources/ansi.sql.keywords
b/postgresql/src/main/resources/ansi.sql.keywords
deleted file mode 100644
index 1f25a81..0000000
--- a/postgresql/src/main/resources/ansi.sql.keywords
+++ /dev/null
@@ -1 +0,0 @@
-ABSOLUTE,ACTION,ADD,ALL,ALLOCATE,ALTER,AND,ANY,ARE,AS,ASC,ASSERTION,AT,AUTHORIZATION,AVG,BEGIN,BETWEEN,BIT,BIT_LENGTH,BOTH,BY,CASCADE,CASCADED,CASE,CAST,CATALOG,CHAR,CHARACTER,CHAR_LENGTH,CHARACTER_LENGTH,CHECK,CLOSE,CLUSTER,COALESCE,COLLATE,COLLATION,COLUMN,COMMIT,CONNECT,CONNECTION,CONSTRAINT,CONSTRAINTS,CONTINUE,CONVERT,CORRESPONDING,COUNT,CREATE,CROSS,CURRENT,CURRENT_DATE,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_USER,CURSOR,DATE,DAY,DEALLOCATE,DEC,DECIMAL,DECLARE,DEFAULT,DEFERRABLE,DEFERRED,DELETE,DESC,DESCRIBE,DESCRIPTOR,DIAGNOSTICS,DISCONNECT,DISTINCT,DOMAIN,DOUBLE,DROP,ELSE,END,END-EXEC,ESCAPE,EXCEPT,EXCEPTION,EXEC,EXECUTE,EXISTS,EXTERNAL,EXTRACT,FALSE,FETCH,FIRST,FLOAT,FOR,FOREIGN,FOUND,FROM,FULL,GET,GLOBAL,GO,GOTO,GRANT,GROUP,HAVING,HOUR,IDENTITY,IMMEDIATE,IN,INDICATOR,INITIALLY,INNER,INPUT,INSENSITIVE,INSERT,INT,INTEGER,INTERSECT,INTERVAL,INTO,IS,ISOLATION,JOIN,KEY,LANGUAGE,LAST,LEADING,LEFT,LEVEL,LIKE,LOCAL,LOWER,MATCH,MAX,MIN,MINUTE,MODULE,MONTH,NAMES,NATIONAL,NATURAL,NCHA
R,NEXT,NO,NOT,NULL,NULLIF,NUMERIC,OCTET_LENGTH,OF,ON,ONLY,OPEN,OPTION,OR,ORDER,OUTER,OUTPUT,OVERLAPS,OVERWRITE,PAD,PARTIAL,PARTITION,POSITION,PRECISION,PREPARE,PRESERVE,PRIMARY,PRIOR,PRIVILEGES,PROCEDURE,PUBLIC,READ,REAL,REFERENCES,RELATIVE,RESTRICT,REVOKE,RIGHT,ROLLBACK,ROWS,SCHEMA,SCROLL,SECOND,SECTION,SELECT,SESSION,SESSION_USER,SET,SIZE,SMALLINT,SOME,SPACE,SQL,SQLCODE,SQLERROR,SQLSTATE,SUBSTRING,SUM,SYSTEM_USER,TABLE,TEMPORARY,THEN,TIME,TIMESTAMP,TIMEZONE_HOUR,TIMEZONE_MINUTE,TO,TRAILING,TRANSACTION,TRANSLATE,TRANSLATION,TRIM,TRUE,UNION,UNIQUE,UNKNOWN,UPDATE,UPPER,USAGE,USER,USING,VALUE,VALUES,VARCHAR,VARYING,VIEW,WHEN,WHENEVER,WHERE,WITH,WORK,WRITE,YEAR,ZONE,ADA,C,CATALOG_NAME,CHARACTER_SET_CATALOG,CHARACTER_SET_NAME,CHARACTER_SET_SCHEMA,CLASS_ORIGIN,COBOL,COLLATION_CATALOG,COLLATION_NAME,COLLATION_SCHEMA,COLUMN_NAME,COMMAND_FUNCTION,COMMITTED,CONDITION_NUMBER,CONNECTION_NAME,CONSTRAINT_CATALOG,CONSTRAINT_NAME,CONSTRAINT_SCHEMA,CURSOR_NAME,DATA,DATETIME_INTERVAL_CODE,DATETIME_I
NTERVAL_PRECISION,DYNAMIC_FUNCTION,FORTRAN,LENGTH,MESSAGE_LENGTH,MESSAGE_OCTET_LENGTH,MESSAGE_TEXT,MORE,MUMPS,NAME,NULLABLE,NUMBER,PASCAL,PLI,REPEATABLE,RETURNED_LENGTH,RETURNED_OCTET_LENGTH,RETURNED_SQLSTATE,ROW_COUNT,SCALE,SCHEMA_NAME,SERIALIZABLE,SERVER_NAME,SUBCLASS_ORIGIN,TABLE_NAME,TYPE,UNCOMMITTED,UNNAMED,LIMIT
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/src/main/resources/postgresql-native-driver-sql.keywords
----------------------------------------------------------------------
diff --git
a/postgresql/src/main/resources/postgresql-native-driver-sql.keywords
b/postgresql/src/main/resources/postgresql-native-driver-sql.keywords
deleted file mode 100644
index a857cbd..0000000
--- a/postgresql/src/main/resources/postgresql-native-driver-sql.keywords
+++ /dev/null
@@ -1 +0,0 @@
-A,ABORT,ABS,ABSENT,ABSOLUTE,ACCESS,ACCORDING,ACTION,ADA,ADD,ADMIN,AFTER,AGGREGATE,ALL,ALLOCATE,ALSO,ALTER,ALWAYS,ANALYSE,ANALYZE,AND,ANY,ARE,ARRAY,ARRAY_AGG,ARRAY_MAX_CARDINALITY,AS,ASC,ASENSITIVE,ASSERTION,ASSIGNMENT,ASYMMETRIC,AT,ATOMIC,ATTRIBUTE,ATTRIBUTES,AUTHORIZATION,AVG,BACKWARD,BASE64,BEFORE,BEGIN,BEGIN_FRAME,BEGIN_PARTITION,BERNOULLI,BETWEEN,BIGINT,BINARY,BIT,BIT_LENGTH,BLOB,BLOCKED,BOM,BOOLEAN,BOTH,BREADTH,BY,C,CACHE,CALL,CALLED,CARDINALITY,CASCADE,CASCADED,CASE,CAST,CATALOG,CATALOG_NAME,CEIL,CEILING,CHAIN,CHAR,CHARACTER,CHARACTERISTICS,CHARACTERS,CHARACTER_LENGTH,CHARACTER_SET_CATALOG,CHARACTER_SET_NAME,CHARACTER_SET_SCHEMA,CHAR_LENGTH,CHECK,CHECKPOINT,CLASS,CLASS_ORIGIN,CLOB,CLOSE,CLUSTER,COALESCE,COBOL,COLLATE,COLLATION,COLLATION_CATALOG,COLLATION_NAME,COLLATION_SCHEMA,COLLECT,COLUMN,COLUMNS,COLUMN_NAME,COMMAND_FUNCTION,COMMAND_FUNCTION_CODE,COMMENT,COMMENTS,COMMIT,COMMITTED,CONCURRENTLY,CONDITION,CONDITION_NUMBER,CONFIGURATION,CONNECT,CONNECTION,CONNECTION_NAME,CONSTRA
INT,CONSTRAINTS,CONSTRAINT_CATALOG,CONSTRAINT_NAME,CONSTRAINT_SCHEMA,CONSTRUCTOR,CONTAINS,CONTENT,CONTINUE,CONTROL,CONVERSION,CONVERT,COPY,CORR,CORRESPONDING,COST,COUNT,COVAR_POP,COVAR_SAMP,CREATE,CROSS,CSV,CUBE,CUME_DIST,CURRENT,CURRENT_CATALOG,CURRENT_DATE,CURRENT_DEFAULT_TRANSFORM_GROUP,CURRENT_PATH,CURRENT_ROLE,CURRENT_ROW,CURRENT_SCHEMA,CURRENT_TIME,CURRENT_TIMESTAMP,CURRENT_TRANSFORM_GROUP_FOR_TYPE,CURRENT_USER,CURSOR,CURSOR_NAME,CYCLE,DATA,DATABASE,DATALINK,DATE,DATETIME_INTERVAL_CODE,DATETIME_INTERVAL_PRECISION,DAY,DB,DEALLOCATE,DEC,DECIMAL,DECLARE,DEFAULT,DEFAULTS,DEFERRABLE,DEFERRED,DEFINED,DEFINER,DEGREE,DELETE,DELIMITER,DELIMITERS,DENSE_RANK,DEPTH,DEREF,DERIVED,DESC,DESCRIBE,DESCRIPTOR,DETERMINISTIC,DIAGNOSTICS,DICTIONARY,DISABLE,DISCARD,DISCONNECT,DISPATCH,DISTINCT,DLNEWCOPY,DLPREVIOUSCOPY,DLURLCOMPLETE,DLURLCOMPLETEONLY,DLURLCOMPLETEWRITE,DLURLPATH,DLURLPATHONLY,DLURLPATHWRITE,DLURLSCHEME,DLURLSERVER,DLVALUE,DO,DOCUMENT,DOMAIN,DOUBLE,DROP,DYNAMIC,DYNAMIC_FUNCTION,DYNAM
IC_FUNCTION_CODE,EACH,ELEMENT,ELSE,EMPTY,ENABLE,ENCODING,ENCRYPTED,END,END-EXEC,END_FRAME,END_PARTITION,ENFORCED,ENUM,EQUALS,ESCAPE,EVENT,EVERY,EXCEPT,EXCEPTION,EXCLUDE,EXCLUDING,EXCLUSIVE,EXEC,EXECUTE,EXISTS,EXP,EXPLAIN,EXPRESSION,EXTENSION,EXTERNAL,EXTRACT,FALSE,FAMILY,FETCH,FILE,FILTER,FINAL,FIRST,FIRST_VALUE,FLAG,FLOAT,FLOOR,FOLLOWING,FOR,FORCE,FOREIGN,FORTRAN,FORWARD,FOUND,FRAME_ROW,FREE,FREEZE,FROM,FS,FULL,FUNCTION,FUNCTIONS,FUSION,G,GENERAL,GENERATED,GET,GLOBAL,GO,GOTO,GRANT,GRANTED,GREATEST,GROUP,GROUPING,GROUPS,HANDLER,HAVING,HEADER,HEX,HIERARCHY,HOLD,HOUR,ID,IDENTITY,IF,IGNORE,ILIKE,IMMEDIATE,IMMEDIATELY,IMMUTABLE,IMPLEMENTATION,IMPLICIT,IMPORT,IN,INCLUDING,INCREMENT,INDENT,INDEX,INDEXES,INDICATOR,INHERIT,INHERITS,INITIALLY,INLINE,INNER,INOUT,INPUT,INSENSITIVE,INSERT,INSTANCE,INSTANTIABLE,INSTEAD,INT,INTEGER,INTEGRITY,INTERSECT,INTERSECTION,INTERVAL,INTO,INVOKER,IS,ISNULL,ISOLATION,JOIN,K,KEY,KEY_MEMBER,KEY_TYPE,LABEL,LAG,LANGUAGE,LARGE,LAST,LAST_VALUE,LATERAL,LC_COLLATE,L
C_CTYPE,LEAD,LEADING,LEAKPROOF,LEAST,LEFT,LENGTH,LEVEL,LIBRARY,LIKE,LIKE_REGEX,LIMIT,LINK,LISTEN,LN,LOAD,LOCAL,LOCALTIME,LOCALTIMESTAMP,LOCATION,LOCATOR,LOCK,LOWER,M,MAP,MAPPING,MATCH,MATCHED,MATERIALIZED,MAX,MAXVALUE,MAX_CARDINALITY,MEMBER,MERGE,MESSAGE_LENGTH,MESSAGE_OCTET_LENGTH,MESSAGE_TEXT,METHOD,MIN,MINUTE,MINVALUE,MOD,MODE,MODIFIES,MODULE,MONTH,MORE,MOVE,MULTISET,MUMPS,NAME,NAMES,NAMESPACE,NATIONAL,NATURAL,NCHAR,NCLOB,NESTING,NEW,NEXT,NFC,NFD,NFKC,NFKD,NIL,NO,NONE,NORMALIZE,NORMALIZED,NOT,NOTHING,NOTIFY,NOTNULL,NOWAIT,NTH_VALUE,NTILE,NULL,NULLABLE,NULLIF,NULLS,NUMBER,NUMERIC,OBJECT,OCCURRENCES_REGEX,OCTETS,OCTET_LENGTH,OF,OFF,OFFSET,OIDS,OLD,ON,ONLY,OPEN,OPERATOR,OPTION,OPTIONS,OR,ORDER,ORDERING,ORDINALITY,OTHERS,OUT,OUTER,OUTPUT,OVER,OVERLAPS,OVERLAY,OVERRIDING,OWNED,OWNER,P,PAD,PARAMETER,PARAMETER_MODE,PARAMETER_NAME,PARAMETER_ORDINAL_POSITION,PARAMETER_SPECIFIC_CATALOG,PARAMETER_SPECIFIC_NAME,PARAMETER_SPECIFIC_SCHEMA,PARSER,PARTIAL,PARTITION,PASCAL,PASSING,PASSTHROUGH,PAS
SWORD,PATH,PERCENT,PERCENTILE_CONT,PERCENTILE_DISC,PERCENT_RANK,PERIOD,PERMISSION,PLACING,PLANS,PLI,PORTION,POSITION,POSITION_REGEX,POWER,PRECEDES,PRECEDING,PRECISION,PREPARE,PREPARED,PRESERVE,PRIMARY,PRIOR,PRIVILEGES,PROCEDURAL,PROCEDURE,PROGRAM,PUBLIC,QUOTE,RANGE,RANK,READ,READS,REAL,REASSIGN,RECHECK,RECOVERY,RECURSIVE,REF,REFERENCES,REFERENCING,REFRESH,REGR_AVGX,REGR_AVGY,REGR_COUNT,REGR_INTERCEPT,REGR_R2,REGR_SLOPE,REGR_SXX,REGR_SXY,REGR_SYY,REINDEX,RELATIVE,RELEASE,RENAME,REPEATABLE,REPLACE,REPLICA,REQUIRING,RESET,RESPECT,RESTART,RESTORE,RESTRICT,RESULT,RETURN,RETURNED_CARDINALITY,RETURNED_LENGTH,RETURNED_OCTET_LENGTH,RETURNED_SQLSTATE,RETURNING,RETURNS,REVOKE,RIGHT,ROLE,ROLLBACK,ROLLUP,ROUTINE,ROUTINE_CATALOG,ROUTINE_NAME,ROUTINE_SCHEMA,ROW,ROWS,ROW_COUNT,ROW_NUMBER,RULE,SAVEPOINT,SCALE,SCHEMA,SCHEMA_NAME,SCOPE,SCOPE_CATALOG,SCOPE_NAME,SCOPE_SCHEMA,SCROLL,SEARCH,SECOND,SECTION,SECURITY,SELECT,SELECTIVE,SELF,SENSITIVE,SEQUENCE,SEQUENCES,SERIALIZABLE,SERVER,SERVER_NAME,SESSION,S
ESSION_USER,SET,SETOF,SETS,SHARE,SHOW,SIMILAR,SIMPLE,SIZE,SMALLINT,SNAPSHOT,SOME,SOURCE,SPACE,SPECIFIC,SPECIFICTYPE,SPECIFIC_NAME,SQL,SQLCODE,SQLERROR,SQLEXCEPTION,SQLSTATE,SQLWARNING,SQRT,STABLE,STANDALONE,START,STATE,STATEMENT,STATIC,STATISTICS,STDDEV_POP,STDDEV_SAMP,STDIN,STDOUT,STORAGE,STRICT,STRIP,STRUCTURE,STYLE,SUBCLASS_ORIGIN,SUBMULTISET,SUBSTRING,SUBSTRING_REGEX,SUCCEEDS,SUM,SYMMETRIC,SYSID,SYSTEM,SYSTEM_TIME,SYSTEM_USER,T,TABLE,TABLES,TABLESAMPLE,TABLESPACE,TABLE_NAME,TEMP,TEMPLATE,TEMPORARY,TEXT,THEN,TIES,TIME,TIMESTAMP,TIMEZONE_HOUR,TIMEZONE_MINUTE,TO,TOKEN,TOP_LEVEL_COUNT,TRAILING,TRANSACTION,TRANSACTIONS_COMMITTED,TRANSACTIONS_ROLLED_BACK,TRANSACTION_ACTIVE,TRANSFORM,TRANSFORMS,TRANSLATE,TRANSLATE_REGEX,TRANSLATION,TREAT,TRIGGER,TRIGGER_CATALOG,TRIGGER_NAME,TRIGGER_SCHEMA,TRIM,TRIM_ARRAY,TRUE,TRUNCATE,TRUSTED,TYPE,TYPES,UESCAPE,UNBOUNDED,UNCOMMITTED,UNDER,UNENCRYPTED,UNION,UNIQUE,UNKNOWN,UNLINK,UNLISTEN,UNLOGGED,UNNAMED,UNNEST,UNTIL,UNTYPED,UPDATE,UPPER,URI,USAGE,USER,
USER_DEFINED_TYPE_CATALOG,USER_DEFINED_TYPE_CODE,USER_DEFINED_TYPE_NAME,USER_DEFINED_TYPE_SCHEMA,USING,VACUUM,VALID,VALIDATE,VALIDATOR,VALUE,VALUES,VALUE_OF,VARBINARY,VARCHAR,VARIADIC,VARYING,VAR_POP,VAR_SAMP,VERBOSE,VERSION,VERSIONING,VIEW,VIEWS,VOLATILE,WHEN,WHENEVER,WHERE,WHITESPACE,WIDTH_BUCKET,WINDOW,WITH,WITHIN,WITHOUT,WORK,WRAPPER,WRITE,XML,XMLAGG,XMLATTRIBUTES,XMLBINARY,XMLCAST,XMLCOMMENT,XMLCONCAT,XMLDECLARATION,XMLDOCUMENT,XMLELEMENT,XMLEXISTS,XMLFOREST,XMLITERATE,XMLNAMESPACES,XMLPARSE,XMLPI,XMLQUERY,XMLROOT,XMLSCHEMA,XMLSERIALIZE,XMLTABLE,XMLTEXT,XMLVALIDATE,YEAR,YES,ZONE
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/src/test/java/org/apache/zeppelin/postgresql/PostgreSqlInterpreterTest.java
----------------------------------------------------------------------
diff --git
a/postgresql/src/test/java/org/apache/zeppelin/postgresql/PostgreSqlInterpreterTest.java
b/postgresql/src/test/java/org/apache/zeppelin/postgresql/PostgreSqlInterpreterTest.java
deleted file mode 100644
index 9c8eae1..0000000
---
a/postgresql/src/test/java/org/apache/zeppelin/postgresql/PostgreSqlInterpreterTest.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
- * agreements. See the NOTICE file distributed with this work for additional
information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the
License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
- * or implied. See the License for the specific language governing permissions
and limitations under
- * the License.
- */
-package org.apache.zeppelin.postgresql;
-
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.DEFAULT_JDBC_DRIVER_NAME;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.DEFAULT_JDBC_URL;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.DEFAULT_JDBC_USER_NAME;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.DEFAULT_JDBC_USER_PASSWORD;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.DEFAULT_MAX_RESULT;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.POSTGRESQL_SERVER_DRIVER_NAME;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.POSTGRESQL_SERVER_MAX_RESULT;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.POSTGRESQL_SERVER_PASSWORD;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.POSTGRESQL_SERVER_URL;
-import static
org.apache.zeppelin.postgresql.PostgreSqlInterpreter.POSTGRESQL_SERVER_USER;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.sql.SQLException;
-import java.util.Properties;
-
-import org.apache.zeppelin.interpreter.InterpreterResult;
-import org.junit.Before;
-import org.junit.Test;
-
-import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter;
-import com.mockrunner.jdbc.StatementResultSetHandler;
-import com.mockrunner.mock.jdbc.MockConnection;
-import com.mockrunner.mock.jdbc.MockResultSet;
-
-/**
- * PostgreSQL interpreter unit tests
- */
-public class PostgreSqlInterpreterTest extends BasicJDBCTestCaseAdapter {
-
- private PostgreSqlInterpreter psqlInterpreter = null;
- private MockResultSet result = null;
-
- @Before
- public void beforeTest() {
- MockConnection connection = getJDBCMockObjectFactory().getMockConnection();
-
- StatementResultSetHandler statementHandler =
connection.getStatementResultSetHandler();
- result = statementHandler.createResultSet();
- statementHandler.prepareGlobalResultSet(result);
-
- Properties properties = new Properties();
- properties.put(POSTGRESQL_SERVER_DRIVER_NAME, DEFAULT_JDBC_DRIVER_NAME);
- properties.put(POSTGRESQL_SERVER_URL, DEFAULT_JDBC_URL);
- properties.put(POSTGRESQL_SERVER_USER, DEFAULT_JDBC_USER_NAME);
- properties.put(POSTGRESQL_SERVER_PASSWORD, DEFAULT_JDBC_USER_PASSWORD);
- properties.put(POSTGRESQL_SERVER_MAX_RESULT, DEFAULT_MAX_RESULT);
-
- psqlInterpreter = spy(new PostgreSqlInterpreter(properties));
- when(psqlInterpreter.getJdbcConnection()).thenReturn(connection);
- }
-
- @Test
- public void testOpenCommandIndempotency() throws SQLException {
- // Ensure that an attempt to open new connection will clean any remaining
connections
- psqlInterpreter.open();
- psqlInterpreter.open();
- psqlInterpreter.open();
-
- verify(psqlInterpreter, times(3)).open();
- verify(psqlInterpreter, times(3)).close();
- }
-
- @Test
- public void testDefaultProperties() throws SQLException {
-
- PostgreSqlInterpreter psqlInterpreter = new PostgreSqlInterpreter(new
Properties());
-
- assertEquals(DEFAULT_JDBC_DRIVER_NAME,
- psqlInterpreter.getProperty(POSTGRESQL_SERVER_DRIVER_NAME));
- assertEquals(DEFAULT_JDBC_URL,
psqlInterpreter.getProperty(POSTGRESQL_SERVER_URL));
- assertEquals(DEFAULT_JDBC_USER_NAME,
psqlInterpreter.getProperty(POSTGRESQL_SERVER_USER));
- assertEquals(DEFAULT_JDBC_USER_PASSWORD,
- psqlInterpreter.getProperty(POSTGRESQL_SERVER_PASSWORD));
- assertEquals(DEFAULT_MAX_RESULT,
psqlInterpreter.getProperty(POSTGRESQL_SERVER_MAX_RESULT));
- }
-
- @Test
- public void testConnectionClose() throws SQLException {
-
- PostgreSqlInterpreter psqlInterpreter = spy(new PostgreSqlInterpreter(new
Properties()));
-
- when(psqlInterpreter.getJdbcConnection()).thenReturn(
- getJDBCMockObjectFactory().getMockConnection());
-
- psqlInterpreter.close();
-
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- verifyConnectionClosed();
- }
-
- @Test
- public void testStatementCancel() throws SQLException {
-
- PostgreSqlInterpreter psqlInterpreter = spy(new PostgreSqlInterpreter(new
Properties()));
-
- when(psqlInterpreter.getJdbcConnection()).thenReturn(
- getJDBCMockObjectFactory().getMockConnection());
-
- psqlInterpreter.cancel(null);
-
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- assertFalse("Cancel operation should not close the connection",
psqlInterpreter
- .getJdbcConnection().isClosed());
- }
-
- @Test
- public void testNullColumnResult() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1000);
-
- String sqlQuery = "select * from t";
-
- result.addColumn("col1", new String[] {"val11", null});
- result.addColumn("col2", new String[] {null, "val22"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
- assertEquals("col1\tcol2\nval11\t\n\tval22\n",
interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testSelectQuery() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1000);
-
- String sqlQuery = "select * from t";
-
- result.addColumn("col1", new String[] {"val11", "val12"});
- result.addColumn("col2", new String[] {"val21", "val22"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
- assertEquals("col1\tcol2\nval11\tval21\nval12\tval22\n",
interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testSelectQueryMaxResult() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1);
-
- String sqlQuery = "select * from t";
-
- result.addColumn("col1", new String[] {"val11", "val12"});
- result.addColumn("col2", new String[] {"val21", "val22"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
- assertEquals("col1\tcol2\nval11\tval21\n", interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testSelectQueryWithSpecialCharacters() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1000);
-
- String sqlQuery = "select * from t";
-
- result.addColumn("co\tl1", new String[] {"val11", "va\tl1\n2"});
- result.addColumn("co\nl2", new String[] {"v\nal21", "val\t22"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
- assertEquals("co l1\tco l2\nval11\tv al21\nva l1 2\tval 22\n",
interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testExplainQuery() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1000);
-
- String sqlQuery = "explain select * from t";
-
- result.addColumn("col1", new String[] {"val11", "val12"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TEXT, interpreterResult.type());
- assertEquals("col1\nval11\nval12\n", interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testExplainQueryWithSpecialCharachters() throws SQLException {
-
- when(psqlInterpreter.getMaxResult()).thenReturn(1000);
-
- String sqlQuery = "explain select * from t";
-
- result.addColumn("co\tl\n1", new String[] {"va\nl11", "va\tl\n12"});
-
- InterpreterResult interpreterResult = psqlInterpreter.interpret(sqlQuery,
null);
-
- assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
- assertEquals(InterpreterResult.Type.TEXT, interpreterResult.type());
- assertEquals("co\tl\n1\nva\nl11\nva\tl\n12\n",
interpreterResult.message());
-
- verifySQLStatementExecuted(sqlQuery);
- verifyAllResultSetsClosed();
- verifyAllStatementsClosed();
- }
-
- @Test
- public void testAutoCompletion() throws SQLException {
- psqlInterpreter.open();
- assertEquals(1, psqlInterpreter.completion("SEL", 0).size());
- assertEquals("SELECT ", psqlInterpreter.completion("SEL",
0).iterator().next());
- assertEquals(0, psqlInterpreter.completion("SEL", 100).size());
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/postgresql/src/test/java/org/apache/zeppelin/postgresql/SqlCompleterTest.java
----------------------------------------------------------------------
diff --git
a/postgresql/src/test/java/org/apache/zeppelin/postgresql/SqlCompleterTest.java
b/postgresql/src/test/java/org/apache/zeppelin/postgresql/SqlCompleterTest.java
deleted file mode 100644
index 1244476..0000000
---
a/postgresql/src/test/java/org/apache/zeppelin/postgresql/SqlCompleterTest.java
+++ /dev/null
@@ -1,197 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
contributor license
- * agreements. See the NOTICE file distributed with this work for additional
information regarding
- * copyright ownership. The ASF licenses this file to you under the Apache
License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the
License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express
- * or implied. See the License for the specific language governing permissions
and limitations under
- * the License.
- */
-package org.apache.zeppelin.postgresql;
-
-import static com.google.common.collect.Sets.newHashSet;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-import java.io.IOException;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.Set;
-
-import jline.console.completer.Completer;
-
-import org.junit.Before;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.base.Joiner;
-import com.google.common.collect.Sets;
-import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter;
-
-public class SqlCompleterTest extends BasicJDBCTestCaseAdapter {
-
- private Logger logger = LoggerFactory.getLogger(SqlCompleterTest.class);
-
- private final static Set<String> EMPTY = new HashSet<String>();
-
- private CompleterTester tester;
-
- private SqlCompleter sqlCompleter;
-
- @Before
- public void beforeTest() throws IOException, SQLException {
- Set<String> keywordsCompletions =
-
SqlCompleter.getSqlKeywordsCompletions(getJDBCMockObjectFactory().getMockConnection());
- Set<String> dataModelCompletions =
- SqlCompleter
-
.getDataModelMetadataCompletions(getJDBCMockObjectFactory().getMockConnection());
-
- sqlCompleter =
- new SqlCompleter(Sets.union(keywordsCompletions, dataModelCompletions),
- dataModelCompletions);
- tester = new CompleterTester(sqlCompleter);
- }
-
- @Test
- public void testAfterBufferEnd() {
- String buffer = "ORDER";
- // Up to 2 white spaces after the buffer end, the completer still uses the
last argument
- tester.buffer(buffer).from(0).to(buffer.length() +
1).expect(newHashSet("ORDER ")).test();
- // 2 white spaces or more behind the buffer end the completer returns
empty result
- tester.buffer(buffer).from(buffer.length() + 2).to(buffer.length() +
5).expect(EMPTY).test();
- }
-
- @Test
- public void testEdges() {
- String buffer = " ORDER ";
- tester.buffer(buffer).from(0).to(8).expect(newHashSet("ORDER ")).test();
- tester.buffer(buffer).from(9).to(15).expect(EMPTY).test();
- }
-
- @Test
- public void testMultipleWords() {
- String buffer = " SELE fro LIM";
- tester.buffer(buffer).from(0).to(6).expect(newHashSet("SELECT ")).test();
- tester.buffer(buffer).from(7).to(11).expect(newHashSet("from ")).test();
- tester.buffer(buffer).from(12).to(19).expect(newHashSet("LIMIT ")).test();
- tester.buffer(buffer).from(20).to(24).expect(EMPTY).test();
- }
-
- @Test
- public void testMultiLineBuffer() {
- String buffer = " \n SELE \n fro";
- tester.buffer(buffer).from(0).to(7).expect(newHashSet("SELECT ")).test();
- tester.buffer(buffer).from(8).to(14).expect(newHashSet("from ")).test();
- tester.buffer(buffer).from(15).to(17).expect(EMPTY).test();
- }
-
- @Test
- public void testMultipleCompletionSuggestions() {
- String buffer = " SU";
- tester.buffer(buffer).from(0).to(5).expect(newHashSet("SUBCLASS_ORIGIN",
"SUM", "SUBSTRING"))
- .test();
- tester.buffer(buffer).from(6).to(7).expect(EMPTY).test();
- }
-
- @Test
- public void testDotDelimiter() {
- String buffer = " order.select ";
- tester.buffer(buffer).from(4).to(7).expect(newHashSet("order ")).test();
- tester.buffer(buffer).from(8).to(15).expect(newHashSet("select ")).test();
- tester.buffer(buffer).from(16).to(17).expect(EMPTY).test();
- }
-
- @Test
- public void testSqlDelimiterCharacters() {
- assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("r.", 1));
- assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("SS;", 2));
- assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar(":", 0));
- assertTrue(sqlCompleter.getSqlDelimiter().isDelimiterChar("ttt,", 3));
- }
-
- public class CompleterTester {
-
- private Completer completer;
-
- private String buffer;
- private int fromCursor;
- private int toCursor;
- private Set<String> expectedCompletions;
-
- public CompleterTester(Completer completer) {
- this.completer = completer;
- }
-
- public CompleterTester buffer(String buffer) {
- this.buffer = buffer;
- return this;
- }
-
- public CompleterTester from(int fromCursor) {
- this.fromCursor = fromCursor;
- return this;
- }
-
- public CompleterTester to(int toCursor) {
- this.toCursor = toCursor;
- return this;
- }
-
- public CompleterTester expect(Set<String> expectedCompletions) {
- this.expectedCompletions = expectedCompletions;
- return this;
- }
-
- public void test() {
- for (int c = fromCursor; c <= toCursor; c++) {
- expectedCompletions(buffer, c, expectedCompletions);
- }
- }
-
- private void expectedCompletions(String buffer, int cursor, Set<String>
expected) {
-
- ArrayList<CharSequence> candidates = new ArrayList<CharSequence>();
-
- completer.complete(buffer, cursor, candidates);
-
- String explain = explain(buffer, cursor, candidates);
-
- logger.info(explain);
-
- assertEquals("Buffer [" + buffer.replace(" ", ".") + "] and Cursor[" +
cursor + "] "
- + explain, expected, newHashSet(candidates));
- }
-
- private String explain(String buffer, int cursor, ArrayList<CharSequence>
candidates) {
- StringBuffer sb = new StringBuffer();
-
- for (int i = 0; i <= Math.max(cursor, buffer.length()); i++) {
- if (i == cursor) {
- sb.append("(");
- }
- if (i >= buffer.length()) {
- sb.append("_");
- } else {
- if (Character.isWhitespace(buffer.charAt(i))) {
- sb.append(".");
- } else {
- sb.append(buffer.charAt(i));
- }
- }
- if (i == cursor) {
- sb.append(")");
- }
- }
- sb.append(" >> [").append(Joiner.on(",").join(candidates)).append("]");
-
- return sb.toString();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/404846f9/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
----------------------------------------------------------------------
diff --git
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
index edcf513..67bc86f 100755
---
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
+++
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/conf/ZeppelinConfiguration.java
@@ -409,10 +409,10 @@ public class ZeppelinConfiguration extends
XMLConfiguration {
+ "org.apache.zeppelin.lens.LensInterpreter,"
+ "org.apache.zeppelin.cassandra.CassandraInterpreter,"
+ "org.apache.zeppelin.geode.GeodeOqlInterpreter,"
- + "org.apache.zeppelin.postgresql.PostgreSqlInterpreter,"
+ "org.apache.zeppelin.kylin.KylinInterpreter,"
+ "org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter,"
- + "org.apache.zeppelin.scalding.ScaldingInterpreter"),
+ + "org.apache.zeppelin.scalding.ScaldingInterpreter,"
+ + "org.apache.zeppelin.jdbc.JDBCInterpreter"),
ZEPPELIN_INTERPRETER_DIR("zeppelin.interpreter.dir", "interpreter"),
ZEPPELIN_INTERPRETER_CONNECT_TIMEOUT("zeppelin.interpreter.connect.timeout",
30000),
ZEPPELIN_ENCODING("zeppelin.encoding", "UTF-8"),