Github user denalex commented on a diff in the pull request:
https://github.com/apache/hawq/pull/1353#discussion_r216064863
--- Diff:
pxf/pxf-jdbc/src/main/java/org/apache/hawq/pxf/plugins/jdbc/JdbcPlugin.java ---
@@ -19,95 +19,207 @@
* under the License.
*/
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.hawq.pxf.api.UserDataException;
+import org.apache.hawq.pxf.api.utilities.ColumnDescriptor;
import org.apache.hawq.pxf.api.utilities.InputData;
import org.apache.hawq.pxf.api.utilities.Plugin;
-import java.sql.*;
+import java.util.ArrayList;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.SQLTimeoutException;
+import java.sql.Statement;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
/**
- * This class resolves the jdbc connection parameter and manages the
opening and closing of the jdbc connection.
- * Implemented subclasses: {@link JdbcReadAccessor}.
+ * JDBC tables plugin (base class)
*
+ * Implemented subclasses: {@link JdbcAccessor}, {@link JdbcResolver}.
*/
public class JdbcPlugin extends Plugin {
- private static final Log LOG = LogFactory.getLog(JdbcPlugin.class);
-
- //jdbc connection parameters
- protected String jdbcDriver = null;
- protected String dbUrl = null;
- protected String user = null;
- protected String pass = null;
- protected String tblName = null;
- protected int batchSize = 100;
-
- //jdbc connection
- protected Connection dbConn = null;
- //database type, from DatabaseMetaData.getDatabaseProductName()
- protected String dbProduct = null;
-
/**
- * parse input data
+ * Class constructor
+ *
+ * @param input {@link InputData} provided by PXF
*
- * @param input the input data
- * @throws UserDataException if the request parameter is malformed
+ * @throws UserDataException if one of the required request parameters
is not set
*/
public JdbcPlugin(InputData input) throws UserDataException {
super(input);
+
jdbcDriver = input.getUserProperty("JDBC_DRIVER");
+ if (jdbcDriver == null) {
+ throw new UserDataException("JDBC_DRIVER is a required
parameter");
+ }
+
dbUrl = input.getUserProperty("DB_URL");
- user = input.getUserProperty("USER");
- pass = input.getUserProperty("PASS");
- String strBatch = input.getUserProperty("BATCH_SIZE");
- if (strBatch != null) {
- batchSize = Integer.parseInt(strBatch);
+ if (dbUrl == null) {
+ throw new UserDataException("DB_URL is a required parameter");
}
- if (jdbcDriver == null) {
- throw new UserDataException("JDBC_DRIVER must be set");
+ tableName = input.getDataSource();
+ if (tableName == null) {
+ throw new UserDataException("Data source must be provided");
}
- if (dbUrl == null) {
- throw new UserDataException("DB_URL must be set(read)");
+ /*
+ At the moment, when writing into some table, the table name is
+ concatenated with a special string that is necessary to write into
HDFS.
+ However, a raw table name is necessary in case of JDBC.
+ The correct table name is extracted here.
+ */
+ Matcher matcher = tableNamePattern.matcher(tableName);
+ if (matcher.matches()) {
+ inputData.setDataSource(matcher.group(1));
+ tableName = input.getDataSource();
}
- tblName = input.getDataSource();
- if (tblName == null) {
- throw new UserDataException("TABLE_NAME must be set as
DataSource.");
+ columns = inputData.getTupleDescription();
+ if (columns == null) {
+ throw new UserDataException("Tuple description must be
provided");
+ }
+
+ // This parameter is not required. The default value is null
+ user = input.getUserProperty("USER");
+ if (user != null) {
+ pass = input.getUserProperty("PASS");
+ }
+
+ // This parameter is not required. The default value is 0
+ String batchSizeRaw = input.getUserProperty("BATCH_SIZE");
+ if (batchSizeRaw != null) {
+ try {
+ batchSize = Integer.parseUnsignedInt(batchSizeRaw);
--- End diff --
this breaks compilation on JDK 1.7, since `Integer.parseUnsignedInt` was only added in Java 1.8.
I will make a trivial change (e.g. `Integer.parseInt` plus a non-negative check) to fix this before merging.
---