Repository: phoenix Updated Branches: refs/heads/4.x-HBase-0.98 618213be6 -> 8e309c2b4
PHOENIX-2030 CsvBulkLoadTool should use logical name of the table for output directory suffix (Rajeshbabu) Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/8e309c2b Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/8e309c2b Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/8e309c2b Branch: refs/heads/4.x-HBase-0.98 Commit: 8e309c2b4cd4bb8a2689fbaed9f9c7b348cbae31 Parents: 618213b Author: Rajeshbabu Chintaguntla <rajeshb...@apache.org> Authored: Thu Jun 25 01:17:49 2015 +0530 Committer: Rajeshbabu Chintaguntla <rajeshb...@apache.org> Committed: Thu Jun 25 01:17:49 2015 +0530 ---------------------------------------------------------------------- .../phoenix/mapreduce/CsvBulkLoadToolIT.java | 19 +++++++++++++++++++ .../phoenix/mapreduce/CsvBulkLoadTool.java | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e309c2b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java index 392395d..6bcc221 100644 --- a/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java +++ b/phoenix-core/src/it/java/org/apache/phoenix/mapreduce/CsvBulkLoadToolIT.java @@ -39,6 +39,7 @@ import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest; import org.apache.phoenix.jdbc.PhoenixDriver; import org.apache.phoenix.util.DateUtil; import org.apache.phoenix.util.PhoenixRuntime; +import org.apache.phoenix.util.QueryUtil; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -206,6 +207,8 @@ public class CsvBulkLoadToolIT { String ddl = "CREATE LOCAL INDEX TABLE6_IDX ON TABLE6 " 
+ " (FIRST_NAME ASC)"; stmt.execute(ddl); + ddl = "CREATE LOCAL INDEX TABLE6_IDX2 ON TABLE6 " + " (LAST_NAME ASC)"; + stmt.execute(ddl); FileSystem fs = FileSystem.get(hbaseTestUtil.getConfiguration()); FSDataOutputStream outputStream = fs.create(new Path("/tmp/input3.csv")); @@ -228,6 +231,22 @@ public class CsvBulkLoadToolIT { assertEquals("FirstName 2", rs.getString(2)); rs.close(); + rs = + stmt.executeQuery("EXPLAIN SELECT id, FIRST_NAME FROM TABLE6 where first_name='FirstName 2'"); + assertEquals( + "CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32768,'FirstName 2']\n" + + " SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs)); + rs.close(); + rs = stmt.executeQuery("SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'"); + assertTrue(rs.next()); + assertEquals(2, rs.getInt(1)); + assertEquals("LastName 2", rs.getString(2)); + rs.close(); + rs = + stmt.executeQuery("EXPLAIN SELECT id, LAST_NAME FROM TABLE6 where last_name='LastName 2'"); + assertEquals( + "CLIENT 1-CHUNK PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_TABLE6 [-32767,'LastName 2']\n" + + " SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs)); stmt.close(); } http://git-wip-us.apache.org/repos/asf/phoenix/blob/8e309c2b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java ---------------------------------------------------------------------- diff --git a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java index 9e27bac..5270277 100644 --- a/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java +++ b/phoenix-core/src/main/java/org/apache/phoenix/mapreduce/CsvBulkLoadTool.java @@ -265,7 +265,7 @@ public class CsvBulkLoadTool extends Configured implements Tool { JobManager.createThreadPoolExec(Integer.MAX_VALUE, 5, 20, useInstrumentedPool); try{ for (TargetTableRef table : tablesToBeLoaded) { - Path 
tablePath = new Path(outputPath, table.getPhysicalName()); + Path tablePath = new Path(outputPath, table.getLogicalName()); Configuration jobConf = new Configuration(conf); jobConf.set(CsvToKeyValueMapper.TABLE_NAME_CONFKEY, qualifiedTableName); if (qualifiedTableName.compareToIgnoreCase(table.getLogicalName()) != 0) {