[07/10] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-19 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/c53d9ada/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 34292ba..fdfd75b 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -28,172 +28,119 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
+import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.ColumnAlreadyExistsException;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
 
+@RunWith(Parameterized.class)
+public class ViewIT extends SplitSystemCatalogIT {
 
-public class ViewIT extends BaseViewIT {
-   
-public ViewIT(boolean transactional) {
-   super(transactional);
-   }
-
-@Test
-public void testReadOnlyOnReadOnlyView() throws Exception {
-Connection earlierCon = DriverManager.getConnection(getUrl());
-Connection conn = DriverManager.getConnection(getUrl());
-String ddl = "CREATE TABLE " + fullTableName + " (k INTEGER NOT NULL 
PRIMARY KEY, v1 DATE) "+ tableDDLOptions;
-conn.createStatement().execute(ddl);
-String fullParentViewName = "V_" + generateUniqueName();
-ddl = "CREATE VIEW " + fullParentViewName + " (v2 VARCHAR) AS SELECT * 
FROM " + fullTableName + " WHERE k > 5";
-conn.createStatement().execute(ddl);
-try {
-conn.createStatement().execute("UPSERT INTO " + fullParentViewName 
+ " VALUES(1)");
-fail();
-} catch (ReadOnlyTableException e) {
-
-}
-for (int i = 0; i < 10; i++) {
-conn.createStatement().execute("UPSERT INTO " + fullTableName + " 
VALUES(" + i + ")");
-}
-conn.commit();
-
-analyzeTable(conn, fullParentViewName, transactional);
-
-List<KeyRange> splits = getAllSplits(conn, fullParentViewName);
-assertEquals(4, splits.size());
-
-int count = 0;
-ResultSet rs = conn.createStatement().executeQuery("SELECT k FROM " + fullTableName);
-while (rs.next()) {
-assertEquals(count++, rs.getInt(1));
-}
-assertEquals(10, count);
-
-count = 0;
-rs = conn.createStatement().executeQuery("SELECT k FROM " + fullParentViewName);
-while (rs.next()) {
-

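In the hunk above, ViewIT drops its BaseViewIT parent and the transactional-flag constructor and becomes a Parameterized test on top of SplitSystemCatalogIT. For orientation only, here is a minimal sketch of the JUnit 4 Parameterized pattern that the newly added imports (RunWith, Parameterized, Parameters, Arrays, Collection) wire up; the class, field, and parameter names are hypothetical and are not taken from the (truncated) Phoenix change:

    import java.util.Arrays;
    import java.util.Collection;

    import org.junit.Test;
    import org.junit.runner.RunWith;
    import org.junit.runners.Parameterized;
    import org.junit.runners.Parameterized.Parameters;

    @RunWith(Parameterized.class)
    public class ExampleParameterizedIT {

        // Value injected by the Parameterized runner for each run of the class.
        private final boolean transactional;

        // Hypothetical parameter matrix; the real ViewIT parameters are not visible in this hunk.
        @Parameters(name = "transactional={0}")
        public static Collection<Object[]> data() {
            return Arrays.asList(new Object[][] { { false }, { true } });
        }

        public ExampleParameterizedIT(boolean transactional) {
            this.transactional = transactional;
        }

        @Test
        public void testRunsOncePerParameter() {
            // Each @Test method executes once for every Object[] returned by data().
            System.out.println("transactional=" + transactional);
        }
    }

Each test method runs once per entry returned by data(), which is how a single IT class can cover several table/view configurations without per-test subclasses.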
[07/10] phoenix git commit: PHOENIX-3534 Support multi region SYSTEM.CATALOG table (Thomas D'Silva and Rahul Gidwani)

2018-07-19 Thread tdsilva
http://git-wip-us.apache.org/repos/asf/phoenix/blob/d56fd3c9/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
--
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
index 72dd26f..558b92e 100644
--- a/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/ViewIT.java
@@ -28,172 +28,119 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.io.IOException;
+import java.math.BigDecimal;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 
+import org.apache.curator.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.SimpleRegionObserver;
+import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
 import org.apache.phoenix.compile.QueryPlan;
+import org.apache.phoenix.exception.PhoenixIOException;
 import org.apache.phoenix.exception.SQLExceptionCode;
 import org.apache.phoenix.jdbc.PhoenixConnection;
+import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
+import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.query.KeyRange;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.schema.ColumnAlreadyExistsException;
+import org.apache.phoenix.schema.PColumn;
+import org.apache.phoenix.schema.PTable;
+import org.apache.phoenix.schema.PTableType;
 import org.apache.phoenix.schema.ReadOnlyTableException;
 import org.apache.phoenix.schema.TableNotFoundException;
+import org.apache.phoenix.util.MetaDataUtil;
 import org.apache.phoenix.util.PhoenixRuntime;
 import org.apache.phoenix.util.QueryUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
 import org.apache.phoenix.util.SchemaUtil;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.BeforeClass;
 import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
+import com.google.common.base.Predicate;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
 
+@RunWith(Parameterized.class)
+public class ViewIT extends SplitSystemCatalogIT {
 
-public class ViewIT extends BaseViewIT {
-   
-public ViewIT(boolean transactional) {
-   super(transactional);
-   }
-
-@Test
-public void testReadOnlyOnReadOnlyView() throws Exception {
-Connection earlierCon = DriverManager.getConnection(getUrl());
-Connection conn = DriverManager.getConnection(getUrl());
-String ddl = "CREATE TABLE " + fullTableName + " (k INTEGER NOT NULL 
PRIMARY KEY, v1 DATE) "+ tableDDLOptions;
-conn.createStatement().execute(ddl);
-String fullParentViewName = "V_" + generateUniqueName();
-ddl = "CREATE VIEW " + fullParentViewName + " (v2 VARCHAR) AS SELECT * 
FROM " + fullTableName + " WHERE k > 5";
-conn.createStatement().execute(ddl);
-try {
-conn.createStatement().execute("UPSERT INTO " + fullParentViewName 
+ " VALUES(1)");
-fail();
-} catch (ReadOnlyTableException e) {
-
-}
-for (int i = 0; i < 10; i++) {
-conn.createStatement().execute("UPSERT INTO " + fullTableName + " 
VALUES(" + i + ")");
-}
-conn.commit();
-
-analyzeTable(conn, fullParentViewName, transactional);
-
-List<KeyRange> splits = getAllSplits(conn, fullParentViewName);
-assertEquals(4, splits.size());
-
-int count = 0;
-ResultSet rs = conn.createStatement().executeQuery("SELECT k FROM " + fullTableName);
-while (rs.next()) {
-assertEquals(count++, rs.getInt(1));
-}
-assertEquals(10, count);
-
-count = 0;
-rs = conn.createStatement().executeQuery("SELECT k FROM " + fullParentViewName);
-  

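This copy of the diff (blob d56fd3c9) differs from the first copy (blob c53d9ada) only in its HBase client imports: the first targets the HBase 1.x API (HBaseAdmin, HTableDescriptor, HColumnDescriptor), this one the HBase 2.x API (Admin, TableDescriptorBuilder, ColumnFamilyDescriptorBuilder). As an illustration of that API difference only, and not code from the Phoenix patch, the same table-creation step might look like this in the two styles (table and column family names below are made up; compiled against the 2.x client, where the deprecated 1.x classes still exist):

    import java.io.IOException;

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Admin;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    public class DescriptorApiSketch {

        // HBase 1.x style: mutable HTableDescriptor/HColumnDescriptor objects.
        static void createTableOldStyle(Admin admin) throws IOException {
            HTableDescriptor table = new HTableDescriptor(TableName.valueOf("EXAMPLE_TABLE"));
            table.addFamily(new HColumnDescriptor("0"));
            admin.createTable(table);
        }

        // HBase 2.x style: immutable descriptors assembled through builders.
        static void createTableNewStyle(Admin admin) throws IOException {
            admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("EXAMPLE_TABLE"))
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("0"))
                    .build());
        }
    }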