Github user karanmehta93 commented on a diff in the pull request:

    https://github.com/apache/phoenix/pull/283#discussion_r153582196
  
    --- Diff: phoenix-core/src/it/java/org/apache/phoenix/end2end/BasePermissionsIT.java ---
    @@ -0,0 +1,635 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to you under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.phoenix.end2end;
    +
    +import com.google.common.base.Throwables;
    +import org.apache.commons.logging.Log;
    +import org.apache.commons.logging.LogFactory;
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.hbase.AuthUtil;
    +import org.apache.hadoop.hbase.HBaseTestingUtility;
    +import org.apache.hadoop.hbase.HConstants;
    +import org.apache.hadoop.hbase.TableName;
    +import org.apache.hadoop.hbase.security.AccessDeniedException;
    +import org.apache.hadoop.hbase.security.User;
    +import org.apache.hadoop.hbase.security.access.AccessControlClient;
    +import org.apache.hadoop.hbase.security.access.Permission;
    +import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
    +import org.apache.phoenix.query.BaseTest;
    +import org.apache.phoenix.query.QueryConstants;
    +import org.apache.phoenix.query.QueryServices;
    +import org.junit.After;
    +import org.junit.BeforeClass;
    +import org.junit.runner.RunWith;
    +import org.junit.runners.Parameterized;
    +
    +import java.io.IOException;
    +import java.lang.reflect.UndeclaredThrowableException;
    +import java.security.PrivilegedExceptionAction;
    +import java.sql.Connection;
    +import java.sql.DriverManager;
    +import java.sql.PreparedStatement;
    +import java.sql.ResultSet;
    +import java.sql.SQLException;
    +import java.sql.Statement;
    +import java.util.Arrays;
    +import java.util.Collection;
    +import java.util.Collections;
    +import java.util.HashSet;
    +import java.util.List;
    +import java.util.Properties;
    +import java.util.Set;
    +
    +import static org.junit.Assert.assertEquals;
    +import static org.junit.Assert.assertFalse;
    +import static org.junit.Assert.assertNotNull;
    +import static org.junit.Assert.assertTrue;
    +import static org.junit.Assert.fail;
    +
    +@RunWith(Parameterized.class)
    +public class BasePermissionsIT extends BaseTest {
    +
    +    private static final Log LOG = LogFactory.getLog(BasePermissionsIT.class);
    +
    +    static String SUPERUSER;
    +
    +    static HBaseTestingUtility testUtil;
    +    static final Set<String> PHOENIX_SYSTEM_TABLES = new HashSet<>(Arrays.asList(
    +            "SYSTEM.CATALOG", "SYSTEM.SEQUENCE", "SYSTEM.STATS", "SYSTEM.FUNCTION"));
    +
    +    static final Set<String> PHOENIX_SYSTEM_TABLES_IDENTIFIERS = new HashSet<>(Arrays.asList(
    +            "SYSTEM.\"CATALOG\"", "SYSTEM.\"SEQUENCE\"", "SYSTEM.\"STATS\"", "SYSTEM.\"FUNCTION\""));
    +
    +    static final String SYSTEM_SEQUENCE_IDENTIFIER =
    +            QueryConstants.SYSTEM_SCHEMA_NAME + "." + "\"" + PhoenixDatabaseMetaData.SYSTEM_SEQUENCE_TABLE+ "\"";
    +
    +    static final Set<String> PHOENIX_NAMESPACE_MAPPED_SYSTEM_TABLES = new HashSet<>(Arrays.asList(
    +            "SYSTEM:CATALOG", "SYSTEM:SEQUENCE", "SYSTEM:STATS", "SYSTEM:FUNCTION"));
    +
    +    // Create multiple users so that we can use Hadoop UGI to run tasks as various users.
    +    // Permissions can be granted or revoked by superusers and admins only.
    +    // DON'T use the Hadoop UserGroupInformation class to create test users, since HBase misses some of its functionality.
    +    // Instead, use the org.apache.hadoop.hbase.security.User class for testing purposes.
    +
    +    // Super User has all the access
    +    User superUser1 = null;
    +    User superUser2 = null;
    +
    +    // Regular users are granted and revoked permissions as needed
    +    User regularUser1 = null;
    +    User regularUser2 = null;
    +    User regularUser3 = null;
    +    User regularUser4 = null;
    +
    +    // A group user is equivalent to a regular user but belongs to a group.
    +    // Permissions granted to the group should affect this user.
    +    static final String GROUP_SYSTEM_ACCESS = "group_system_access";
    +    User groupUser = null;
    +
    +    // Unprivileged user doesn't have any access and is denied for every action.
    +    User unprivilegedUser = null;
    +
    +    static final int NUM_RECORDS = 5;
    +
    +    boolean isNamespaceMapped;
    +
    +    public BasePermissionsIT(final boolean isNamespaceMapped) throws Exception {
    +        this.isNamespaceMapped = isNamespaceMapped;
    +    }
    +
    +    @BeforeClass
    +    public static void doSetup() throws Exception {
    +        SUPERUSER = System.getProperty("user.name");
    +    }
    +
    +    void startNewMiniCluster() throws Exception {
    +        startNewMiniCluster(new Configuration());
    +    }
    +    
    +    void startNewMiniCluster(Configuration overrideConf) throws Exception{
    +        if (null != testUtil) {
    +            testUtil.shutdownMiniCluster();
    +            testUtil = null;
    +        }
    +
    +        testUtil = new HBaseTestingUtility();
    +
    +        Configuration config = testUtil.getConfiguration();
    +        enablePhoenixHBaseAuthorization(config);
    +        configureNamespacesOnServer(config);
    +        configureRandomHMasterPort(config);
    +        if (overrideConf != null) {
    +            config.addResource(overrideConf);
    +        }
    +
    +        testUtil.startMiniCluster(1);
    +        initializeUsers(testUtil.getConfiguration());
    +    }
    +
    +    private void initializeUsers(Configuration configuration) {
    +
    +        superUser1 = User.createUserForTesting(configuration, SUPERUSER, new String[0]);
    +        superUser2 = User.createUserForTesting(configuration, "superUser2", new String[0]);
    +
    +        regularUser1 = User.createUserForTesting(configuration, "regularUser1", new String[0]);
    +        regularUser2 = User.createUserForTesting(configuration, "regularUser2", new String[0]);
    +        regularUser3 = User.createUserForTesting(configuration, "regularUser3", new String[0]);
    +        regularUser4 = User.createUserForTesting(configuration, "regularUser4", new String[0]);
    +
    +        groupUser = User.createUserForTesting(testUtil.getConfiguration(), "groupUser", new String[] {GROUP_SYSTEM_ACCESS});
    +
    +        unprivilegedUser = User.createUserForTesting(configuration, "unprivilegedUser", new String[0]);
    +    }
    +
    +    private void configureRandomHMasterPort(Configuration config) {
    +        // Avoid multiple clusters trying to bind the master's info port (16010)
    +        config.setInt(HConstants.MASTER_INFO_PORT, -1);
    +    }
    +
    +    void enablePhoenixHBaseAuthorization(Configuration config) {
    +        config.set("hbase.superuser", SUPERUSER + "," + "superUser2");
    +        config.set("hbase.security.authorization", 
Boolean.TRUE.toString());
    +        config.set("hbase.security.exec.permission.checks", 
Boolean.TRUE.toString());
    +        config.set("hbase.coprocessor.master.classes",
    +                
"org.apache.hadoop.hbase.security.access.AccessController");
    +        config.set("hbase.coprocessor.region.classes",
    +                
"org.apache.hadoop.hbase.security.access.AccessController");
    +        config.set("hbase.coprocessor.regionserver.classes",
    +                
"org.apache.hadoop.hbase.security.access.AccessController");
    +
    +        config.set(QueryServices.PHOENIX_ACLS_ENABLED,"true");
    +
    +        config.set("hbase.regionserver.wal.codec", 
"org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec");
    +    }
    +
    +    void configureNamespacesOnServer(Configuration conf) {
    +        conf.set(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.toString(isNamespaceMapped));
    +    }
    +
    +    @Parameterized.Parameters(name = "isNamespaceMapped={0}") // name is used by failsafe as file name in reports
    +    public static Collection<Boolean> data() {
    +        return Arrays.asList(false, true);
    +    }
    +
    +    @After
    +    public void cleanup() throws Exception {
    +        if (testUtil != null) {
    +            testUtil.shutdownMiniCluster();
    +            testUtil = null;
    +        }
    +    }
    +
    +    public static HBaseTestingUtility getUtility(){
    +        return testUtil;
    +    }
    +
    --- End diff --
    
    As discussed yesterday, I am still keeping the old tests, which use the HBase API directly, while my tests will use Phoenix GRANT/REVOKE. The old tests verify whether a connection can be created and used with minimal permissions, and it is good to have them.
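    
    For illustration only (not part of this patch), here is a minimal sketch of the two approaches, using only classes already imported in the file above. TEST_TABLE and the use of regularUser1 are hypothetical, getUrl() is assumed to be BaseTest's JDBC URL helper, and exact HBase method signatures vary slightly between versions:
    
        // Old-style tests: grant table permissions directly through the HBase API.
        void grantBothWays() throws Throwable {
            try (org.apache.hadoop.hbase.client.Connection hbaseConn =
                    org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(testUtil.getConfiguration())) {
                AccessControlClient.grant(hbaseConn, TableName.valueOf("TEST_TABLE"), "regularUser1",
                        null, null, Permission.Action.READ, Permission.Action.WRITE);
            }
    
            // New-style tests: issue GRANT/REVOKE as Phoenix SQL over a JDBC connection.
            try (Connection conn = DriverManager.getConnection(getUrl());
                 Statement stmt = conn.createStatement()) {
                stmt.execute("GRANT 'RW' ON TEST_TABLE TO 'regularUser1'");
                stmt.execute("REVOKE ON TEST_TABLE FROM 'regularUser1'");
            }
        }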

