[ 
https://issues.apache.org/jira/browse/DRILL-8061?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17480568#comment-17480568
 ] 

ASF GitHub Bot commented on DRILL-8061:
---------------------------------------

luocooong commented on a change in pull request #2422:
URL: https://github.com/apache/drill/pull/2422#discussion_r790244115



##########
File path: 
contrib/storage-phoenix/src/test/java/org/apache/drill/exec/store/phoenix/secured/QueryServerEnvironment.java
##########
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store.phoenix.secured;
+
+import static org.apache.hadoop.hbase.HConstants.HBASE_DIR;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.net.InetAddress;
+import java.security.PrivilegedAction;
+import java.util.AbstractMap;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.hadoop.hbase.security.HBaseKerberosUtils;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.minikdc.MiniKdc;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.util.KerberosName;
+import org.apache.phoenix.query.ConfigurationFactory;
+import org.apache.phoenix.queryserver.QueryServerProperties;
+import org.apache.phoenix.queryserver.server.QueryServer;
+import org.apache.phoenix.util.InstanceResolver;
+import org.apache.phoenix.util.ThinClientUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Due to this bug, https://bugzilla.redhat.com/show_bug.cgi?id=668830, we need 
to use
+ * `localhost.localdomain` as host name when running these tests on Jenkins 
(Centos) but for Mac OS
+ * it should be `localhost` to pass. The reason is kerberos principals in these 
tests are looked up
+ * from /etc/hosts and a reverse DNS lookup of 127.0.0.1 is resolved to 
`localhost.localdomain`
+ * rather than `localhost` on Centos. KDC sees `localhost` != 
`localhost.localdomain` and as the
+ * result test fails with authentication error. It's also important to note 
these principals are
+ * shared between HDFS and HBase in this mini HBase cluster. Some more reading
+ * https://access.redhat.com/solutions/57330
+ */
+public class QueryServerEnvironment {
+  private static final Logger LOG = 
LoggerFactory.getLogger(QueryServerEnvironment.class);
+
+  private final File TEMP_DIR = new File(getTempDir());
+  private final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
+  private final List<File> USER_KEYTAB_FILES = new ArrayList<>();
+
+  private static final String LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
+  static final String LOGIN_USER;
+
+  static {
+    try {
+       System.setProperty("sun.security.krb5.debug", "true");
+      LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME = 
InetAddress.getByName("127.0.0.1").getCanonicalHostName();
+      String userName = System.getProperty("user.name");
+      LOGIN_USER = userName != null ? userName : "securecluster";
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private static final String SPNEGO_PRINCIPAL = "HTTP/" + 
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
+  private static final String PQS_PRINCIPAL = "phoenixqs/" + 
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
+  private static final String SERVICE_PRINCIPAL = LOGIN_USER + "/" + 
LOCAL_HOST_REVERSE_DNS_LOOKUP_NAME;
+  private File KEYTAB;
+
+  private MiniKdc KDC;
+  private HBaseTestingUtility UTIL = new HBaseTestingUtility();
+  private LocalHBaseCluster HBASE_CLUSTER;
+  private int NUM_CREATED_USERS;
+
+  private ExecutorService PQS_EXECUTOR;
+  private QueryServer PQS;
+  private int PQS_PORT;
+  private String PQS_URL;
+
+  private boolean tls;
+
+  private static String getTempDir() {
+    StringBuilder sb = new StringBuilder(32);
+    sb.append(System.getProperty("user.dir")).append(File.separator);
+    sb.append("target").append(File.separator);
+    sb.append(QueryServerEnvironment.class.getSimpleName());
+    sb.append("-").append(UUID.randomUUID());
+    return sb.toString();
+  }
+
+  public int getPqsPort() {
+    return PQS_PORT;
+  }
+
+  public String getPqsUrl() {
+    return PQS_URL;
+  }
+
+  public boolean getTls() {
+    return tls;
+  }
+
+  public HBaseTestingUtility getUtil() {
+    return UTIL;
+  }
+
+  public String getServicePrincipal() {
+    return SERVICE_PRINCIPAL;
+  }
+
+  public File getServiceKeytab() {
+    return KEYTAB;
+  }
+
+  private static void updateDefaultRealm() throws Exception {
+    // (at least) one other phoenix test triggers the caching of this field 
before the KDC is up
+    // which causes principal parsing to fail.
+    Field f = KerberosName.class.getDeclaredField("defaultRealm");
+    f.setAccessible(true);
+    // Default realm for MiniKDC
+    f.set(null, "EXAMPLE.COM");
+  }
+
+  private void createUsers(int numUsers) throws Exception {
+    assertNotNull("KDC is null, was setup method called?", KDC);
+    NUM_CREATED_USERS = numUsers;
+    for (int i = 1; i <= numUsers; i++) {
+      String principal = "user" + i;
+      File keytabFile = new File(KEYTAB_DIR, principal + ".keytab");
+      KDC.createPrincipal(keytabFile, principal);
+      USER_KEYTAB_FILES.add(keytabFile);
+    }
+  }
+
+  public Map.Entry<String, File> getUser(int offset) {
+    if (!(offset > 0 && offset <= NUM_CREATED_USERS)) {
+      throw new IllegalArgumentException();
+    }
+    return new AbstractMap.SimpleImmutableEntry<String, File>("user" + offset, 
USER_KEYTAB_FILES.get(offset - 1));
+  }
+
+  /**
+   * Setup the security configuration for hdfs.
+   */
+  private void setHdfsSecuredConfiguration(Configuration conf) throws 
Exception {
+    // Set principal+keytab configuration for HDFS
+    conf.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY,
+      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, 
KEYTAB.getAbsolutePath());
+    conf.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY,
+      SERVICE_PRINCIPAL + "@" + KDC.getRealm());
+    conf.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, 
KEYTAB.getAbsolutePath());
+    conf.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+      SPNEGO_PRINCIPAL + "@" + KDC.getRealm());
+    // Enable token access for HDFS blocks
+    conf.setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    // Only use HTTPS (required because we aren't using "secure" ports)
+    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, 
HttpConfig.Policy.HTTPS_ONLY.name());
+    // Bind on localhost for spnego to have a chance at working
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    conf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
+
+    // Generate SSL certs
+    File keystoresDir = new 
File(UTIL.getDataTestDir("keystore").toUri().getPath());
+    keystoresDir.mkdirs();
+    String sslConfDir = TlsUtil.getClasspathDir(QueryServerEnvironment.class);
+    TlsUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, conf, 
false);
+
+    // Magic flag to tell hdfs to not fail on using ports above 1024
+    conf.setBoolean("ignore.secure.ports.for.testing", true);
+  }
+
+  private static void ensureIsEmptyDirectory(File f) throws IOException {
+    if (f.exists()) {
+      if (f.isDirectory()) {
+        FileUtils.deleteDirectory(f);
+      } else {
+        assertTrue("Failed to delete keytab directory", f.delete());
+      }
+    }
+    assertTrue("Failed to create keytab directory", f.mkdirs());
+  }
+
+  /**
+   * Setup and start kerberosed, hbase
+   * @throws Exception
+   */
+  public QueryServerEnvironment(final Configuration confIn, int numberOfUsers, 
boolean tls)

Review comment:
       I have an idea: could we add comments at the top of all copied 
classes? For example:
   ```java
   public class QueryServerEnvironment { // This is a copy of {@link 
org.apache.phoenix.end2end.XXX}
   ```
   This would answer the same questions for new developers. Thank you.
   ```
   TlsUtil.java
   QueryServerEnvironment.java
   HttpParamImpersonationQueryServerIT.java
   ...
   ```




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: dev-unsubscr...@drill.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


> Add Impersonation Support for Phoenix
> -------------------------------------
>
>                 Key: DRILL-8061
>                 URL: https://issues.apache.org/jira/browse/DRILL-8061
>             Project: Apache Drill
>          Issue Type: Sub-task
>          Components: Storage - Other
>            Reporter: Cong Luo
>            Assignee: Vitalii Diravka
>            Priority: Major
>             Fix For: 1.20.0
>
>
> *Discussion:*
>  * [https://github.com/apache/drill/issues/2296]
> *Documentation:*
>  * [https://phoenix.apache.org/server.html#Impersonation] 
>  * [https://drill.apache.org/docs/configuring-user-impersonation]



--
This message was sent by Atlassian Jira
(v8.20.1#820001)

Reply via email to