Github user sohami commented on a diff in the pull request:

    https://github.com/apache/drill/pull/950#discussion_r140587424
  
    --- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/ssl/SSLConfig.java ---
    @@ -0,0 +1,325 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.drill.exec.ssl;
    +
    +import com.google.common.base.Preconditions;
    +import io.netty.handler.ssl.SslContext;
    +import io.netty.handler.ssl.SslProvider;
    +import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
    +import org.apache.drill.common.config.DrillConfig;
    +import org.apache.drill.common.exceptions.DrillException;
    +import org.apache.drill.exec.ExecConstants;
    +import org.apache.drill.exec.memory.BufferAllocator;
    +import org.apache.hadoop.conf.Configuration;
    +import org.apache.hadoop.security.ssl.SSLFactory;
    +
    +import javax.net.ssl.KeyManagerFactory;
    +import javax.net.ssl.SSLContext;
    +import javax.net.ssl.SSLEngine;
    +import javax.net.ssl.TrustManagerFactory;
    +import java.io.FileInputStream;
    +import java.io.InputStream;
    +import java.security.KeyStore;
    +import java.text.MessageFormat;
    +
    +public abstract class SSLConfig {
    +
    +  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SSLConfig.class);
    +
    +  public static final String DEFAULT_SSL_PROVIDER = "JDK"; // JDK or OPENSSL
    +  public static final String DEFAULT_SSL_PROTOCOL = "TLSv1.2";
    +  public static final int DEFAULT_SSL_HANDSHAKE_TIMEOUT_MS = 10 * 1000; // 10 seconds
    +
    +  protected final boolean httpsEnabled;
    +  protected final DrillConfig config;
    +  protected final Configuration hadoopConfig;
    +
    +  // Either the Netty SSL context or the JDK SSL context will be initialized.
    +  // The JDK SSL context is used iff the useSystemTrustStore setting is enabled.
    +  protected SslContext nettySslContext;
    +  protected SSLContext jdkSSlContext;
    +
    +  private static final boolean isWindows = System.getProperty("os.name").toLowerCase().indexOf("win") >= 0;
    +  private static final boolean isMacOs = System.getProperty("os.name").toLowerCase().indexOf("mac") >= 0;
    +
    +  public static final String HADOOP_SSL_CONF_TPL_KEY = "hadoop.ssl.{0}.conf";
    +  public static final String HADOOP_SSL_KEYSTORE_LOCATION_TPL_KEY = "ssl.{0}.keystore.location";
    +  public static final String HADOOP_SSL_KEYSTORE_PASSWORD_TPL_KEY = "ssl.{0}.keystore.password";
    +  public static final String HADOOP_SSL_KEYSTORE_TYPE_TPL_KEY = "ssl.{0}.keystore.type";
    +  public static final String HADOOP_SSL_KEYSTORE_KEYPASSWORD_TPL_KEY =
    +      "ssl.{0}.keystore.keypassword";
    +  public static final String HADOOP_SSL_TRUSTSTORE_LOCATION_TPL_KEY = "ssl.{0}.truststore.location";
    +  public static final String HADOOP_SSL_TRUSTSTORE_PASSWORD_TPL_KEY = "ssl.{0}.truststore.password";
    +  public static final String HADOOP_SSL_TRUSTSTORE_TYPE_TPL_KEY = "ssl.{0}.truststore.type";
    +
    +  public SSLConfig(DrillConfig config, Configuration hadoopConfig, SSLFactory.Mode mode)
    +      throws DrillException {
    +
    +    this.config = config;
    +    httpsEnabled =
    +        config.hasPath(ExecConstants.HTTP_ENABLE_SSL) && config.getBoolean(ExecConstants.HTTP_ENABLE_SSL);
    +    // For testing we will mock up a hadoop configuration, however for regular use, we find the actual hadoop config.
    +    boolean enableHadoopConfig = config.getBoolean(ExecConstants.SSL_USE_HADOOP_CONF);
    +    if (enableHadoopConfig && this instanceof SSLConfigServer) {
    +      if (hadoopConfig == null) {
    +        this.hadoopConfig = new Configuration(); // get hadoop configuration
    +      } else {
    +        this.hadoopConfig = hadoopConfig;
    +      }
    +      String hadoopSSLConfigFile =
    +          this.hadoopConfig.get(resolveHadoopPropertyName(HADOOP_SSL_CONF_TPL_KEY, mode));
    +      logger.debug("Using Hadoop configuration for SSL");
    +      logger.debug("Hadoop SSL configuration file: {}", 
hadoopSSLConfigFile);
    +      this.hadoopConfig.addResource(hadoopSSLConfigFile);
    +    } else {
    +      this.hadoopConfig = null;
    +    }
    +  }
    +
    +  protected String getConfigParam(String name, String hadoopName) {
    +    String value = "";
    +    if (hadoopConfig != null) {
    +      value = getHadoopConfigParam(hadoopName);
    +    }
    +    if (value.isEmpty() && config.hasPath(name)) {
    --- End diff ---
    
    Shouldn't we give preference to application-specific configuration over the
    Hadoop configuration? The Hadoop config will be the more general config for
    the whole ecosystem, and if any component in the ecosystem wants to override
    it with its own config, shouldn't that be allowed?
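    
    For illustration only, here is a minimal sketch of the precedence this
    comment suggests: look up the Drill (application) property first and fall
    back to the Hadoop property only when Drill does not provide a value. The
    class name and method shape below are hypothetical and not part of this PR;
    only DrillConfig.hasPath/getString and Hadoop's Configuration.get(name,
    default) are existing APIs.
    
        import org.apache.drill.common.config.DrillConfig;
        import org.apache.hadoop.conf.Configuration;
    
        // Hypothetical helper showing "application config wins" precedence.
        public class ConfigPrecedenceSketch {
    
          private final DrillConfig drillConfig;     // application-specific settings
          private final Configuration hadoopConfig;  // shared ecosystem settings, may be null
    
          public ConfigPrecedenceSketch(DrillConfig drillConfig, Configuration hadoopConfig) {
            this.drillConfig = drillConfig;
            this.hadoopConfig = hadoopConfig;
          }
    
          // The Drill value takes precedence; the Hadoop value is only a fallback.
          String getConfigParam(String drillName, String hadoopName) {
            if (drillConfig.hasPath(drillName)) {
              String value = drillConfig.getString(drillName);
              if (!value.isEmpty()) {
                return value;
              }
            }
            if (hadoopConfig != null) {
              return hadoopConfig.get(hadoopName, "");
            }
            return "";
          }
        }
    
    This is the reverse of the order in the diff above, where the Hadoop value
    is consulted first and the Drill property is only used when the Hadoop
    config does not supply one.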

