http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoProjectPushDown.java
----------------------------------------------------------------------
diff --git a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoProjectPushDown.java b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoProjectPushDown.java
index 32666fc..848a6a7 100644
--- a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoProjectPushDown.java
+++ b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoProjectPushDown.java
@@ -20,11 +20,15 @@ package org.apache.drill.exec.store.mongo;
 import static org.apache.drill.TestBuilder.listOf;
 import static org.apache.drill.TestBuilder.mapOf;
 
+import org.apache.drill.categories.MongoStorageTest;
+import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 @Ignore("DRILL-3775")
+@Category({SlowTest.class, MongoStorageTest.class})
 public class TestMongoProjectPushDown extends MongoTestBase {
 
   /**

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoQueries.java
----------------------------------------------------------------------
diff --git a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoQueries.java b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoQueries.java
index fdbc2bc..d8043fd 100644
--- a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoQueries.java
+++ b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/TestMongoQueries.java
@@ -17,8 +17,12 @@
  */
 package org.apache.drill.exec.store.mongo;
 
+import org.apache.drill.categories.MongoStorageTest;
+import org.apache.drill.categories.SlowTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({SlowTest.class, MongoStorageTest.class})
 public class TestMongoQueries extends MongoTestBase {
 
   @Test

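Note: the org.apache.drill.categories.SlowTest and MongoStorageTest types
imported above are not shown in this message. Under JUnit 4 a category is just
a type used as a label; a minimal sketch (hypothetical — the actual
definitions live elsewhere in the patch) would look like:

  package org.apache.drill.categories;

  /** JUnit 4 category marker for long-running tests. */
  public interface SlowTest {
  }

  /** JUnit 4 category marker for tests that need a MongoDB instance. */
  public interface MongoStorageTest {
  }

A class or method tagged with @Category({SlowTest.class, MongoStorageTest.class})
can then be included or excluded by Surefire using the fully qualified names of
these marker types.
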
http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/pom.xml
----------------------------------------------------------------------
diff --git a/exec/java-exec/pom.xml b/exec/java-exec/pom.xml
index 33c12ff..06340d5 100644
--- a/exec/java-exec/pom.xml
+++ b/exec/java-exec/pom.xml
@@ -577,7 +577,7 @@
                   <includes>
                     <include>**/TestLargeFileCompilation.java</include>
                   </includes>
-                  <groups>org.apache.drill.test.SecondaryTest</groups>
+                  <groups>org.apache.drill.categories.SlowTest</groups>
                 </configuration>
               </execution>
             </executions>
@@ -782,9 +782,6 @@
               <excludes>
                 <exclude>**/TestLargeFileCompilation.java</exclude>
               </excludes>
-              <!-- Modern: include using the @Category annotation.
-                   See the Javadoc for SecondaryTest for details. -->
-              <excludedGroups>org.apache.drill.test.SecondaryTest</excludedGroups>
             </configuration>
           </execution>
         </executions>

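With the Surefire <groups>/<excludedGroups> configuration now pointing at the
new category classes, the slow tests can also be selected or skipped from the
command line. A sketch, assuming the standard Surefire groups/excludedGroups
user properties (exact module and invocation may differ):

  # run only the tests tagged @Category(SlowTest.class)
  mvn test -Dgroups=org.apache.drill.categories.SlowTest

  # default-style run that skips them
  mvn test -DexcludedGroups=org.apache.drill.categories.SlowTest
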
http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 2b32569..72a73fc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -33,229 +33,230 @@ import org.apache.drill.exec.server.options.TypeValidators.StringValidator;
 import org.apache.drill.exec.server.options.TypeValidators.MaxWidthValidator;
 import org.apache.drill.exec.testing.ExecutionControls;
 
-public interface ExecConstants {
-  String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
-  String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
-  String ZK_CONNECTION = "drill.exec.zk.connect";
-  String ZK_TIMEOUT = "drill.exec.zk.timeout";
-  String ZK_ROOT = "drill.exec.zk.root";
-  String ZK_REFRESH = "drill.exec.zk.refresh";
-  String BIT_RETRY_TIMES = "drill.exec.rpc.bit.server.retry.count";
-  String BIT_RETRY_DELAY = "drill.exec.rpc.bit.server.retry.delay";
-  String BIT_TIMEOUT = "drill.exec.bit.timeout" ;
-  String SERVICE_NAME = "drill.exec.cluster-id";
-  String INITIAL_BIT_PORT = "drill.exec.rpc.bit.server.port";
-  String INITIAL_DATA_PORT = "drill.exec.rpc.bit.server.dataport";
-  String BIT_RPC_TIMEOUT = "drill.exec.rpc.bit.timeout";
-  String INITIAL_USER_PORT = "drill.exec.rpc.user.server.port";
-  String USER_RPC_TIMEOUT = "drill.exec.rpc.user.timeout";
-  String METRICS_CONTEXT_NAME = "drill.exec.metrics.context";
-  String USE_IP_ADDRESS = "drill.exec.rpc.use.ip";
-  String CLIENT_RPC_THREADS = "drill.exec.rpc.user.client.threads";
-  String BIT_SERVER_RPC_THREADS = "drill.exec.rpc.bit.server.threads";
-  String USER_SERVER_RPC_THREADS = "drill.exec.rpc.user.server.threads";
-  String TRACE_DUMP_DIRECTORY = "drill.exec.trace.directory";
-  String TRACE_DUMP_FILESYSTEM = "drill.exec.trace.filesystem";
-  String TEMP_DIRECTORIES = "drill.exec.tmp.directories";
-  String TEMP_FILESYSTEM = "drill.exec.tmp.filesystem";
-  String INCOMING_BUFFER_IMPL = "drill.exec.buffer.impl";
+public final class ExecConstants {
+  private ExecConstants() {
+    // Don't allow instantiation
+  }
+
+  public static final String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
+  public static final String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
+  public static final String ZK_CONNECTION = "drill.exec.zk.connect";
+  public static final String ZK_TIMEOUT = "drill.exec.zk.timeout";
+  public static final String ZK_ROOT = "drill.exec.zk.root";
+  public static final String ZK_REFRESH = "drill.exec.zk.refresh";
+  public static final String BIT_RETRY_TIMES = 
"drill.exec.rpc.bit.server.retry.count";
+  public static final String BIT_RETRY_DELAY = 
"drill.exec.rpc.bit.server.retry.delay";
+  public static final String BIT_TIMEOUT = "drill.exec.bit.timeout" ;
+  public static final String SERVICE_NAME = "drill.exec.cluster-id";
+  public static final String INITIAL_BIT_PORT = 
"drill.exec.rpc.bit.server.port";
+  public static final String INITIAL_DATA_PORT = 
"drill.exec.rpc.bit.server.dataport";
+  public static final String BIT_RPC_TIMEOUT = "drill.exec.rpc.bit.timeout";
+  public static final String INITIAL_USER_PORT = 
"drill.exec.rpc.user.server.port";
+  public static final String USER_RPC_TIMEOUT = "drill.exec.rpc.user.timeout";
+  public static final String METRICS_CONTEXT_NAME = 
"drill.exec.metrics.context";
+  public static final String USE_IP_ADDRESS = "drill.exec.rpc.use.ip";
+  public static final String CLIENT_RPC_THREADS = 
"drill.exec.rpc.user.client.threads";
+  public static final String BIT_SERVER_RPC_THREADS = 
"drill.exec.rpc.bit.server.threads";
+  public static final String USER_SERVER_RPC_THREADS = 
"drill.exec.rpc.user.server.threads";
+  public static final String TRACE_DUMP_DIRECTORY = 
"drill.exec.trace.directory";
+  public static final String TRACE_DUMP_FILESYSTEM = 
"drill.exec.trace.filesystem";
+  public static final String TEMP_DIRECTORIES = "drill.exec.tmp.directories";
+  public static final String TEMP_FILESYSTEM = "drill.exec.tmp.filesystem";
+  public static final String INCOMING_BUFFER_IMPL = "drill.exec.buffer.impl";
   /** incoming buffer size (number of batches) */
-  String INCOMING_BUFFER_SIZE = "drill.exec.buffer.size";
-  String SPOOLING_BUFFER_DELETE = "drill.exec.buffer.spooling.delete";
-  String SPOOLING_BUFFER_MEMORY = "drill.exec.buffer.spooling.size";
-  String BATCH_PURGE_THRESHOLD = "drill.exec.sort.purge.threshold";
+  public static final String INCOMING_BUFFER_SIZE = "drill.exec.buffer.size";
+  public static final String SPOOLING_BUFFER_DELETE = 
"drill.exec.buffer.spooling.delete";
+  public static final String SPOOLING_BUFFER_MEMORY = 
"drill.exec.buffer.spooling.size";
+  public static final String BATCH_PURGE_THRESHOLD = 
"drill.exec.sort.purge.threshold";
 
   // Spill boot-time Options common to all spilling operators
   // (Each individual operator may override the common options)
 
-  String SPILL_FILESYSTEM = "drill.exec.spill.fs";
-  String SPILL_DIRS = "drill.exec.spill.directories";
+  public static final String SPILL_FILESYSTEM = "drill.exec.spill.fs";
+  public static final String SPILL_DIRS = "drill.exec.spill.directories";
 
   // External Sort Boot configuration
 
-  String EXTERNAL_SORT_TARGET_SPILL_BATCH_SIZE = 
"drill.exec.sort.external.spill.batch.size";
-  String EXTERNAL_SORT_SPILL_GROUP_SIZE = 
"drill.exec.sort.external.spill.group.size";
-  String EXTERNAL_SORT_SPILL_THRESHOLD = 
"drill.exec.sort.external.spill.threshold";
-  String EXTERNAL_SORT_SPILL_DIRS = 
"drill.exec.sort.external.spill.directories";
-  String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
-  String EXTERNAL_SORT_SPILL_FILE_SIZE = 
"drill.exec.sort.external.spill.file_size";
-  String EXTERNAL_SORT_MSORT_MAX_BATCHSIZE = 
"drill.exec.sort.external.msort.batch.maxsize";
-  String EXTERNAL_SORT_DISABLE_MANAGED = 
"drill.exec.sort.external.disable_managed";
-  String EXTERNAL_SORT_MERGE_LIMIT = "drill.exec.sort.external.merge_limit";
-  String EXTERNAL_SORT_SPILL_BATCH_SIZE = 
"drill.exec.sort.external.spill.spill_batch_size";
-  String EXTERNAL_SORT_MERGE_BATCH_SIZE = 
"drill.exec.sort.external.spill.merge_batch_size";
-  String EXTERNAL_SORT_MAX_MEMORY = "drill.exec.sort.external.mem_limit";
-  String EXTERNAL_SORT_BATCH_LIMIT = "drill.exec.sort.external.batch_limit";
+  public static final String EXTERNAL_SORT_TARGET_SPILL_BATCH_SIZE = 
"drill.exec.sort.external.spill.batch.size";
+  public static final String EXTERNAL_SORT_SPILL_GROUP_SIZE = 
"drill.exec.sort.external.spill.group.size";
+  public static final String EXTERNAL_SORT_SPILL_THRESHOLD = 
"drill.exec.sort.external.spill.threshold";
+  public static final String EXTERNAL_SORT_SPILL_DIRS = 
"drill.exec.sort.external.spill.directories";
+  public static final String EXTERNAL_SORT_SPILL_FILESYSTEM = 
"drill.exec.sort.external.spill.fs";
+  public static final String EXTERNAL_SORT_SPILL_FILE_SIZE = 
"drill.exec.sort.external.spill.file_size";
+  public static final String EXTERNAL_SORT_MSORT_MAX_BATCHSIZE = 
"drill.exec.sort.external.msort.batch.maxsize";
+  public static final String EXTERNAL_SORT_DISABLE_MANAGED = 
"drill.exec.sort.external.disable_managed";
+  public static final String EXTERNAL_SORT_MERGE_LIMIT = 
"drill.exec.sort.external.merge_limit";
+  public static final String EXTERNAL_SORT_SPILL_BATCH_SIZE = 
"drill.exec.sort.external.spill.spill_batch_size";
+  public static final String EXTERNAL_SORT_MERGE_BATCH_SIZE = 
"drill.exec.sort.external.spill.merge_batch_size";
+  public static final String EXTERNAL_SORT_MAX_MEMORY = 
"drill.exec.sort.external.mem_limit";
+  public static final String EXTERNAL_SORT_BATCH_LIMIT = 
"drill.exec.sort.external.batch_limit";
 
   // External Sort Runtime options
 
-  BooleanValidator EXTERNAL_SORT_DISABLE_MANAGED_OPTION = new 
BooleanValidator("exec.sort.disable_managed");
+  public static final BooleanValidator EXTERNAL_SORT_DISABLE_MANAGED_OPTION = 
new BooleanValidator("exec.sort.disable_managed");
 
   // Hash Aggregate Options
-
-  String HASHAGG_NUM_PARTITIONS_KEY = "exec.hashagg.num_partitions";
-  LongValidator HASHAGG_NUM_PARTITIONS_VALIDATOR = new 
RangeLongValidator(HASHAGG_NUM_PARTITIONS_KEY, 1, 128); // 1 means - no spilling
-  String HASHAGG_MAX_MEMORY_KEY = "exec.hashagg.mem_limit";
-  LongValidator HASHAGG_MAX_MEMORY_VALIDATOR = new 
RangeLongValidator(HASHAGG_MAX_MEMORY_KEY, 0, Integer.MAX_VALUE);
+  public static final String HASHAGG_NUM_PARTITIONS_KEY = 
"exec.hashagg.num_partitions";
+  public static final LongValidator HASHAGG_NUM_PARTITIONS_VALIDATOR = new 
RangeLongValidator(HASHAGG_NUM_PARTITIONS_KEY, 1, 128); // 1 means - no spilling
+  public static final String HASHAGG_MAX_MEMORY_KEY = "exec.hashagg.mem_limit";
+  public static final LongValidator HASHAGG_MAX_MEMORY_VALIDATOR = new 
RangeLongValidator(HASHAGG_MAX_MEMORY_KEY, 0, Integer.MAX_VALUE);
   // min batches is used for tuning (each partition needs so many batches when 
planning the number of partitions,
   // or reserve this number when calculating whether the remaining available 
memory is too small and requires a spill.)
   // Low value may OOM (e.g., when incoming rows become wider), higher values 
use fewer partitions but are safer
-  String HASHAGG_MIN_BATCHES_PER_PARTITION_KEY = 
"exec.hashagg.min_batches_per_partition";
-  LongValidator HASHAGG_MIN_BATCHES_PER_PARTITION_VALIDATOR = new 
RangeLongValidator(HASHAGG_MIN_BATCHES_PER_PARTITION_KEY, 1, 5);
+  public static final String HASHAGG_MIN_BATCHES_PER_PARTITION_KEY = 
"exec.hashagg.min_batches_per_partition";
+  public static final LongValidator 
HASHAGG_MIN_BATCHES_PER_PARTITION_VALIDATOR = new 
RangeLongValidator(HASHAGG_MIN_BATCHES_PER_PARTITION_KEY, 1, 5);
   // Can be turned off mainly for testing. Memory prediction is used to decide 
on when to spill to disk; with this option off,
   // spill would be triggered only by another mechanism -- "catch OOMs and 
then spill".
-  String HASHAGG_USE_MEMORY_PREDICTION_KEY = 
"exec.hashagg.use_memory_prediction";
-  BooleanValidator HASHAGG_USE_MEMORY_PREDICTION_VALIDATOR = new 
BooleanValidator(HASHAGG_USE_MEMORY_PREDICTION_KEY);
-
-  String HASHAGG_SPILL_DIRS = "drill.exec.hashagg.spill.directories";
-  String HASHAGG_SPILL_FILESYSTEM = "drill.exec.hashagg.spill.fs";
-  String HASHAGG_FALLBACK_ENABLED_KEY = "drill.exec.hashagg.fallback.enabled";
-  BooleanValidator HASHAGG_FALLBACK_ENABLED_VALIDATOR = new 
BooleanValidator(HASHAGG_FALLBACK_ENABLED_KEY);
-
-  String TEXT_LINE_READER_BATCH_SIZE = 
"drill.exec.storage.file.text.batch.size";
-  String TEXT_LINE_READER_BUFFER_SIZE = 
"drill.exec.storage.file.text.buffer.size";
-  String HAZELCAST_SUBNETS = "drill.exec.cache.hazel.subnets";
-  String HTTP_ENABLE = "drill.exec.http.enabled";
-  String HTTP_MAX_PROFILES = "drill.exec.http.max_profiles";
-  String HTTP_PORT = "drill.exec.http.port";
-  String HTTP_PORT_HUNT = "drill.exec.http.porthunt";
-  String HTTP_ENABLE_SSL = "drill.exec.http.ssl_enabled";
-  String HTTP_CORS_ENABLED = "drill.exec.http.cors.enabled";
-  String HTTP_CORS_ALLOWED_ORIGINS = "drill.exec.http.cors.allowedOrigins";
-  String HTTP_CORS_ALLOWED_METHODS = "drill.exec.http.cors.allowedMethods";
-  String HTTP_CORS_ALLOWED_HEADERS = "drill.exec.http.cors.allowedHeaders";
-  String HTTP_CORS_CREDENTIALS = "drill.exec.http.cors.credentials";
-  String HTTP_SESSION_MEMORY_RESERVATION = 
"drill.exec.http.session.memory.reservation";
-  String HTTP_SESSION_MEMORY_MAXIMUM = 
"drill.exec.http.session.memory.maximum";
-  String HTTP_SESSION_MAX_IDLE_SECS = "drill.exec.http.session_max_idle_secs";
-  String HTTP_KEYSTORE_PATH = "drill.exec.ssl.keyStorePath";
-  String HTTP_KEYSTORE_PASSWORD = "drill.exec.ssl.keyStorePassword";
-  String HTTP_TRUSTSTORE_PATH = "drill.exec.ssl.trustStorePath";
-  String HTTP_TRUSTSTORE_PASSWORD = "drill.exec.ssl.trustStorePassword";
-  String SYS_STORE_PROVIDER_CLASS = "drill.exec.sys.store.provider.class";
-  String SYS_STORE_PROVIDER_LOCAL_PATH = 
"drill.exec.sys.store.provider.local.path";
-  String SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE = 
"drill.exec.sys.store.provider.local.write";
-  String PROFILES_STORE_INMEMORY = "drill.exec.profiles.store.inmemory";
-  String PROFILES_STORE_CAPACITY = "drill.exec.profiles.store.capacity";
-  String IMPERSONATION_ENABLED = "drill.exec.impersonation.enabled";
-  String IMPERSONATION_MAX_CHAINED_USER_HOPS = 
"drill.exec.impersonation.max_chained_user_hops";
-  String AUTHENTICATION_MECHANISMS = "drill.exec.security.auth.mechanisms";
-  String USER_AUTHENTICATION_ENABLED = "drill.exec.security.user.auth.enabled";
-  String USER_AUTHENTICATOR_IMPL = "drill.exec.security.user.auth.impl";
-  String PAM_AUTHENTICATOR_PROFILES = 
"drill.exec.security.user.auth.pam_profiles";
-  String BIT_AUTHENTICATION_ENABLED = "drill.exec.security.bit.auth.enabled";
-  String BIT_AUTHENTICATION_MECHANISM = 
"drill.exec.security.bit.auth.mechanism";
-  String USE_LOGIN_PRINCIPAL = 
"drill.exec.security.bit.auth.use_login_principal";
-  String USER_ENCRYPTION_SASL_ENABLED = 
"drill.exec.security.user.encryption.sasl.enabled";
-  String USER_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = 
"drill.exec.security.user.encryption.sasl.max_wrapped_size";
-  String BIT_ENCRYPTION_SASL_ENABLED = 
"drill.exec.security.bit.encryption.sasl.enabled";
-  String BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = 
"drill.exec.security.bit.encryption.sasl.max_wrapped_size";
+  public static final String HASHAGG_USE_MEMORY_PREDICTION_KEY = 
"exec.hashagg.use_memory_prediction";
+  public static final BooleanValidator HASHAGG_USE_MEMORY_PREDICTION_VALIDATOR 
= new BooleanValidator(HASHAGG_USE_MEMORY_PREDICTION_KEY);
+
+  public static final String HASHAGG_SPILL_DIRS = 
"drill.exec.hashagg.spill.directories";
+  public static final String HASHAGG_SPILL_FILESYSTEM = 
"drill.exec.hashagg.spill.fs";
+  public static final String HASHAGG_FALLBACK_ENABLED_KEY = 
"drill.exec.hashagg.fallback.enabled";
+  public static final BooleanValidator HASHAGG_FALLBACK_ENABLED_VALIDATOR = 
new BooleanValidator(HASHAGG_FALLBACK_ENABLED_KEY);
+
+  public static final String TEXT_LINE_READER_BATCH_SIZE = 
"drill.exec.storage.file.text.batch.size";
+  public static final String TEXT_LINE_READER_BUFFER_SIZE = 
"drill.exec.storage.file.text.buffer.size";
+  public static final String HAZELCAST_SUBNETS = 
"drill.exec.cache.hazel.subnets";
+  public static final String HTTP_ENABLE = "drill.exec.http.enabled";
+  public static final String HTTP_MAX_PROFILES = 
"drill.exec.http.max_profiles";
+  public static final String HTTP_PORT = "drill.exec.http.port";
+  public static final String HTTP_PORT_HUNT = "drill.exec.http.porthunt";
+  public static final String HTTP_ENABLE_SSL = "drill.exec.http.ssl_enabled";
+  public static final String HTTP_CORS_ENABLED = 
"drill.exec.http.cors.enabled";
+  public static final String HTTP_CORS_ALLOWED_ORIGINS = 
"drill.exec.http.cors.allowedOrigins";
+  public static final String HTTP_CORS_ALLOWED_METHODS = 
"drill.exec.http.cors.allowedMethods";
+  public static final String HTTP_CORS_ALLOWED_HEADERS = 
"drill.exec.http.cors.allowedHeaders";
+  public static final String HTTP_CORS_CREDENTIALS = 
"drill.exec.http.cors.credentials";
+  public static final String HTTP_SESSION_MEMORY_RESERVATION = 
"drill.exec.http.session.memory.reservation";
+  public static final String HTTP_SESSION_MEMORY_MAXIMUM = 
"drill.exec.http.session.memory.maximum";
+  public static final String HTTP_SESSION_MAX_IDLE_SECS = 
"drill.exec.http.session_max_idle_secs";
+  public static final String HTTP_KEYSTORE_PATH = 
"drill.exec.ssl.keyStorePath";
+  public static final String HTTP_KEYSTORE_PASSWORD = 
"drill.exec.ssl.keyStorePassword";
+  public static final String HTTP_TRUSTSTORE_PATH = 
"drill.exec.ssl.trustStorePath";
+  public static final String HTTP_TRUSTSTORE_PASSWORD = 
"drill.exec.ssl.trustStorePassword";
+  public static final String SYS_STORE_PROVIDER_CLASS = 
"drill.exec.sys.store.provider.class";
+  public static final String SYS_STORE_PROVIDER_LOCAL_PATH = 
"drill.exec.sys.store.provider.local.path";
+  public static final String SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE = 
"drill.exec.sys.store.provider.local.write";
+  public static final String PROFILES_STORE_INMEMORY = 
"drill.exec.profiles.store.inmemory";
+  public static final String PROFILES_STORE_CAPACITY = 
"drill.exec.profiles.store.capacity";
+  public static final String IMPERSONATION_ENABLED = 
"drill.exec.impersonation.enabled";
+  public static final String IMPERSONATION_MAX_CHAINED_USER_HOPS = 
"drill.exec.impersonation.max_chained_user_hops";
+  public static final String AUTHENTICATION_MECHANISMS = 
"drill.exec.security.auth.mechanisms";
+  public static final String USER_AUTHENTICATION_ENABLED = 
"drill.exec.security.user.auth.enabled";
+  public static final String USER_AUTHENTICATOR_IMPL = 
"drill.exec.security.user.auth.impl";
+  public static final String PAM_AUTHENTICATOR_PROFILES = 
"drill.exec.security.user.auth.pam_profiles";
+  public static final String BIT_AUTHENTICATION_ENABLED = 
"drill.exec.security.bit.auth.enabled";
+  public static final String BIT_AUTHENTICATION_MECHANISM = 
"drill.exec.security.bit.auth.mechanism";
+  public static final String USE_LOGIN_PRINCIPAL = 
"drill.exec.security.bit.auth.use_login_principal";
+  public static final String USER_ENCRYPTION_SASL_ENABLED = 
"drill.exec.security.user.encryption.sasl.enabled";
+  public static final String USER_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = 
"drill.exec.security.user.encryption.sasl.max_wrapped_size";
+  public static final String BIT_ENCRYPTION_SASL_ENABLED = 
"drill.exec.security.bit.encryption.sasl.enabled";
+  public static final String BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = 
"drill.exec.security.bit.encryption.sasl.max_wrapped_size";
 
   /** Size of JDBC batch queue (in batches) above which throttling begins. */
-  String JDBC_BATCH_QUEUE_THROTTLING_THRESHOLD =
+  public static final String JDBC_BATCH_QUEUE_THROTTLING_THRESHOLD =
       "drill.jdbc.batch_queue_throttling_threshold";
   // Thread pool size for scan threads. Used by the Parquet scan.
-  String SCAN_THREADPOOL_SIZE = "drill.exec.scan.threadpool_size";
+  public static final String SCAN_THREADPOOL_SIZE = 
"drill.exec.scan.threadpool_size";
   // The size of the thread pool used by a scan to decode the data. Used by 
Parquet
-  String SCAN_DECODE_THREADPOOL_SIZE = 
"drill.exec.scan.decode_threadpool_size";
+  public static final String SCAN_DECODE_THREADPOOL_SIZE = 
"drill.exec.scan.decode_threadpool_size";
 
   /**
    * Currently if a query is cancelled, but one of the fragments reports the 
status as FAILED instead of CANCELLED or
    * FINISHED we report the query result as CANCELLED by swallowing the 
failures occurred in fragments. This BOOT
    * setting allows the user to see the query status as failure. Useful for 
developers/testers.
    */
-  String RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS =
-      "drill.exec.debug.return_error_for_failure_in_cancelled_fragments";
+  public static final String RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS = 
"drill.exec.debug.return_error_for_failure_in_cancelled_fragments";
 
-  String CLIENT_SUPPORT_COMPLEX_TYPES = "drill.client.supports-complex-types";
+  public static final String CLIENT_SUPPORT_COMPLEX_TYPES = 
"drill.client.supports-complex-types";
 
   /**
    * Configuration properties connected with dynamic UDFs support
    */
-  String UDF_RETRY_ATTEMPTS = "drill.exec.udf.retry-attempts";
-  String UDF_DIRECTORY_LOCAL = "drill.exec.udf.directory.local";
-  String UDF_DIRECTORY_FS = "drill.exec.udf.directory.fs";
-  String UDF_DIRECTORY_ROOT = "drill.exec.udf.directory.root";
-  String UDF_DIRECTORY_STAGING = "drill.exec.udf.directory.staging";
-  String UDF_DIRECTORY_REGISTRY = "drill.exec.udf.directory.registry";
-  String UDF_DIRECTORY_TMP = "drill.exec.udf.directory.tmp";
-  String UDF_DISABLE_DYNAMIC = "drill.exec.udf.disable_dynamic";
+  public static final String UDF_RETRY_ATTEMPTS = 
"drill.exec.udf.retry-attempts";
+  public static final String UDF_DIRECTORY_LOCAL = 
"drill.exec.udf.directory.local";
+  public static final String UDF_DIRECTORY_FS = "drill.exec.udf.directory.fs";
+  public static final String UDF_DIRECTORY_ROOT = 
"drill.exec.udf.directory.root";
+  public static final String UDF_DIRECTORY_STAGING = 
"drill.exec.udf.directory.staging";
+  public static final String UDF_DIRECTORY_REGISTRY = 
"drill.exec.udf.directory.registry";
+  public static final String UDF_DIRECTORY_TMP = 
"drill.exec.udf.directory.tmp";
+  public static final String UDF_DISABLE_DYNAMIC = 
"drill.exec.udf.disable_dynamic";
 
   /**
    * Local temporary directory is used as base for temporary storage of 
Dynamic UDF jars.
    */
-  String DRILL_TMP_DIR = "drill.tmp-dir";
+  public static final String DRILL_TMP_DIR = "drill.tmp-dir";
 
   /**
    * Temporary tables can be created ONLY in default temporary workspace.
    */
-  String DEFAULT_TEMPORARY_WORKSPACE = 
"drill.exec.default_temporary_workspace";
+  public static final String DEFAULT_TEMPORARY_WORKSPACE = 
"drill.exec.default_temporary_workspace";
 
-  String OUTPUT_FORMAT_OPTION = "store.format";
-  OptionValidator OUTPUT_FORMAT_VALIDATOR = new 
StringValidator(OUTPUT_FORMAT_OPTION);
-  String PARQUET_BLOCK_SIZE = "store.parquet.block-size";
-  String PARQUET_WRITER_USE_SINGLE_FS_BLOCK = 
"store.parquet.writer.use_single_fs_block";
-  OptionValidator PARQUET_WRITER_USE_SINGLE_FS_BLOCK_VALIDATOR = new 
BooleanValidator(
+  public static final String OUTPUT_FORMAT_OPTION = "store.format";
+  public static final OptionValidator OUTPUT_FORMAT_VALIDATOR = new 
StringValidator(OUTPUT_FORMAT_OPTION);
+  public static final String PARQUET_BLOCK_SIZE = "store.parquet.block-size";
+  public static final String PARQUET_WRITER_USE_SINGLE_FS_BLOCK = 
"store.parquet.writer.use_single_fs_block";
+  public static final OptionValidator 
PARQUET_WRITER_USE_SINGLE_FS_BLOCK_VALIDATOR = new BooleanValidator(
     PARQUET_WRITER_USE_SINGLE_FS_BLOCK);
-  OptionValidator PARQUET_BLOCK_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_BLOCK_SIZE, Integer.MAX_VALUE);
-  String PARQUET_PAGE_SIZE = "store.parquet.page-size";
-  OptionValidator PARQUET_PAGE_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_PAGE_SIZE, Integer.MAX_VALUE);
-  String PARQUET_DICT_PAGE_SIZE = "store.parquet.dictionary.page-size";
-  OptionValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_DICT_PAGE_SIZE, Integer.MAX_VALUE);
-  String PARQUET_WRITER_COMPRESSION_TYPE = "store.parquet.compression";
-  OptionValidator PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new 
EnumeratedStringValidator(
-      PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "gzip", "none");
-  String PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING = 
"store.parquet.enable_dictionary_encoding";
-  OptionValidator PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR = new 
BooleanValidator(
+  public static final OptionValidator PARQUET_BLOCK_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_BLOCK_SIZE, Integer.MAX_VALUE);
+  public static final String PARQUET_PAGE_SIZE = "store.parquet.page-size";
+  public static final OptionValidator PARQUET_PAGE_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_PAGE_SIZE, Integer.MAX_VALUE);
+  public static final String PARQUET_DICT_PAGE_SIZE = 
"store.parquet.dictionary.page-size";
+  public static final OptionValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new 
PositiveLongValidator(PARQUET_DICT_PAGE_SIZE, Integer.MAX_VALUE);
+  public static final String PARQUET_WRITER_COMPRESSION_TYPE = 
"store.parquet.compression";
+  public static final OptionValidator 
PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new EnumeratedStringValidator(
+    PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "gzip", "none");
+  public static final String PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING = 
"store.parquet.enable_dictionary_encoding";
+  public static final OptionValidator 
PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR = new BooleanValidator(
       PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
 
-  String PARQUET_VECTOR_FILL_THRESHOLD = "store.parquet.vector_fill_threshold";
-  OptionValidator PARQUET_VECTOR_FILL_THRESHOLD_VALIDATOR = new 
PositiveLongValidator(PARQUET_VECTOR_FILL_THRESHOLD, 99l);
-  String PARQUET_VECTOR_FILL_CHECK_THRESHOLD = 
"store.parquet.vector_fill_check_threshold";
-  OptionValidator PARQUET_VECTOR_FILL_CHECK_THRESHOLD_VALIDATOR = new 
PositiveLongValidator(PARQUET_VECTOR_FILL_CHECK_THRESHOLD, 100l);
-  String PARQUET_NEW_RECORD_READER = "store.parquet.use_new_reader";
-  OptionValidator PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR = new 
BooleanValidator(PARQUET_NEW_RECORD_READER);
-  String PARQUET_READER_INT96_AS_TIMESTAMP = 
"store.parquet.reader.int96_as_timestamp";
-  OptionValidator PARQUET_READER_INT96_AS_TIMESTAMP_VALIDATOR = new 
BooleanValidator(PARQUET_READER_INT96_AS_TIMESTAMP);
+  public static final String PARQUET_VECTOR_FILL_THRESHOLD = 
"store.parquet.vector_fill_threshold";
+  public static final OptionValidator PARQUET_VECTOR_FILL_THRESHOLD_VALIDATOR 
= new PositiveLongValidator(PARQUET_VECTOR_FILL_THRESHOLD, 99l);
+  public static final String PARQUET_VECTOR_FILL_CHECK_THRESHOLD = 
"store.parquet.vector_fill_check_threshold";
+  public static final OptionValidator 
PARQUET_VECTOR_FILL_CHECK_THRESHOLD_VALIDATOR = new 
PositiveLongValidator(PARQUET_VECTOR_FILL_CHECK_THRESHOLD, 100l);
+  public static final String PARQUET_NEW_RECORD_READER = 
"store.parquet.use_new_reader";
+  public static final OptionValidator 
PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR = new 
BooleanValidator(PARQUET_NEW_RECORD_READER);
+  public static final String PARQUET_READER_INT96_AS_TIMESTAMP = 
"store.parquet.reader.int96_as_timestamp";
+  public static final OptionValidator 
PARQUET_READER_INT96_AS_TIMESTAMP_VALIDATOR = new 
BooleanValidator(PARQUET_READER_INT96_AS_TIMESTAMP);
 
-  String PARQUET_PAGEREADER_ASYNC = "store.parquet.reader.pagereader.async";
-  OptionValidator PARQUET_PAGEREADER_ASYNC_VALIDATOR = new 
BooleanValidator(PARQUET_PAGEREADER_ASYNC);
+  public static final String PARQUET_PAGEREADER_ASYNC = 
"store.parquet.reader.pagereader.async";
+  public static final OptionValidator PARQUET_PAGEREADER_ASYNC_VALIDATOR = new 
BooleanValidator(PARQUET_PAGEREADER_ASYNC);
 
   // Number of pages the Async Parquet page reader will read before blocking
-  String PARQUET_PAGEREADER_QUEUE_SIZE = 
"store.parquet.reader.pagereader.queuesize";
-  OptionValidator PARQUET_PAGEREADER_QUEUE_SIZE_VALIDATOR = new  
PositiveLongValidator(PARQUET_PAGEREADER_QUEUE_SIZE, Integer.MAX_VALUE);
+  public static final String PARQUET_PAGEREADER_QUEUE_SIZE = 
"store.parquet.reader.pagereader.queuesize";
+  public static final OptionValidator PARQUET_PAGEREADER_QUEUE_SIZE_VALIDATOR 
= new  PositiveLongValidator(PARQUET_PAGEREADER_QUEUE_SIZE, Integer.MAX_VALUE);
 
-  String PARQUET_PAGEREADER_ENFORCETOTALSIZE = 
"store.parquet.reader.pagereader.enforceTotalSize";
-  OptionValidator PARQUET_PAGEREADER_ENFORCETOTALSIZE_VALIDATOR = new 
BooleanValidator(PARQUET_PAGEREADER_ENFORCETOTALSIZE);
+  public static final String PARQUET_PAGEREADER_ENFORCETOTALSIZE = 
"store.parquet.reader.pagereader.enforceTotalSize";
+  public static final OptionValidator 
PARQUET_PAGEREADER_ENFORCETOTALSIZE_VALIDATOR = new 
BooleanValidator(PARQUET_PAGEREADER_ENFORCETOTALSIZE);
 
-  String PARQUET_COLUMNREADER_ASYNC = 
"store.parquet.reader.columnreader.async";
-  OptionValidator PARQUET_COLUMNREADER_ASYNC_VALIDATOR = new 
BooleanValidator(PARQUET_COLUMNREADER_ASYNC);
+  public static final String PARQUET_COLUMNREADER_ASYNC = 
"store.parquet.reader.columnreader.async";
+  public static final OptionValidator PARQUET_COLUMNREADER_ASYNC_VALIDATOR = 
new BooleanValidator(PARQUET_COLUMNREADER_ASYNC);
 
   // Use a buffering reader for parquet page reader
-  String PARQUET_PAGEREADER_USE_BUFFERED_READ = 
"store.parquet.reader.pagereader.bufferedread";
-  OptionValidator PARQUET_PAGEREADER_USE_BUFFERED_READ_VALIDATOR = new  
BooleanValidator(PARQUET_PAGEREADER_USE_BUFFERED_READ);
+  public static final String PARQUET_PAGEREADER_USE_BUFFERED_READ = 
"store.parquet.reader.pagereader.bufferedread";
+  public static final OptionValidator 
PARQUET_PAGEREADER_USE_BUFFERED_READ_VALIDATOR = new  
BooleanValidator(PARQUET_PAGEREADER_USE_BUFFERED_READ);
 
   // Size in MiB of the buffer the Parquet page reader will use to read from 
disk. Default is 1 MiB
-  String PARQUET_PAGEREADER_BUFFER_SIZE = 
"store.parquet.reader.pagereader.buffersize";
-  OptionValidator PARQUET_PAGEREADER_BUFFER_SIZE_VALIDATOR = new  
LongValidator(PARQUET_PAGEREADER_BUFFER_SIZE);
+  public static final String PARQUET_PAGEREADER_BUFFER_SIZE = 
"store.parquet.reader.pagereader.buffersize";
+  public static final OptionValidator PARQUET_PAGEREADER_BUFFER_SIZE_VALIDATOR 
= new  LongValidator(PARQUET_PAGEREADER_BUFFER_SIZE);
 
   // try to use fadvise if available
-  String PARQUET_PAGEREADER_USE_FADVISE = 
"store.parquet.reader.pagereader.usefadvise";
-  OptionValidator PARQUET_PAGEREADER_USE_FADVISE_VALIDATOR = new  
BooleanValidator(PARQUET_PAGEREADER_USE_FADVISE);
-
-  OptionValidator COMPILE_SCALAR_REPLACEMENT = new 
BooleanValidator("exec.compile.scalar_replacement");
-
-  String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
-  BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = new 
BooleanValidator(JSON_ALL_TEXT_MODE);
-  BooleanValidator JSON_EXTENDED_TYPES = new 
BooleanValidator("store.json.extended_types");
-  BooleanValidator JSON_WRITER_UGLIFY = new 
BooleanValidator("store.json.writer.uglify");
-  BooleanValidator JSON_WRITER_SKIPNULLFIELDS = new 
BooleanValidator("store.json.writer.skip_null_fields");
-  String JSON_READER_SKIP_INVALID_RECORDS_FLAG = 
"store.json.reader.skip_invalid_records";
-  BooleanValidator JSON_SKIP_MALFORMED_RECORDS_VALIDATOR = new 
BooleanValidator(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
-  String JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG = 
"store.json.reader.print_skipped_invalid_record_number";
-  BooleanValidator JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG_VALIDATOR = 
new BooleanValidator(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
-  DoubleValidator TEXT_ESTIMATED_ROW_SIZE = new RangeDoubleValidator(
-      "store.text.estimated_row_size_bytes", 1, Long.MAX_VALUE);
+  public static final String PARQUET_PAGEREADER_USE_FADVISE = 
"store.parquet.reader.pagereader.usefadvise";
+  public static final OptionValidator PARQUET_PAGEREADER_USE_FADVISE_VALIDATOR 
= new  BooleanValidator(PARQUET_PAGEREADER_USE_FADVISE);
+
+  public static final OptionValidator COMPILE_SCALAR_REPLACEMENT = new 
BooleanValidator("exec.compile.scalar_replacement");
+
+  public static final String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
+  public static final BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = 
new BooleanValidator(JSON_ALL_TEXT_MODE);
+  public static final BooleanValidator JSON_EXTENDED_TYPES = new 
BooleanValidator("store.json.extended_types");
+  public static final BooleanValidator JSON_WRITER_UGLIFY = new 
BooleanValidator("store.json.writer.uglify");
+  public static final BooleanValidator JSON_WRITER_SKIPNULLFIELDS = new 
BooleanValidator("store.json.writer.skip_null_fields");
+  public static final String JSON_READER_SKIP_INVALID_RECORDS_FLAG = 
"store.json.reader.skip_invalid_records";
+  public static final BooleanValidator JSON_SKIP_MALFORMED_RECORDS_VALIDATOR = 
new BooleanValidator(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
+  public static final String JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG = 
"store.json.reader.print_skipped_invalid_record_number";
+  public static final BooleanValidator 
JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG_VALIDATOR = new 
BooleanValidator(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
+  public static final DoubleValidator TEXT_ESTIMATED_ROW_SIZE = new 
RangeDoubleValidator("store.text.estimated_row_size_bytes", 1, Long.MAX_VALUE);
 
   /**
    * The column label (for directory levels) in results when querying files in 
a directory
@@ -264,100 +265,97 @@ public interface ExecConstants {
    *                |-    bar  -  a.parquet
    *                |-    baz  -  b.parquet
    */
-  String FILESYSTEM_PARTITION_COLUMN_LABEL = 
"drill.exec.storage.file.partition.column.label";
-  OptionValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new 
StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL);
+  public static final String FILESYSTEM_PARTITION_COLUMN_LABEL = 
"drill.exec.storage.file.partition.column.label";
+  public static final OptionValidator 
FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new 
StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL);
 
   /**
    * Implicit file columns
    */
-  String IMPLICIT_FILENAME_COLUMN_LABEL = 
"drill.exec.storage.implicit.filename.column.label";
-  OptionValidator IMPLICIT_FILENAME_COLUMN_LABEL_VALIDATOR = new 
StringValidator(IMPLICIT_FILENAME_COLUMN_LABEL);
-  String IMPLICIT_SUFFIX_COLUMN_LABEL = 
"drill.exec.storage.implicit.suffix.column.label";
-  OptionValidator IMPLICIT_SUFFIX_COLUMN_LABEL_VALIDATOR = new 
StringValidator(IMPLICIT_SUFFIX_COLUMN_LABEL);
-  String IMPLICIT_FQN_COLUMN_LABEL = 
"drill.exec.storage.implicit.fqn.column.label";
-  OptionValidator IMPLICIT_FQN_COLUMN_LABEL_VALIDATOR = new 
StringValidator(IMPLICIT_FQN_COLUMN_LABEL);
-  String IMPLICIT_FILEPATH_COLUMN_LABEL = 
"drill.exec.storage.implicit.filepath.column.label";
-  OptionValidator IMPLICIT_FILEPATH_COLUMN_LABEL_VALIDATOR = new 
StringValidator(IMPLICIT_FILEPATH_COLUMN_LABEL);
-
-  String JSON_READ_NUMBERS_AS_DOUBLE = "store.json.read_numbers_as_double";
-  BooleanValidator JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new 
BooleanValidator(JSON_READ_NUMBERS_AS_DOUBLE);
-
-  String MONGO_ALL_TEXT_MODE = "store.mongo.all_text_mode";
-  OptionValidator MONGO_READER_ALL_TEXT_MODE_VALIDATOR = new 
BooleanValidator(MONGO_ALL_TEXT_MODE);
-  String MONGO_READER_READ_NUMBERS_AS_DOUBLE = 
"store.mongo.read_numbers_as_double";
-  OptionValidator MONGO_READER_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new 
BooleanValidator(MONGO_READER_READ_NUMBERS_AS_DOUBLE);
-  String MONGO_BSON_RECORD_READER = "store.mongo.bson.record.reader";
-  OptionValidator MONGO_BSON_RECORD_READER_VALIDATOR = new 
BooleanValidator(MONGO_BSON_RECORD_READER);
-
-  BooleanValidator ENABLE_UNION_TYPE = new 
BooleanValidator("exec.enable_union_type");
+  public static final String IMPLICIT_FILENAME_COLUMN_LABEL = 
"drill.exec.storage.implicit.filename.column.label";
+  public static final OptionValidator IMPLICIT_FILENAME_COLUMN_LABEL_VALIDATOR 
= new StringValidator(IMPLICIT_FILENAME_COLUMN_LABEL);
+  public static final String IMPLICIT_SUFFIX_COLUMN_LABEL = 
"drill.exec.storage.implicit.suffix.column.label";
+  public static final OptionValidator IMPLICIT_SUFFIX_COLUMN_LABEL_VALIDATOR = 
new StringValidator(IMPLICIT_SUFFIX_COLUMN_LABEL);
+  public static final String IMPLICIT_FQN_COLUMN_LABEL = 
"drill.exec.storage.implicit.fqn.column.label";
+  public static final OptionValidator IMPLICIT_FQN_COLUMN_LABEL_VALIDATOR = 
new StringValidator(IMPLICIT_FQN_COLUMN_LABEL);
+  public static final String IMPLICIT_FILEPATH_COLUMN_LABEL = 
"drill.exec.storage.implicit.filepath.column.label";
+  public static final OptionValidator IMPLICIT_FILEPATH_COLUMN_LABEL_VALIDATOR 
= new StringValidator(IMPLICIT_FILEPATH_COLUMN_LABEL);
+
+  public static final String JSON_READ_NUMBERS_AS_DOUBLE = 
"store.json.read_numbers_as_double";
+  public static final BooleanValidator JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR = 
new BooleanValidator(JSON_READ_NUMBERS_AS_DOUBLE);
+
+  public static final String MONGO_ALL_TEXT_MODE = "store.mongo.all_text_mode";
+  public static final OptionValidator MONGO_READER_ALL_TEXT_MODE_VALIDATOR = 
new BooleanValidator(MONGO_ALL_TEXT_MODE);
+  public static final String MONGO_READER_READ_NUMBERS_AS_DOUBLE = 
"store.mongo.read_numbers_as_double";
+  public static final OptionValidator 
MONGO_READER_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new 
BooleanValidator(MONGO_READER_READ_NUMBERS_AS_DOUBLE);
+  public static final String MONGO_BSON_RECORD_READER = 
"store.mongo.bson.record.reader";
+  public static final OptionValidator MONGO_BSON_RECORD_READER_VALIDATOR = new 
BooleanValidator(MONGO_BSON_RECORD_READER);
+
+  public static final BooleanValidator ENABLE_UNION_TYPE = new 
BooleanValidator("exec.enable_union_type");
 
   // TODO: We need to add a feature that enables storage plugins to add their 
own options. Currently we have to declare
   // in core which is not right. Move this option and above two mongo plugin 
related options once we have the feature.
-  String HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS = 
"store.hive.optimize_scan_with_native_readers";
-  OptionValidator HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR =
+  public static final String HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS = 
"store.hive.optimize_scan_with_native_readers";
+  public static final OptionValidator 
HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR =
       new BooleanValidator(HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS);
 
-  String SLICE_TARGET = "planner.slice_target";
-  long SLICE_TARGET_DEFAULT = 100000l;
-  PositiveLongValidator SLICE_TARGET_OPTION = new 
PositiveLongValidator(SLICE_TARGET, Long.MAX_VALUE
-  );
+  public static final String SLICE_TARGET = "planner.slice_target";
+  public static final long SLICE_TARGET_DEFAULT = 100000l;
+  public static final PositiveLongValidator SLICE_TARGET_OPTION = new 
PositiveLongValidator(SLICE_TARGET, Long.MAX_VALUE);
 
-  String CAST_TO_NULLABLE_NUMERIC = 
"drill.exec.functions.cast_empty_string_to_null";
-  BooleanValidator CAST_TO_NULLABLE_NUMERIC_OPTION = new 
BooleanValidator(CAST_TO_NULLABLE_NUMERIC);
+  public static final String CAST_TO_NULLABLE_NUMERIC = 
"drill.exec.functions.cast_empty_string_to_null";
+  public static final BooleanValidator CAST_TO_NULLABLE_NUMERIC_OPTION = new 
BooleanValidator(CAST_TO_NULLABLE_NUMERIC);
 
   /**
    * HashTable runtime settings
    */
-  String MIN_HASH_TABLE_SIZE_KEY = "exec.min_hash_table_size";
-  PositiveLongValidator MIN_HASH_TABLE_SIZE = new 
PositiveLongValidator(MIN_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
-  String MAX_HASH_TABLE_SIZE_KEY = "exec.max_hash_table_size";
-  PositiveLongValidator MAX_HASH_TABLE_SIZE = new 
PositiveLongValidator(MAX_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
+  public static final String MIN_HASH_TABLE_SIZE_KEY = 
"exec.min_hash_table_size";
+  public static final PositiveLongValidator MIN_HASH_TABLE_SIZE = new 
PositiveLongValidator(MIN_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
+  public static final String MAX_HASH_TABLE_SIZE_KEY = 
"exec.max_hash_table_size";
+  public static final PositiveLongValidator MAX_HASH_TABLE_SIZE = new 
PositiveLongValidator(MAX_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
 
   /**
    * Limits the maximum level of parallelization to this factor time the 
number of Drillbits
    */
-  String CPU_LOAD_AVERAGE_KEY = "planner.cpu_load_average";
-  DoubleValidator CPU_LOAD_AVERAGE = new DoubleValidator(CPU_LOAD_AVERAGE_KEY);
-  String MAX_WIDTH_PER_NODE_KEY = "planner.width.max_per_node";
-  MaxWidthValidator MAX_WIDTH_PER_NODE = new 
MaxWidthValidator(MAX_WIDTH_PER_NODE_KEY);
+  public static final String CPU_LOAD_AVERAGE_KEY = "planner.cpu_load_average";
+  public static final DoubleValidator CPU_LOAD_AVERAGE = new 
DoubleValidator(CPU_LOAD_AVERAGE_KEY);
+  public static final String MAX_WIDTH_PER_NODE_KEY = 
"planner.width.max_per_node";
+  public static final MaxWidthValidator MAX_WIDTH_PER_NODE = new 
MaxWidthValidator(MAX_WIDTH_PER_NODE_KEY);
 
   /**
    * The maximum level or parallelization any stage of the query can do. Note 
that while this
    * might be the number of active Drillbits, realistically, this could be 
well beyond that
    * number of we want to do things like speed results return.
    */
-  String MAX_WIDTH_GLOBAL_KEY = "planner.width.max_per_query";
-  OptionValidator MAX_WIDTH_GLOBAL = new 
PositiveLongValidator(MAX_WIDTH_GLOBAL_KEY, Integer.MAX_VALUE);
+  public static final String MAX_WIDTH_GLOBAL_KEY = 
"planner.width.max_per_query";
+  public static final OptionValidator MAX_WIDTH_GLOBAL = new 
PositiveLongValidator(MAX_WIDTH_GLOBAL_KEY, Integer.MAX_VALUE);
 
   /**
    * Factor by which a node with endpoint affinity will be favored while 
creating assignment
    */
-  String AFFINITY_FACTOR_KEY = "planner.affinity_factor";
-  OptionValidator AFFINITY_FACTOR = new DoubleValidator(AFFINITY_FACTOR_KEY);
+  public static final String AFFINITY_FACTOR_KEY = "planner.affinity_factor";
+  public static final OptionValidator AFFINITY_FACTOR = new 
DoubleValidator(AFFINITY_FACTOR_KEY);
 
-  String EARLY_LIMIT0_OPT_KEY = "planner.enable_limit0_optimization";
-  BooleanValidator EARLY_LIMIT0_OPT = new 
BooleanValidator(EARLY_LIMIT0_OPT_KEY);
+  public static final String EARLY_LIMIT0_OPT_KEY = 
"planner.enable_limit0_optimization";
+  public static final BooleanValidator EARLY_LIMIT0_OPT = new 
BooleanValidator(EARLY_LIMIT0_OPT_KEY);
 
-  String ENABLE_MEMORY_ESTIMATION_KEY = 
"planner.memory.enable_memory_estimation";
-  OptionValidator ENABLE_MEMORY_ESTIMATION = new 
BooleanValidator(ENABLE_MEMORY_ESTIMATION_KEY);
+  public static final String ENABLE_MEMORY_ESTIMATION_KEY = 
"planner.memory.enable_memory_estimation";
+  public static final OptionValidator ENABLE_MEMORY_ESTIMATION = new 
BooleanValidator(ENABLE_MEMORY_ESTIMATION_KEY);
 
   /**
    * Maximum query memory per node (in MB). Re-plan with cheaper operators if 
memory estimation exceeds this limit.
    * <p/>
    * DEFAULT: 2048 MB
    */
-  String MAX_QUERY_MEMORY_PER_NODE_KEY = 
"planner.memory.max_query_memory_per_node";
-  LongValidator MAX_QUERY_MEMORY_PER_NODE = new RangeLongValidator(
-      MAX_QUERY_MEMORY_PER_NODE_KEY, 1024 * 1024, Long.MAX_VALUE);
+  public static final String MAX_QUERY_MEMORY_PER_NODE_KEY = 
"planner.memory.max_query_memory_per_node";
+  public static final LongValidator MAX_QUERY_MEMORY_PER_NODE = new 
RangeLongValidator(MAX_QUERY_MEMORY_PER_NODE_KEY, 1024 * 1024, Long.MAX_VALUE);
 
   /**
    * Minimum memory alocated to each buffered operator instance.
    * <p/>
    * DEFAULT: 40 MB
    */
-  String MIN_MEMORY_PER_BUFFERED_OP_KEY = 
"planner.memory.min_memory_per_buffered_op";
-  LongValidator MIN_MEMORY_PER_BUFFERED_OP = new RangeLongValidator(
-      MIN_MEMORY_PER_BUFFERED_OP_KEY, 1024 * 1024, Long.MAX_VALUE);
+  public static final String MIN_MEMORY_PER_BUFFERED_OP_KEY = 
"planner.memory.min_memory_per_buffered_op";
+  public static final LongValidator MIN_MEMORY_PER_BUFFERED_OP = new 
RangeLongValidator(MIN_MEMORY_PER_BUFFERED_OP_KEY, 1024 * 1024, Long.MAX_VALUE);
 
   /**
    * Extra query memory per node for non-blocking operators.
@@ -366,74 +364,72 @@ public interface ExecConstants {
    * DEFAULT: 64 MB
    * MAXIMUM: 2048 MB
    */
-  String NON_BLOCKING_OPERATORS_MEMORY_KEY = 
"planner.memory.non_blocking_operators_memory";
-  OptionValidator NON_BLOCKING_OPERATORS_MEMORY = new PowerOfTwoLongValidator(
+  public static final String NON_BLOCKING_OPERATORS_MEMORY_KEY = 
"planner.memory.non_blocking_operators_memory";
+  public static final OptionValidator NON_BLOCKING_OPERATORS_MEMORY = new 
PowerOfTwoLongValidator(
     NON_BLOCKING_OPERATORS_MEMORY_KEY, 1 << 11);
 
-  String HASH_JOIN_TABLE_FACTOR_KEY = "planner.memory.hash_join_table_factor";
-  OptionValidator HASH_JOIN_TABLE_FACTOR = new 
DoubleValidator(HASH_JOIN_TABLE_FACTOR_KEY);
+  public static final String HASH_JOIN_TABLE_FACTOR_KEY = 
"planner.memory.hash_join_table_factor";
+  public static final OptionValidator HASH_JOIN_TABLE_FACTOR = new 
DoubleValidator(HASH_JOIN_TABLE_FACTOR_KEY);
 
-  String HASH_AGG_TABLE_FACTOR_KEY = "planner.memory.hash_agg_table_factor";
-  OptionValidator HASH_AGG_TABLE_FACTOR = new 
DoubleValidator(HASH_AGG_TABLE_FACTOR_KEY);
+  public static final String HASH_AGG_TABLE_FACTOR_KEY = 
"planner.memory.hash_agg_table_factor";
+  public static final OptionValidator HASH_AGG_TABLE_FACTOR = new 
DoubleValidator(HASH_AGG_TABLE_FACTOR_KEY);
 
-  String AVERAGE_FIELD_WIDTH_KEY = "planner.memory.average_field_width";
-  OptionValidator AVERAGE_FIELD_WIDTH = new 
PositiveLongValidator(AVERAGE_FIELD_WIDTH_KEY, Long.MAX_VALUE);
+  public static final String AVERAGE_FIELD_WIDTH_KEY = 
"planner.memory.average_field_width";
+  public static final OptionValidator AVERAGE_FIELD_WIDTH = new 
PositiveLongValidator(AVERAGE_FIELD_WIDTH_KEY, Long.MAX_VALUE);
 
-  BooleanValidator ENABLE_QUEUE = new BooleanValidator("exec.queue.enable");
-  LongValidator LARGE_QUEUE_SIZE = new 
PositiveLongValidator("exec.queue.large", 1000);
-  LongValidator SMALL_QUEUE_SIZE = new 
PositiveLongValidator("exec.queue.small", 100000);
-  LongValidator QUEUE_THRESHOLD_SIZE = new 
PositiveLongValidator("exec.queue.threshold",
-      Long.MAX_VALUE);
-  LongValidator QUEUE_TIMEOUT = new 
PositiveLongValidator("exec.queue.timeout_millis",
-      Long.MAX_VALUE);
+  public static final BooleanValidator ENABLE_QUEUE = new 
BooleanValidator("exec.queue.enable");
+  public static final LongValidator LARGE_QUEUE_SIZE = new 
PositiveLongValidator("exec.queue.large", 1000);
+  public static final LongValidator SMALL_QUEUE_SIZE = new 
PositiveLongValidator("exec.queue.small", 100000);
+  public static final LongValidator QUEUE_THRESHOLD_SIZE = new 
PositiveLongValidator("exec.queue.threshold", Long.MAX_VALUE);
+  public static final LongValidator QUEUE_TIMEOUT = new 
PositiveLongValidator("exec.queue.timeout_millis", Long.MAX_VALUE);
 
-  String ENABLE_VERBOSE_ERRORS_KEY = "exec.errors.verbose";
-  OptionValidator ENABLE_VERBOSE_ERRORS = new 
BooleanValidator(ENABLE_VERBOSE_ERRORS_KEY);
+  public static final String ENABLE_VERBOSE_ERRORS_KEY = "exec.errors.verbose";
+  public static final OptionValidator ENABLE_VERBOSE_ERRORS = new 
BooleanValidator(ENABLE_VERBOSE_ERRORS_KEY);
 
-  String ENABLE_NEW_TEXT_READER_KEY = "exec.storage.enable_new_text_reader";
-  OptionValidator ENABLE_NEW_TEXT_READER = new 
BooleanValidator(ENABLE_NEW_TEXT_READER_KEY);
+  public static final String ENABLE_NEW_TEXT_READER_KEY = 
"exec.storage.enable_new_text_reader";
+  public static final OptionValidator ENABLE_NEW_TEXT_READER = new 
BooleanValidator(ENABLE_NEW_TEXT_READER_KEY);
 
-  String BOOTSTRAP_STORAGE_PLUGINS_FILE = "bootstrap-storage-plugins.json";
+  public static final String BOOTSTRAP_STORAGE_PLUGINS_FILE = 
"bootstrap-storage-plugins.json";
 
-  String DRILL_SYS_FILE_SUFFIX = ".sys.drill";
+  public static final String DRILL_SYS_FILE_SUFFIX = ".sys.drill";
 
-  String ENABLE_WINDOW_FUNCTIONS = "window.enable";
-  OptionValidator ENABLE_WINDOW_FUNCTIONS_VALIDATOR = new 
BooleanValidator(ENABLE_WINDOW_FUNCTIONS);
+  public static final String ENABLE_WINDOW_FUNCTIONS = "window.enable";
+  public static final OptionValidator ENABLE_WINDOW_FUNCTIONS_VALIDATOR = new 
BooleanValidator(ENABLE_WINDOW_FUNCTIONS);
 
-  String DRILLBIT_CONTROL_INJECTIONS = "drill.exec.testing.controls";
-  OptionValidator DRILLBIT_CONTROLS_VALIDATOR =
-    new ExecutionControls.ControlsOptionValidator(DRILLBIT_CONTROL_INJECTIONS, 
1);
+  public static final String DRILLBIT_CONTROL_INJECTIONS = 
"drill.exec.testing.controls";
+  public static final OptionValidator DRILLBIT_CONTROLS_VALIDATOR = new 
ExecutionControls.ControlsOptionValidator(DRILLBIT_CONTROL_INJECTIONS, 1);
 
-  String NEW_VIEW_DEFAULT_PERMS_KEY = "new_view_default_permissions";
-  OptionValidator NEW_VIEW_DEFAULT_PERMS_VALIDATOR =
-      new StringValidator(NEW_VIEW_DEFAULT_PERMS_KEY);
+  public static final String NEW_VIEW_DEFAULT_PERMS_KEY = 
"new_view_default_permissions";
+  public static final OptionValidator NEW_VIEW_DEFAULT_PERMS_VALIDATOR = new 
StringValidator(NEW_VIEW_DEFAULT_PERMS_KEY);
 
-  String CTAS_PARTITIONING_HASH_DISTRIBUTE = "store.partition.hash_distribute";
-  BooleanValidator CTAS_PARTITIONING_HASH_DISTRIBUTE_VALIDATOR = new 
BooleanValidator(CTAS_PARTITIONING_HASH_DISTRIBUTE);
+  public static final String CTAS_PARTITIONING_HASH_DISTRIBUTE = 
"store.partition.hash_distribute";
+  public static final BooleanValidator 
CTAS_PARTITIONING_HASH_DISTRIBUTE_VALIDATOR = new 
BooleanValidator(CTAS_PARTITIONING_HASH_DISTRIBUTE);
 
-  String ENABLE_BULK_LOAD_TABLE_LIST_KEY = "exec.enable_bulk_load_table_list";
-  BooleanValidator ENABLE_BULK_LOAD_TABLE_LIST = new 
BooleanValidator(ENABLE_BULK_LOAD_TABLE_LIST_KEY);
+  public static final String ENABLE_BULK_LOAD_TABLE_LIST_KEY = 
"exec.enable_bulk_load_table_list";
+  public static final BooleanValidator ENABLE_BULK_LOAD_TABLE_LIST = new 
BooleanValidator(ENABLE_BULK_LOAD_TABLE_LIST_KEY);
 
   /**
    * When getting Hive Table information with exec.enable_bulk_load_table_list 
set to true,
    * use the exec.bulk_load_table_list.bulk_size to determine how many tables 
to fetch from HiveMetaStore
    * at a time. (The number of tables can get to be quite large.)
    */
-  String BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY = 
"exec.bulk_load_table_list.bulk_size";
-  PositiveLongValidator BULK_LOAD_TABLE_LIST_BULK_SIZE = new 
PositiveLongValidator(BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY, Integer.MAX_VALUE);
+  public static final String BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY = 
"exec.bulk_load_table_list.bulk_size";
+  public static final PositiveLongValidator BULK_LOAD_TABLE_LIST_BULK_SIZE = 
new PositiveLongValidator(BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY, 
Integer.MAX_VALUE);
 
   /**
    * Option whose value is a comma separated list of admin usernames. Admin 
users are users who have special privileges
    * such as changing system options.
    */
-  String ADMIN_USERS_KEY = "security.admin.users";
-  StringValidator ADMIN_USERS_VALIDATOR = new StringValidator(ADMIN_USERS_KEY);
+
+  public static final String ADMIN_USERS_KEY = "security.admin.users";
+  public static final StringValidator ADMIN_USERS_VALIDATOR = new 
StringValidator(ADMIN_USERS_KEY);
 
   /**
    * Option whose value is a comma separated list of admin usergroups.
    */
-  String ADMIN_USER_GROUPS_KEY = "security.admin.user_groups";
-  StringValidator ADMIN_USER_GROUPS_VALIDATOR = new 
StringValidator(ADMIN_USER_GROUPS_KEY);
+
+  public static final String ADMIN_USER_GROUPS_KEY = 
"security.admin.user_groups";
+  public static final StringValidator ADMIN_USER_GROUPS_VALIDATOR = new 
StringValidator(ADMIN_USER_GROUPS_KEY);
   /**
    * Option whose value is a string representing list of inbound impersonation 
policies.
    *
@@ -446,37 +442,37 @@ public interface ExecConstants {
    *   ...
    * ]
    */
-  String IMPERSONATION_POLICIES_KEY = "exec.impersonation.inbound_policies";
-  StringValidator IMPERSONATION_POLICY_VALIDATOR =
-      new 
InboundImpersonationManager.InboundImpersonationPolicyValidator(IMPERSONATION_POLICIES_KEY);
+  public static final String IMPERSONATION_POLICIES_KEY = 
"exec.impersonation.inbound_policies";
+  public static final StringValidator IMPERSONATION_POLICY_VALIDATOR =
+    new 
InboundImpersonationManager.InboundImpersonationPolicyValidator(IMPERSONATION_POLICIES_KEY);
 
 
   /**
    * Web settings
    */
-  String WEB_LOGS_MAX_LINES = "web.logs.max_lines";
-  OptionValidator WEB_LOGS_MAX_LINES_VALIDATOR = new 
PositiveLongValidator(WEB_LOGS_MAX_LINES, Integer.MAX_VALUE);
+  public static final String WEB_LOGS_MAX_LINES = "web.logs.max_lines";
+  public static final OptionValidator WEB_LOGS_MAX_LINES_VALIDATOR = new 
PositiveLongValidator(WEB_LOGS_MAX_LINES, Integer.MAX_VALUE);
 
-  String CODE_GEN_EXP_IN_METHOD_SIZE = "exec.java.compiler.exp_in_method_size";
-  LongValidator CODE_GEN_EXP_IN_METHOD_SIZE_VALIDATOR = new 
LongValidator(CODE_GEN_EXP_IN_METHOD_SIZE);
+  public static final String CODE_GEN_EXP_IN_METHOD_SIZE = 
"exec.java.compiler.exp_in_method_size";
+  public static final LongValidator CODE_GEN_EXP_IN_METHOD_SIZE_VALIDATOR = 
new LongValidator(CODE_GEN_EXP_IN_METHOD_SIZE);
 
   /**
    * Timeout for create prepare statement request. If the request exceeds this 
timeout, then request is timed out.
    * Default value is 10mins.
    */
-  String CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS = 
"prepare.statement.create_timeout_ms";
-  OptionValidator CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS_VALIDATOR =
+  public static final String CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS = 
"prepare.statement.create_timeout_ms";
+  public static final OptionValidator 
CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS_VALIDATOR =
       new PositiveLongValidator(CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS, 
Integer.MAX_VALUE);
 
-  String DYNAMIC_UDF_SUPPORT_ENABLED = "exec.udf.enable_dynamic_support";
-  BooleanValidator DYNAMIC_UDF_SUPPORT_ENABLED_VALIDATOR = new 
BooleanValidator(DYNAMIC_UDF_SUPPORT_ENABLED);
+  public static final String DYNAMIC_UDF_SUPPORT_ENABLED = 
"exec.udf.enable_dynamic_support";
+  public static final BooleanValidator DYNAMIC_UDF_SUPPORT_ENABLED_VALIDATOR = 
new BooleanValidator(DYNAMIC_UDF_SUPPORT_ENABLED);
 
   /**
    * Option to save query profiles. If false, no query profile will be saved
    * for any query.
    */
-  String ENABLE_QUERY_PROFILE_OPTION = "exec.query_profile.save";
-  BooleanValidator ENABLE_QUERY_PROFILE_VALIDATOR = new BooleanValidator(ENABLE_QUERY_PROFILE_OPTION);
+  public static final String ENABLE_QUERY_PROFILE_OPTION = "exec.query_profile.save";
+  public static final BooleanValidator ENABLE_QUERY_PROFILE_VALIDATOR = new BooleanValidator(ENABLE_QUERY_PROFILE_OPTION);
 
   /**
    * Profiles are normally written after the last client message to reduce latency.
@@ -484,17 +480,17 @@ public interface ExecConstants {
    * return so that the client can immediately read the profile for test verification.
    */
-  String QUERY_PROFILE_DEBUG_OPTION = "exec.query_profile.debug_mode";
-  BooleanValidator QUERY_PROFILE_DEBUG_VALIDATOR = new BooleanValidator(QUERY_PROFILE_DEBUG_OPTION);
+  public static final String QUERY_PROFILE_DEBUG_OPTION = "exec.query_profile.debug_mode";
+  public static final BooleanValidator QUERY_PROFILE_DEBUG_VALIDATOR = new BooleanValidator(QUERY_PROFILE_DEBUG_OPTION);
 
-  String USE_DYNAMIC_UDFS_KEY = "exec.udf.use_dynamic";
-  BooleanValidator USE_DYNAMIC_UDFS = new BooleanValidator(USE_DYNAMIC_UDFS_KEY);
+  public static final String USE_DYNAMIC_UDFS_KEY = "exec.udf.use_dynamic";
+  public static final BooleanValidator USE_DYNAMIC_UDFS = new BooleanValidator(USE_DYNAMIC_UDFS_KEY);
 
-  String QUERY_TRANSIENT_STATE_UPDATE_KEY = "exec.query.progress.update";
-  BooleanValidator QUERY_TRANSIENT_STATE_UPDATE = new BooleanValidator(QUERY_TRANSIENT_STATE_UPDATE_KEY);
+  public static final String QUERY_TRANSIENT_STATE_UPDATE_KEY = "exec.query.progress.update";
+  public static final BooleanValidator QUERY_TRANSIENT_STATE_UPDATE = new BooleanValidator(QUERY_TRANSIENT_STATE_UPDATE_KEY);
 
-  String PERSISTENT_TABLE_UMASK = "exec.persistent_table.umask";
-  StringValidator PERSISTENT_TABLE_UMASK_VALIDATOR = new StringValidator(PERSISTENT_TABLE_UMASK);
+  public static final String PERSISTENT_TABLE_UMASK = "exec.persistent_table.umask";
+  public static final StringValidator PERSISTENT_TABLE_UMASK_VALIDATOR = new StringValidator(PERSISTENT_TABLE_UMASK);
 
   /**
    * Enables batch iterator (operator) validation. Validation is normally enabled
@@ -502,28 +498,33 @@ public interface ExecConstants {
    * if assertions are not enabled. That is, it allows iterator validation even on
    * a "production" Drill instance.
    */
-  String ENABLE_ITERATOR_VALIDATION_OPTION = "debug.validate_iterators";
-  BooleanValidator ENABLE_ITERATOR_VALIDATOR = new BooleanValidator(ENABLE_ITERATOR_VALIDATION_OPTION);
+  public static final String ENABLE_ITERATOR_VALIDATION_OPTION = "debug.validate_iterators";
+  public static final BooleanValidator ENABLE_ITERATOR_VALIDATOR = new BooleanValidator(ENABLE_ITERATOR_VALIDATION_OPTION);
 
   /**
    * Boot-time config option to enable validation. Primarily used for tests.
    * If true, overrides the above. (That is, validation is done if assertions are on,
    * if the above session option is set to true, or if this config option is set to true.)
    */
-
-  String ENABLE_ITERATOR_VALIDATION = "drill.exec.debug.validate_iterators";
+  public static final String ENABLE_ITERATOR_VALIDATION = "drill.exec.debug.validate_iterators";
 
   /**
    * When iterator validation is enabled, additionally validates the vectors in
    * each batch passed to each iterator.
    */
-  String ENABLE_VECTOR_VALIDATION_OPTION = "debug.validate_vectors";
-  BooleanValidator ENABLE_VECTOR_VALIDATOR = new BooleanValidator(ENABLE_VECTOR_VALIDATION_OPTION);
+  public static final String ENABLE_VECTOR_VALIDATION_OPTION = "debug.validate_vectors";
+  public static final BooleanValidator ENABLE_VECTOR_VALIDATOR = new BooleanValidator(ENABLE_VECTOR_VALIDATION_OPTION);
 
   /**
    * Boot-time config option to enable vector validation. Primarily used for
    * tests. Add the following to the command line to enable:<br>
    * <tt>-ea -Ddrill.exec.debug.validate_vectors=true</tt>
    */
-  String ENABLE_VECTOR_VALIDATION = "drill.exec.debug.validate_vectors";
+  public static final String ENABLE_VECTOR_VALIDATION = "drill.exec.debug.validate_vectors";
+
+  public static final String OPTION_DEFAULTS_ROOT = "drill.exec.options.";
+
+  public static String bootDefaultFor(String name) {
+    return OPTION_DEFAULTS_ROOT + name;
+  }
 }
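
For reference, a minimal standalone sketch (illustrative only, not part of the
patch; the class name and main() method are hypothetical) of how the new
bootDefaultFor() helper composes a boot-time default property name from the
"drill.exec.options." root added above:

    // Mirrors the OPTION_DEFAULTS_ROOT and bootDefaultFor() additions shown in the diff above.
    public class BootDefaultExample {
      static final String OPTION_DEFAULTS_ROOT = "drill.exec.options.";

      static String bootDefaultFor(String name) {
        return OPTION_DEFAULTS_ROOT + name;
      }

      public static void main(String[] args) {
        // A validator's getConfigProperty() now resolves to a name of this form:
        System.out.println(bootDefaultFor("exec.udf.enable_dynamic_support"));
        // prints: drill.exec.options.exec.udf.enable_dynamic_support
      }
    }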

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValidator.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValidator.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValidator.java
index 1477339..5418408 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValidator.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/OptionValidator.java
@@ -19,6 +19,7 @@ package org.apache.drill.exec.server.options;
 
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.server.options.OptionValue.Kind;
 
 /**
@@ -28,7 +29,6 @@ public abstract class OptionValidator {
   // Stored here as well as in the option static class to allow insertion of option optionName into
   // the error messages produced by the validator
   private final String optionName;
-  public static final String OPTION_DEFAULTS_ROOT = "drill.exec.options.";
 
   /** By default, if admin option value is not specified, it would be set to false. */
   public OptionValidator(String optionName) {
@@ -89,6 +89,6 @@ public abstract class OptionValidator {
   public abstract Kind getKind();
 
   public String getConfigProperty() {
-    return OPTION_DEFAULTS_ROOT + getOptionName();
+    return ExecConstants.bootDefaultFor(getOptionName());
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
index b9861a6..fc4c673 100644
--- 
a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
+++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/TypeValidators.java
@@ -21,6 +21,7 @@ import java.util.Set;
 
 import com.google.common.collect.Sets;
 import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.server.options.OptionValue.Kind;
 
 public class TypeValidators {
@@ -243,5 +244,9 @@ public class TypeValidators {
     public Kind getKind() {
       return kind;
     }
+
+    public String getConfigProperty() {
+      return ExecConstants.bootDefaultFor(getOptionName());
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java 
b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
index d40233d..e59c384 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/BaseTestQuery.java
@@ -271,6 +271,10 @@ public class BaseTestQuery extends ExecTest {
     return allocator;
   }
 
+  public static int getUserPort() {
+    return bits[0].getUserPort();
+  }
+
   public static TestBuilder newTest() {
     return testBuilder();
   }
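
A hedged usage sketch of the new getUserPort() accessor (a hypothetical test,
not part of the patch), which exposes what appears to be the first Drillbit's
user RPC port to test subclasses:

    package org.apache.drill;

    import java.net.Socket;

    import org.junit.Test;

    // Hypothetical test: opens a plain socket to confirm that the user server
    // is listening on the port reported by the new getUserPort() accessor.
    public class TestUserPortExample extends BaseTestQuery {
      @Test
      public void userServerIsReachable() throws Exception {
        try (Socket ignored = new Socket("localhost", getUserPort())) {
          // A successful connection is the assertion; the socket closes on exit.
        }
      }
    }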

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestAggNullable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestAggNullable.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestAggNullable.java
index 34850ba..4085fb4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestAggNullable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestAggNullable.java
@@ -19,9 +19,12 @@ package org.apache.drill;
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.util.TestTools;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category(OperatorTest.class)
 public class TestAggNullable extends BaseTestQuery{
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestAggNullable.class);
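
For readers unfamiliar with JUnit categories: the org.apache.drill.categories
types used in the @Category annotations above are plain marker types. A sketch
of what such a marker likely looks like (an illustrative assumption; the real
definitions live in the org.apache.drill.categories package and are not shown
in this diff):

    package org.apache.drill.categories;

    // JUnit's @Category only needs a type token, so a category can be an
    // empty marker interface like this (actual contents assumed).
    public interface OperatorTest {
    }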
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
index a554db2..cc32dbe 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
@@ -17,8 +17,12 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.categories.OperatorTest;
+import org.apache.drill.categories.SqlTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({SqlTest.class, OperatorTest.class})
 public class TestAltSortQueries extends BaseTestQuery{
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestAltSortQueries.class);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestBugFixes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestBugFixes.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestBugFixes.java
index 9b25860..8b608c6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestBugFixes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestBugFixes.java
@@ -18,16 +18,19 @@
 package org.apache.drill;
 
 import com.google.common.collect.ImmutableList;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
+@Category(UnlikelyTest.class)
 public class TestBugFixes extends BaseTestQuery {
   private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestBugFixes.class);
   private static final String WORKING_PATH = TestTools.getWorkingPath();

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
index 9953e9c..0a157eb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
@@ -18,8 +18,12 @@
 
 package org.apache.drill;
 
+import org.apache.drill.categories.SqlTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({UnlikelyTest.class, SqlTest.class})
 public class TestCaseSensitivity extends BaseTestQuery {
 
   @Test //DRILL-4707

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestCorrelation.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestCorrelation.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestCorrelation.java
index ebfb4e8..133e5be 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestCorrelation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestCorrelation.java
@@ -17,8 +17,12 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.categories.OperatorTest;
+import org.apache.drill.categories.SqlTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({SqlTest.class, OperatorTest.class})
 public class TestCorrelation extends PlanTestBase {
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestCorrelation.class);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
index 5f6cd9c..cfd8fbd 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
@@ -16,6 +16,7 @@
  * limitations under the License.
  */
 package org.apache.drill;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.work.ExecErrorConstants;
@@ -24,7 +25,9 @@ import 
org.apache.drill.exec.work.foreman.UnsupportedDataTypeException;
 import org.apache.drill.exec.work.foreman.UnsupportedFunctionException;
 import org.apache.drill.exec.work.foreman.UnsupportedRelOperatorException;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category(UnlikelyTest.class)
 public class TestDisabledFunctionality extends BaseTestQuery{
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
index 4b6dd5f..f270d1e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
@@ -17,12 +17,15 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.categories.SqlTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 import org.junit.Assert;
+import org.junit.experimental.categories.Category;
 
-
+@Category(SqlTest.class)
 public class TestDropTable extends PlanTestBase {
 
   private static final String CREATE_SIMPLE_TABLE = "create table %s as select 
1 from cp.`employee.json`";
@@ -176,6 +179,7 @@ public class TestDropTable extends PlanTestBase {
   }
 
   @Test // DRILL-4673
+  @Category(UnlikelyTest.class)
   public void testDropTableIfExistsWhileTableExists() throws Exception {
     final String existentTableName = "test_table_exists";
     test("use dfs_test.tmp");
@@ -191,6 +195,7 @@ public class TestDropTable extends PlanTestBase {
   }
 
   @Test // DRILL-4673
+  @Category(UnlikelyTest.class)
   public void testDropTableIfExistsWhileTableDoesNotExist() throws Exception {
     final String nonExistentTableName = "test_table_not_exists";
     test("use dfs_test.tmp");
@@ -205,6 +210,7 @@ public class TestDropTable extends PlanTestBase {
   }
 
   @Test // DRILL-4673
+  @Category(UnlikelyTest.class)
   public void testDropTableIfExistsWhileItIsAView() throws Exception {
     final String viewName = "test_view";
     try{

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
index 7f5b116..f147f21 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
@@ -19,6 +19,8 @@ package org.apache.drill;
 
 import com.google.common.collect.Lists;
 import mockit.Deencapsulation;
+import org.apache.drill.categories.SlowTest;
+import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.common.config.CommonConstants;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
@@ -39,6 +41,7 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 import org.junit.runner.RunWith;
 import org.mockito.invocation.InvocationOnMock;
@@ -66,6 +69,7 @@ import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 
 @RunWith(MockitoJUnitRunner.class)
+@Category({SlowTest.class, SqlFunctionTest.class})
 public class TestDynamicUDFSupport extends BaseTestQuery {
 
   private static final Path jars = new Path(TestTools.getWorkingPath(), 
"src/test/resources/jars");

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
index bbfe093..0fafe53 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
@@ -22,18 +22,24 @@ import static org.junit.Assert.assertEquals;
 
 import java.math.BigDecimal;
 
+import org.apache.drill.categories.OperatorTest;
+import org.apache.drill.categories.PlannerTest;
+import org.apache.drill.categories.SqlFunctionTest;
+import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.compile.ClassTransformer;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category({SqlFunctionTest.class, OperatorTest.class, PlannerTest.class})
 public class TestExampleQueries extends BaseTestQuery {
 //  private static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 
   @Test // see DRILL-2328
+  @Category(UnlikelyTest.class)
   public void testConcatOnNull() throws Exception {
     try {
       test("use dfs_test.tmp");
@@ -218,11 +224,13 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testCaseReturnValueVarChar() throws Exception {
     test("select case when employee_id < 1000 then 'ABC' else 'DEF' end from 
cp.`employee.json` limit 5");
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testCaseReturnValueBigInt() throws Exception {
     test("select case when employee_id < 1000 then 1000 else 2000 end from 
cp.`employee.json` limit 5");
   }
@@ -347,6 +355,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   // cast non-exist column from json file. Should return null value.
   public void testDrill428() throws Exception {
     test("select cast(NON_EXIST_COL as varchar(10)) from cp.`employee.json` 
limit 2; ");
@@ -415,6 +424,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testCase() throws Exception {
     test("select case when n_nationkey > 0 and n_nationkey < 2 then 
concat(n_name, '_abc') when n_nationkey >=2 and n_nationkey < 4 then '_EFG' 
else concat(n_name,'_XYZ') end, n_comment from cp.`tpch/nation.parquet` ;");
   }
@@ -442,6 +452,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-1544
+  @Category(UnlikelyTest.class)
   public void testLikeEscape() throws Exception {
     int actualRecordCount = testSql("select id, name from 
cp.`jsoninput/specialchar.json` where name like '%#_%' ESCAPE '#'");
     int expectedRecordCount = 1;
@@ -451,6 +462,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testSimilarEscape() throws Exception {
     int actualRecordCount = testSql("select id, name from 
cp.`jsoninput/specialchar.json` where name similar to '(N|S)%#_%' ESCAPE '#'");
     int expectedRecordCount = 1;
@@ -459,6 +471,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testImplicitDownwardCast() throws Exception {
     int actualRecordCount = testSql("select o_totalprice from 
cp.`tpch/orders.parquet` where o_orderkey=60000 and o_totalprice=299402");
     int expectedRecordCount = 0;
@@ -467,6 +480,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-1470
+  @Category(UnlikelyTest.class)
   public void testCastToVarcharWithLength() throws Exception {
     // cast from varchar with unknown length to a fixed length.
     int actualRecordCount = testSql("select first_name from cp.`employee.json` 
where cast(first_name as varchar(2)) = 'Sh'");
@@ -495,6 +509,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-1488
+  @Category(UnlikelyTest.class)
   public void testIdentifierMaxLength() throws Exception {
     // use long column alias name (approx 160 chars)
     test("select employee_id as  
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
 from cp.`employee.json` limit 1");
@@ -520,6 +535,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-1788
+  @Category(UnlikelyTest.class)
   public void testCaseInsensitiveJoin() throws Exception {
     test("select n3.n_name from (select n2.n_name from 
cp.`tpch/nation.parquet` n1, cp.`tpch/nation.parquet` n2 where n1.N_name = 
n2.n_name) n3 " +
         " join cp.`tpch/nation.parquet` n4 on n3.n_name = n4.n_name");
@@ -682,6 +698,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testSimilar() throws Exception {
     String query = "select n_nationkey " +
         "from cp.`tpch/nation.parquet` " +
@@ -722,6 +739,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-1943, DRILL-1911
+  @Category(UnlikelyTest.class)
   public void testColumnNamesDifferInCaseOnly() throws Exception {
     testBuilder()
         .sqlQuery("select r_regionkey a, r_regionkey A FROM 
cp.`tpch/region.parquet`")
@@ -1095,6 +1113,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // see DRILL-3557
+  @Category(UnlikelyTest.class)
   public void testEmptyCSVinDirectory() throws Exception {
     final String root = 
FileUtils.getResourceAsFile("/store/text/directoryWithEmpyCSV").toURI().toString();
     final String toFile = 
FileUtils.getResourceAsFile("/store/text/directoryWithEmpyCSV/empty.csv").toURI().toString();
@@ -1107,6 +1126,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test
+  @Category(UnlikelyTest.class)
   public void testNegativeExtractOperator() throws Exception {
     String query = "select -EXTRACT(DAY FROM birth_date) as col \n" +
         "from cp.`employee.json` \n" +
@@ -1175,6 +1195,7 @@ public class TestExampleQueries extends BaseTestQuery {
   }
 
   @Test // DRILL-2190
+  @Category(UnlikelyTest.class)
   public void testDateImplicitCasting() throws Exception {
     String query = "SELECT birth_date \n" +
         "FROM cp.`employee.json` \n" +

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
index 8be8781..f565665 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsQuery.java
@@ -17,18 +17,21 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.joda.time.DateTime;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import java.math.BigDecimal;
 
 import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatDate;
 import static org.apache.drill.exec.expr.fn.impl.DateUtility.formatTimeStamp;
 
+@Category(SqlFunctionTest.class)
 public class TestFunctionsQuery extends BaseTestQuery {
 
   // enable decimal data type

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsWithTypeExpoQueries.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsWithTypeExpoQueries.java
 
b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsWithTypeExpoQueries.java
index 46a4823..e73fd41 100644
--- 
a/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsWithTypeExpoQueries.java
+++ 
b/exec/java-exec/src/test/java/org/apache/drill/TestFunctionsWithTypeExpoQueries.java
@@ -19,13 +19,16 @@ package org.apache.drill;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.lang3.tuple.Pair;
+import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.util.FileUtils;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
 import java.util.List;
 
+@Category(SqlFunctionTest.class)
 public class TestFunctionsWithTypeExpoQueries extends BaseTestQuery {
   @Test
   public void testConcatWithMoreThanTwoArgs() throws Exception {

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestImplicitCasting.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestImplicitCasting.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestImplicitCasting.java
index ae94e36..8b47016 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestImplicitCasting.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestImplicitCasting.java
@@ -17,17 +17,19 @@
  */
 package org.apache.drill;
 
-import org.apache.drill.common.types.MinorType;
+import org.apache.drill.categories.SqlTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.exec.resolver.TypeCastRules;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
+import org.junit.experimental.categories.Category;
 
 import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 
+@Category(SqlTest.class)
 public class TestImplicitCasting {
   @Test
   public void testTimeStampAndTime() {

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestInList.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestInList.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestInList.java
index b6218ec..79dfa13 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestInList.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestInList.java
@@ -19,9 +19,11 @@ package org.apache.drill;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.categories.SqlTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category(SqlTest.class)
 public class TestInList extends BaseTestQuery{
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestInList.class);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
index 2bd2811..690f793 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestJoinNullable.java
@@ -19,9 +19,12 @@ package org.apache.drill;
 
 import static org.junit.Assert.assertEquals;
 
+import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.util.TestTools;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category(OperatorTest.class)
 public class TestJoinNullable extends BaseTestQuery{
   static final org.slf4j.Logger logger = 
org.slf4j.LoggerFactory.getLogger(TestJoinNullable.class);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/030189f9/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
----------------------------------------------------------------------
diff --git 
a/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java 
b/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
index 2de4ea2..ab0acd2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestMergeFilterPlan.java
@@ -18,8 +18,11 @@
 
 package org.apache.drill;
 
+import org.apache.drill.categories.PlannerTest;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 
+@Category(PlannerTest.class)
 public class TestMergeFilterPlan extends PlanTestBase {
 
   @Test
