Github user paul-rogers commented on a diff in the pull request:

    https://github.com/apache/drill/pull/536#discussion_r82485277
  
    --- Diff: exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java ---
    @@ -33,110 +33,122 @@
     import org.apache.drill.exec.testing.ExecutionControls;
     import org.apache.drill.exec.util.ImpersonationUtil;
     
    -public interface ExecConstants {
    -  String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
    -  String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
    -  String ZK_CONNECTION = "drill.exec.zk.connect";
    -  String ZK_TIMEOUT = "drill.exec.zk.timeout";
    -  String ZK_ROOT = "drill.exec.zk.root";
    -  String ZK_REFRESH = "drill.exec.zk.refresh";
    -  String BIT_RETRY_TIMES = "drill.exec.rpc.bit.server.retry.count";
    -  String BIT_RETRY_DELAY = "drill.exec.rpc.bit.server.retry.delay";
    -  String BIT_TIMEOUT = "drill.exec.bit.timeout" ;
    -  String SERVICE_NAME = "drill.exec.cluster-id";
    -  String INITIAL_BIT_PORT = "drill.exec.rpc.bit.server.port";
    -  String BIT_RPC_TIMEOUT = "drill.exec.rpc.bit.timeout";
    -  String INITIAL_USER_PORT = "drill.exec.rpc.user.server.port";
    -  String USER_RPC_TIMEOUT = "drill.exec.rpc.user.timeout";
    -  String METRICS_CONTEXT_NAME = "drill.exec.metrics.context";
    -  String USE_IP_ADDRESS = "drill.exec.rpc.use.ip";
    -  String CLIENT_RPC_THREADS = "drill.exec.rpc.user.client.threads";
    -  String BIT_SERVER_RPC_THREADS = "drill.exec.rpc.bit.server.threads";
    -  String USER_SERVER_RPC_THREADS = "drill.exec.rpc.user.server.threads";
    -  String TRACE_DUMP_DIRECTORY = "drill.exec.trace.directory";
    -  String TRACE_DUMP_FILESYSTEM = "drill.exec.trace.filesystem";
    -  String TEMP_DIRECTORIES = "drill.exec.tmp.directories";
    -  String TEMP_FILESYSTEM = "drill.exec.tmp.filesystem";
    -  String INCOMING_BUFFER_IMPL = "drill.exec.buffer.impl";
    +public final class ExecConstants {
    +
    +  public static final String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
    +  public static final String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
    +  public static final String ZK_CONNECTION = "drill.exec.zk.connect";
    +  public static final String ZK_TIMEOUT = "drill.exec.zk.timeout";
    +  public static final String ZK_ROOT = "drill.exec.zk.root";
    +  public static final String ZK_REFRESH = "drill.exec.zk.refresh";
    +  public static final String BIT_RETRY_TIMES = "drill.exec.rpc.bit.server.retry.count";
    +  public static final String BIT_RETRY_DELAY = "drill.exec.rpc.bit.server.retry.delay";
    +  public static final String BIT_TIMEOUT = "drill.exec.bit.timeout";
    +  public static final String SERVICE_NAME = "drill.exec.cluster-id";
    +  public static final String INITIAL_BIT_PORT = "drill.exec.rpc.bit.server.port";
    +  public static final String BIT_RPC_TIMEOUT = "drill.exec.rpc.bit.timeout";
    +  public static final String INITIAL_USER_PORT = "drill.exec.rpc.user.server.port";
    +  public static final String USER_RPC_TIMEOUT = "drill.exec.rpc.user.timeout";
    +  public static final String METRICS_CONTEXT_NAME = "drill.exec.metrics.context";
    +  public static final String USE_IP_ADDRESS = "drill.exec.rpc.use.ip";
    +  public static final String CLIENT_RPC_THREADS = "drill.exec.rpc.user.client.threads";
    +  public static final String BIT_SERVER_RPC_THREADS = "drill.exec.rpc.bit.server.threads";
    +  public static final String USER_SERVER_RPC_THREADS = "drill.exec.rpc.user.server.threads";
    +  public static final String TRACE_DUMP_DIRECTORY = "drill.exec.trace.directory";
    +  public static final String TRACE_DUMP_FILESYSTEM = "drill.exec.trace.filesystem";
    +  public static final String TEMP_DIRECTORIES = "drill.exec.tmp.directories";
    +  public static final String TEMP_FILESYSTEM = "drill.exec.tmp.filesystem";
    +  public static final String INCOMING_BUFFER_IMPL = "drill.exec.buffer.impl";
       /** incoming buffer size (number of batches) */
    -  String INCOMING_BUFFER_SIZE = "drill.exec.buffer.size";
    -  String SPOOLING_BUFFER_DELETE = "drill.exec.buffer.spooling.delete";
    -  String SPOOLING_BUFFER_MEMORY = "drill.exec.buffer.spooling.size";
    -  String BATCH_PURGE_THRESHOLD = "drill.exec.sort.purge.threshold";
    -  String EXTERNAL_SORT_TARGET_BATCH_SIZE = "drill.exec.sort.external.batch.size";
    -  String EXTERNAL_SORT_TARGET_SPILL_BATCH_SIZE = "drill.exec.sort.external.spill.batch.size";
    -  String EXTERNAL_SORT_SPILL_GROUP_SIZE = "drill.exec.sort.external.spill.group.size";
    -  String EXTERNAL_SORT_SPILL_THRESHOLD = "drill.exec.sort.external.spill.threshold";
    -  String EXTERNAL_SORT_SPILL_DIRS = "drill.exec.sort.external.spill.directories";
    -  String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
    -  String EXTERNAL_SORT_MSORT_MAX_BATCHSIZE = "drill.exec.sort.external.msort.batch.maxsize";
    -  String TEXT_LINE_READER_BATCH_SIZE = "drill.exec.storage.file.text.batch.size";
    -  String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
    -  String HAZELCAST_SUBNETS = "drill.exec.cache.hazel.subnets";
    -  String HTTP_ENABLE = "drill.exec.http.enabled";
    -  String HTTP_PORT = "drill.exec.http.port";
    -  String HTTP_ENABLE_SSL = "drill.exec.http.ssl_enabled";
    -  String HTTP_SESSION_MAX_IDLE_SECS = "drill.exec.http.session_max_idle_secs";
    -  String HTTP_KEYSTORE_PATH = "javax.net.ssl.keyStore";
    -  String HTTP_KEYSTORE_PASSWORD = "javax.net.ssl.keyStorePassword";
    -  String HTTP_TRUSTSTORE_PATH = "javax.net.ssl.trustStore";
    -  String HTTP_TRUSTSTORE_PASSWORD = "javax.net.ssl.trustStorePassword";
    -  String SYS_STORE_PROVIDER_CLASS = "drill.exec.sys.store.provider.class";
    -  String SYS_STORE_PROVIDER_LOCAL_PATH = "drill.exec.sys.store.provider.local.path";
    -  String SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE = "drill.exec.sys.store.provider.local.write";
    -  String IMPERSONATION_ENABLED = "drill.exec.impersonation.enabled";
    -  String IMPERSONATION_MAX_CHAINED_USER_HOPS = "drill.exec.impersonation.max_chained_user_hops";
    -  String USER_AUTHENTICATION_ENABLED = "drill.exec.security.user.auth.enabled";
    -  String USER_AUTHENTICATOR_IMPL = "drill.exec.security.user.auth.impl";
    -  String PAM_AUTHENTICATOR_PROFILES = "drill.exec.security.user.auth.pam_profiles";
    +  public static final String INCOMING_BUFFER_SIZE = "drill.exec.buffer.size";
    +  public static final String SPOOLING_BUFFER_DELETE = "drill.exec.buffer.spooling.delete";
    +  public static final String SPOOLING_BUFFER_MEMORY = "drill.exec.buffer.spooling.size";
    +  public static final String BATCH_PURGE_THRESHOLD = "drill.exec.sort.purge.threshold";
    +  public static final String EXTERNAL_SORT_TARGET_BATCH_SIZE = "drill.exec.sort.external.batch.size";
    +  public static final String EXTERNAL_SORT_TARGET_SPILL_BATCH_SIZE = "drill.exec.sort.external.spill.batch.size";
    +  public static final String EXTERNAL_SORT_SPILL_GROUP_SIZE = "drill.exec.sort.external.spill.group.size";
    +  public static final String EXTERNAL_SORT_SPILL_THRESHOLD = "drill.exec.sort.external.spill.threshold";
    +  public static final String EXTERNAL_SORT_SPILL_DIRS = "drill.exec.sort.external.spill.directories";
    +  public static final String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
    +  public static final String EXTERNAL_SORT_MSORT_MAX_BATCHSIZE = "drill.exec.sort.external.msort.batch.maxsize";
    +  public static final String TEXT_LINE_READER_BATCH_SIZE = "drill.exec.storage.file.text.batch.size";
    +  public static final String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
    +  public static final String HAZELCAST_SUBNETS = "drill.exec.cache.hazel.subnets";
    +  public static final String HTTP_ENABLE = "drill.exec.http.enabled";
    +  public static final String HTTP_PORT = "drill.exec.http.port";
    +  public static final String HTTP_ENABLE_SSL = "drill.exec.http.ssl_enabled";
    +  public static final String HTTP_SESSION_MAX_IDLE_SECS = "drill.exec.http.session_max_idle_secs";
    +  public static final String HTTP_KEYSTORE_PATH = "javax.net.ssl.keyStore";
    +  public static final String HTTP_KEYSTORE_PASSWORD = "javax.net.ssl.keyStorePassword";
    +  public static final String HTTP_TRUSTSTORE_PATH = "javax.net.ssl.trustStore";
    +  public static final String HTTP_TRUSTSTORE_PASSWORD = "javax.net.ssl.trustStorePassword";
    +  public static final String SYS_STORE_PROVIDER_CLASS = "drill.exec.sys.store.provider.class";
    +  public static final String SYS_STORE_PROVIDER_LOCAL_PATH = "drill.exec.sys.store.provider.local.path";
    +  public static final String SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE = "drill.exec.sys.store.provider.local.write";
    +  public static final String IMPERSONATION_ENABLED = "drill.exec.impersonation.enabled";
    +  public static final String IMPERSONATION_MAX_CHAINED_USER_HOPS = "drill.exec.impersonation.max_chained_user_hops";
    +  public static final String USER_AUTHENTICATION_ENABLED = "drill.exec.security.user.auth.enabled";
    +  public static final String USER_AUTHENTICATOR_IMPL = "drill.exec.security.user.auth.impl";
    +  public static final String PAM_AUTHENTICATOR_PROFILES = "drill.exec.security.user.auth.pam_profiles";
       /** Size of JDBC batch queue (in batches) above which throttling begins. */
    -  String JDBC_BATCH_QUEUE_THROTTLING_THRESHOLD =
    +  public static final String JDBC_BATCH_QUEUE_THROTTLING_THRESHOLD =
           "drill.jdbc.batch_queue_throttling_threshold";
     
       /**
        * Currently if a query is cancelled, but one of the fragments reports the status as FAILED instead of CANCELLED or
        * FINISHED we report the query result as CANCELLED by swallowing the failures occurred in fragments. This BOOT
        * setting allows the user to see the query status as failure. Useful for developers/testers.
        */
    -  String RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS =
    +  public static final String RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS =
           "drill.exec.debug.return_error_for_failure_in_cancelled_fragments";
     
    -
    -
    -
    -  String CLIENT_SUPPORT_COMPLEX_TYPES = "drill.client.supports-complex-types";
    -
    -  String OUTPUT_FORMAT_OPTION = "store.format";
    -  OptionValidator OUTPUT_FORMAT_VALIDATOR = new StringValidator(OUTPUT_FORMAT_OPTION, "parquet");
    -  String PARQUET_BLOCK_SIZE = "store.parquet.block-size";
    -  OptionValidator PARQUET_BLOCK_SIZE_VALIDATOR = new LongValidator(PARQUET_BLOCK_SIZE, 512*1024*1024);
    -  String PARQUET_PAGE_SIZE = "store.parquet.page-size";
    -  OptionValidator PARQUET_PAGE_SIZE_VALIDATOR = new LongValidator(PARQUET_PAGE_SIZE, 1024*1024);
    -  String PARQUET_DICT_PAGE_SIZE = "store.parquet.dictionary.page-size";
    -  OptionValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new LongValidator(PARQUET_DICT_PAGE_SIZE, 1024*1024);
    -  String PARQUET_WRITER_COMPRESSION_TYPE = "store.parquet.compression";
    -  OptionValidator PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new EnumeratedStringValidator(
    -      PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "snappy", "gzip", "none");
    -  String PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING = "store.parquet.enable_dictionary_encoding";
    -  OptionValidator PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR = new BooleanValidator(
    +  public static final String CLIENT_SUPPORT_COMPLEX_TYPES = "drill.client.supports-complex-types";
    +
    +  public static final String OUTPUT_FORMAT_OPTION = "store.format";
    +  public static final OptionValidator OUTPUT_FORMAT_VALIDATOR = new StringValidator(OUTPUT_FORMAT_OPTION, "parquet");
    +  public static final String PARQUET_BLOCK_SIZE = "store.parquet.block-size";
    +  public static final OptionValidator PARQUET_BLOCK_SIZE_VALIDATOR = new LongValidator(PARQUET_BLOCK_SIZE,
    +      512 * 1024 * 1024);
    +  public static final String PARQUET_PAGE_SIZE = "store.parquet.page-size";
    +  public static final OptionValidator PARQUET_PAGE_SIZE_VALIDATOR = new LongValidator(PARQUET_PAGE_SIZE,
    +      1024 * 1024);
    +  public static final String PARQUET_DICT_PAGE_SIZE = "store.parquet.dictionary.page-size";
    +  public static final OptionValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new LongValidator(PARQUET_DICT_PAGE_SIZE,
    +      1024 * 1024);
    +  public static final String PARQUET_WRITER_COMPRESSION_TYPE = "store.parquet.compression";
    +  public static final OptionValidator PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new EnumeratedStringValidator(
    +      PARQUET_WRITER_COMPRESSION_TYPE, "snappy",
    +      "Compression type for storing Parquet output. Allowed values: snappy (default), gzip, none.",
    +      "snappy", "gzip", "none");
    +  public static final String PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING = "store.parquet.enable_dictionary_encoding";
    +  public static final OptionValidator PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR = new BooleanValidator(
           PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING, false);
     
    -  String PARQUET_VECTOR_FILL_THRESHOLD = "store.parquet.vector_fill_threshold";
    -  OptionValidator PARQUET_VECTOR_FILL_THRESHOLD_VALIDATOR = new PositiveLongValidator(PARQUET_VECTOR_FILL_THRESHOLD, 99l, 85l);
    -  String PARQUET_VECTOR_FILL_CHECK_THRESHOLD = "store.parquet.vector_fill_check_threshold";
    -  OptionValidator PARQUET_VECTOR_FILL_CHECK_THRESHOLD_VALIDATOR = new PositiveLongValidator(PARQUET_VECTOR_FILL_CHECK_THRESHOLD, 100l, 10l);
    -  String PARQUET_NEW_RECORD_READER = "store.parquet.use_new_reader";
    -  OptionValidator PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR = new BooleanValidator(PARQUET_NEW_RECORD_READER, false);
    -
    -  OptionValidator COMPILE_SCALAR_REPLACEMENT = new BooleanValidator("exec.compile.scalar_replacement", false);
    -
    -  String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
    -  BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(JSON_ALL_TEXT_MODE, false);
    -  BooleanValidator JSON_EXTENDED_TYPES = new BooleanValidator("store.json.extended_types", false);
    -  BooleanValidator JSON_WRITER_UGLIFY = new BooleanValidator("store.json.writer.uglify", false);
    -  BooleanValidator JSON_WRITER_SKIPNULLFIELDS = new BooleanValidator("store.json.writer.skip_null_fields", true);
    -
    -  DoubleValidator TEXT_ESTIMATED_ROW_SIZE = new RangeDoubleValidator(
    +  public static final String PARQUET_VECTOR_FILL_THRESHOLD = "store.parquet.vector_fill_threshold";
    +  public static final OptionValidator PARQUET_VECTOR_FILL_THRESHOLD_VALIDATOR = new PositiveLongValidator(
    +      PARQUET_VECTOR_FILL_THRESHOLD, 99l, 85l);
    +  public static final String PARQUET_VECTOR_FILL_CHECK_THRESHOLD = "store.parquet.vector_fill_check_threshold";
    +  public static final OptionValidator PARQUET_VECTOR_FILL_CHECK_THRESHOLD_VALIDATOR = new PositiveLongValidator(
    +      PARQUET_VECTOR_FILL_CHECK_THRESHOLD, 100l, 10l);
    +  public static final String PARQUET_NEW_RECORD_READER = "store.parquet.use_new_reader";
    +  public static final OptionValidator PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR = new BooleanValidator(
    +      PARQUET_NEW_RECORD_READER, false,
    +      "Enables the text reader that complies with the RFC 4180 standard for text/csv files.");
    --- End diff --
    
    Very cool. Should these strings reside in a resource file? That is the 
traditional Java solution for user-visible strings of this sort...
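
    For example, here is a rough sketch of the resource-bundle approach (purely
    an illustration; the bundle name, key scheme, and helper class below are
    hypothetical, not anything Drill has today). Assuming an
    ExecConstants.properties file on the classpath with entries such as

        store.parquet.compression.desc=Compression type for storing Parquet \
          output. Allowed values: snappy (default), gzip, none.

    a small helper could look descriptions up by option name:

        import java.util.ResourceBundle;

        // Hypothetical helper, not part of this patch: resolves user-visible
        // option descriptions from a properties-file resource bundle.
        final class OptionDescriptions {
          private static final ResourceBundle BUNDLE =
              ResourceBundle.getBundle("ExecConstants");

          private OptionDescriptions() { }

          static String describe(String optionName) {
            // Assumed convention: "<option name>.desc" keys the description.
            return BUNDLE.getString(optionName + ".desc");
          }
        }

    and the validator construction would then read something like
    new EnumeratedStringValidator(PARQUET_WRITER_COMPRESSION_TYPE, "snappy",
    OptionDescriptions.describe(PARQUET_WRITER_COMPRESSION_TYPE), "snappy",
    "gzip", "none"). That would also keep the descriptions translatable without
    touching the code.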

