http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java b/src/test/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
new file mode 100644
index 0000000..58f835c
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/distributed/testsuite/DistTestSuite.java
@@ -0,0 +1,458 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.distributed.testsuite;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.pirk.encryption.Paillier;
+import org.apache.pirk.inputformat.hadoop.InputFormatConst;
+import org.apache.pirk.inputformat.hadoop.json.JSONInputFormatBase;
+import org.apache.pirk.querier.wideskies.Querier;
+import org.apache.pirk.querier.wideskies.decrypt.DecryptResponse;
+import org.apache.pirk.querier.wideskies.encrypt.EncryptQuery;
+import org.apache.pirk.query.wideskies.Query;
+import org.apache.pirk.query.wideskies.QueryInfo;
+import org.apache.pirk.responder.wideskies.ResponderProps;
+import org.apache.pirk.responder.wideskies.mapreduce.ComputeResponseTool;
+import org.apache.pirk.response.wideskies.Response;
+import org.apache.pirk.schema.response.QueryResponseJSON;
+import org.apache.pirk.serialization.HadoopFileSystemStore;
+import org.apache.pirk.test.distributed.DistributedTestDriver;
+import org.apache.pirk.test.utils.BaseTests;
+import org.apache.pirk.test.utils.Inputs;
+import org.apache.pirk.test.utils.TestUtils;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.apache.spark.launcher.SparkLauncher;
+import org.json.simple.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Distributed test class for PIR
+ * 
+ */
+public class DistTestSuite
+{
+  private static final Logger logger = LoggerFactory.getLogger(DistTestSuite.class);
+
+  // This method also tests all non-query specific configuration options/properties
+  // for the MapReduce version of PIR
+  public static void testJSONInputMR(FileSystem fs, List<JSONObject> dataElements) throws Exception
+  {
+    logger.info("Starting testJSONInputMR");
+
+    // Pull original data and query schema properties
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "100");
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+
+    // Set up base configs
+    SystemConfiguration.setProperty("pir.dataInputFormat", 
InputFormatConst.BASE_FORMAT);
+    SystemConfiguration.setProperty("pir.inputData", 
SystemConfiguration.getProperty(DistributedTestDriver.JSON_PIR_INPUT_FILE_PROPERTY));
+    SystemConfiguration.setProperty("pir.baseQuery", "?q=rcode:0");
+
+    // Run tests
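+    // (In the BaseTests calls below, the third argument selects Spark (true) vs. MapReduce (false)
+    // and the final int is the thread count handed to performQuery; the remaining boolean is
+    // assumed to flag the distributed code path.)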
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 1);
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 1);
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 2);
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 2);
+
+    BaseTests.testSRCIPQueryNoFilter(dataElements, fs, false, true, 2);
+
+    // Test hit limits per selector
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "true");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 3);
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1000");
+
+    // Test the local cache for modular exponentiation
+    SystemConfiguration.setProperty("pir.useLocalCache", "true");
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 2);
+    BaseTests.testSRCIPQuery(dataElements, fs, false, true, 2);
+    SystemConfiguration.setProperty("pir.useLocalCache", "false");
+
+    // Change query for NXDOMAIN
+    SystemConfiguration.setProperty("pir.baseQuery", "?q=rcode:3");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, false, true, 2);
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, false, true, 2);
+    SystemConfiguration.setProperty("pir.baseQuery", "?q=rcode:0");
+
+    // Test the expTable cases
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+
+    // In memory table
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "true");
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 2);
+
+    // Create exp table in hdfs
+    SystemConfiguration.setProperty("mapreduce.map.memory.mb", "10000");
+    SystemConfiguration.setProperty("mapreduce.reduce.memory.mb", "10000");
+    SystemConfiguration.setProperty("mapreduce.map.java.opts", "-Xmx9000m");
+    SystemConfiguration.setProperty("mapreduce.reduce.java.opts", "-Xmx9000m");
+
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "true");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+    SystemConfiguration.setProperty("pir.expCreationSplits", "50");
+    SystemConfiguration.setProperty("pir.numExpLookupPartitions", "150");
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 2);
+
+    // Reset exp properties
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+
+    // Reset property
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+
+    // Test embedded QuerySchema
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "true");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 1);
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "true");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 1);
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 1);
+
+    logger.info("Completed testJSONInputMR");
+  }
+
+  public static void testESInputMR(FileSystem fs, List<JSONObject> dataElements) throws Exception
+  {
+    logger.info("Starting testESInputMR");
+
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1000");
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+
+    // Set up ES configs
+    SystemConfiguration.setProperty("pir.dataInputFormat", 
InputFormatConst.ES);
+    SystemConfiguration.setProperty("pir.esQuery", "?q=rcode:0");
+    SystemConfiguration.setProperty("pir.esResource", 
SystemConfiguration.getProperty(DistributedTestDriver.ES_PIR_INPUT_RESOURCE_PROPERTY));
+
+    // Run tests
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 1);
+    BaseTests.testSRCIPQuery(dataElements, fs, false, true, 2);
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 1);
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, false, true, 2);
+    BaseTests.testDNSIPQuery(dataElements, fs, false, true, 2);
+
+    // Change query for NXDOMAIN
+    SystemConfiguration.setProperty("pir.esQuery", "?q=rcode:3");
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, false, true, 3);
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, false, true, 3);
+
+    logger.info("Completed testESInputMR");
+  }
+
+  public static void testJSONInputSpark(FileSystem fs, List<JSONObject> dataElements) throws Exception
+  {
+    logger.info("Starting testJSONInputSpark");
+
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+    SystemConfiguration.setProperty("pir.useModExpJoin", "false");
+
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1000");
+
+    SystemConfiguration.setProperty("pir.numColMultPartitions", "20");
+    SystemConfiguration.setProperty("pir.colMultReduceByKey", "false");
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+
+    // Set up JSON configs
+    SystemConfiguration.setProperty("pir.dataInputFormat", 
InputFormatConst.BASE_FORMAT);
+    SystemConfiguration.setProperty("pir.inputData", 
SystemConfiguration.getProperty(DistributedTestDriver.JSON_PIR_INPUT_FILE_PROPERTY));
+    SystemConfiguration.setProperty("pir.baseQuery", "?q=rcode:0");
+
+    // Run tests
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 1);
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 2);
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 2);
+    BaseTests.testSRCIPQuery(dataElements, fs, true, true, 2);
+
+    BaseTests.testSRCIPQueryNoFilter(dataElements, fs, true, true, 2);
+
+    // Test embedded QuerySchema
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "true");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "true");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+
+    // Test pad columns
+    SystemConfiguration.setProperty("pir.padEmptyColumns", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+    SystemConfiguration.setProperty("pir.padEmptyColumns", "false");
+
+    // Test hit limits per selector
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "true");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 3);
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1000");
+
+    // Test the local cache for modular exponentiation
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    SystemConfiguration.setProperty("pir.useLocalCache", "true");
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 3);
+
+    // Test the join functionality for the modular exponentiation table
+    SystemConfiguration.setProperty("pir.useModExpJoin", "true");
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 3);
+    SystemConfiguration.setProperty("pir.useModExpJoin", "false");
+
+    // Test file based exp lookup table for modular exponentiation
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "true");
+    SystemConfiguration.setProperty("pir.expCreationSplits", "500");
+    SystemConfiguration.setProperty("pir.numExpLookupPartitions", "150");
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 2);
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+
+    // Change query for NXDOMAIN
+    SystemConfiguration.setProperty("pir.baseQuery", "?q=rcode:3");
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, true, true, 3);
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, true, true, 3);
+
+    // Test with reduceByKey for column mult
+    SystemConfiguration.setProperty("pir.colMultReduceByKey", "true");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, true, true, 3);
+
+    logger.info("Completed testJSONInputSpark");
+  }
+
+  public static void testESInputSpark(FileSystem fs, List<JSONObject> dataElements) throws Exception
+  {
+    logger.info("Starting testESInputSpark");
+
+    SystemConfiguration.setProperty("pirTest.useHDFSExpLookupTable", "false");
+    SystemConfiguration.setProperty("pirTest.useExpLookupTable", "false");
+
+    SystemConfiguration.setProperty("pir.limitHitsPerSelector", "false");
+    SystemConfiguration.setProperty("pir.maxHitsPerSelector", "1000");
+
+    SystemConfiguration.setProperty("pir.allowAdHocQuerySchemas", "false");
+    SystemConfiguration.setProperty("pir.embedQuerySchema", "false");
+
+    // Set up ES configs
+    SystemConfiguration.setProperty("pir.dataInputFormat", 
InputFormatConst.ES);
+    SystemConfiguration.setProperty("pir.esQuery", "?q=rcode:0");
+    SystemConfiguration.setProperty("pir.esResource", 
SystemConfiguration.getProperty(DistributedTestDriver.ES_PIR_INPUT_RESOURCE_PROPERTY));
+
+    // Run tests
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 1);
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 1);
+    BaseTests.testSRCIPQuery(dataElements, fs, true, true, 2);
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSHostnameQuery(dataElements, fs, true, true, 2);
+    BaseTests.testDNSIPQuery(dataElements, fs, true, true, 2);
+
+    // Change query for NXDOMAIN
+    SystemConfiguration.setProperty("pir.esQuery", "?q=rcode:3");
+
+    SystemConfiguration.setProperty("pirTest.embedSelector", "true");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, true, true, 3);
+    SystemConfiguration.setProperty("pirTest.embedSelector", "false");
+    BaseTests.testDNSNXDOMAINQuery(dataElements, fs, true, true, 3);
+
+    logger.info("Completed testESInputSpark");
+  }
+
+  // Base method to perform query
+  public static List<QueryResponseJSON> performQuery(String queryType, ArrayList<String> selectors, FileSystem fs, boolean isSpark, int numThreads)
+      throws Exception
+  {
+    logger.info("performQuery: ");
+
+    String queryInputDir = SystemConfiguration.getProperty(DistributedTestDriver.PIR_QUERY_INPUT_DIR);
+    String outputFile = SystemConfiguration.getProperty(DistributedTestDriver.OUTPUT_DIRECTORY_PROPERTY);
+    fs.delete(new Path(outputFile), true); // Ensure old output does not exist.
+
+    SystemConfiguration.setProperty("pir.queryInput", queryInputDir);
+    SystemConfiguration.setProperty("pir.outputFile", outputFile);
+    SystemConfiguration.setProperty("pir.numReduceTasks", "1");
+    SystemConfiguration.setProperty("pir.stopListFile", 
SystemConfiguration.getProperty(DistributedTestDriver.PIR_STOPLIST_FILE));
+
+    // Create the temp result file
+    File fileFinalResults = File.createTempFile("finalResultsFile", ".txt");
+    fileFinalResults.deleteOnExit();
+    logger.info("fileFinalResults = " + fileFinalResults.getAbsolutePath());
+
+    boolean embedSelector = SystemConfiguration.getBooleanProperty("pirTest.embedSelector", false);
+    boolean useExpLookupTable = SystemConfiguration.getBooleanProperty("pirTest.useExpLookupTable", false);
+    boolean useHDFSExpLookupTable = SystemConfiguration.getBooleanProperty("pirTest.useHDFSExpLookupTable", false);
+
+    // Set the necessary objects
+    QueryInfo queryInfo = new QueryInfo(BaseTests.queryIdentifier, selectors.size(), BaseTests.hashBitSize, BaseTests.hashKey, BaseTests.dataPartitionBitSize,
+        queryType, useExpLookupTable, embedSelector, useHDFSExpLookupTable);
+
+    Paillier paillier = new Paillier(BaseTests.paillierBitSize, BaseTests.certainty);
+
+    // Perform the encryption
+    logger.info("Performing encryption of the selectors - forming encrypted 
query vectors:");
+    EncryptQuery encryptQuery = new EncryptQuery(queryInfo, selectors, 
paillier);
+    encryptQuery.encrypt(numThreads);
+    logger.info("Completed encryption of the selectors - completed formation 
of the encrypted query vectors:");
+
+    // Grab the necessary objects
+    Querier querier = encryptQuery.getQuerier();
+    Query query = encryptQuery.getQuery();
+
+    // Write the Querier object to a file
+    Path queryInputDirPath = new Path(queryInputDir);
+    new HadoopFileSystemStore(fs).store(queryInputDirPath, query);
+    fs.deleteOnExit(queryInputDirPath);
+
+    // Grab the original data and query schema properties to reset upon completion
+    String dataSchemaProp = SystemConfiguration.getProperty("data.schemas");
+    String querySchemaProp = SystemConfiguration.getProperty("query.schemas");
+
+    // Get the correct input format class name
+    JSONInputFormatBase jFormat = new JSONInputFormatBase();
+    String jsonBaseInputFormatString = jFormat.getClass().getName();
+    SystemConfiguration.setProperty("pir.baseInputFormat", 
jsonBaseInputFormatString);
+
+    // Submitting the tool for encrypted query
+    logger.info("Performing encrypted query:");
+    if (isSpark)
+    {
+      // Build args
+      String inputFormat = SystemConfiguration.getProperty("pir.dataInputFormat");
+      logger.info("inputFormat = " + inputFormat);
+      ArrayList<String> args = new ArrayList<>();
+      args.add("-" + ResponderProps.PLATFORM + "=spark");
+      args.add("-" + ResponderProps.DATAINPUTFORMAT + "=" + inputFormat);
+      args.add("-" + ResponderProps.QUERYINPUT + "=" + 
SystemConfiguration.getProperty("pir.queryInput"));
+      args.add("-" + ResponderProps.OUTPUTFILE + "=" + 
SystemConfiguration.getProperty("pir.outputFile"));
+      args.add("-" + ResponderProps.STOPLISTFILE + "=" + 
SystemConfiguration.getProperty("pir.stopListFile"));
+      args.add("-" + ResponderProps.USELOCALCACHE + "=" + 
SystemConfiguration.getProperty("pir.useLocalCache", "true"));
+      args.add("-" + ResponderProps.LIMITHITSPERSELECTOR + "=" + 
SystemConfiguration.getProperty("pir.limitHitsPerSelector", "false"));
+      args.add("-" + ResponderProps.MAXHITSPERSELECTOR + "=" + 
SystemConfiguration.getProperty("pir.maxHitsPerSelector", "1000"));
+      args.add("-" + ResponderProps.QUERYSCHEMAS + "=" + 
Inputs.HDFS_QUERY_FILES);
+      args.add("-" + ResponderProps.DATASCHEMAS + "=" + 
Inputs.DATA_SCHEMA_FILE_HDFS);
+      args.add("-" + ResponderProps.NUMEXPLOOKUPPARTS + "=" + 
SystemConfiguration.getProperty("pir.numExpLookupPartitions", "100"));
+      args.add("-" + ResponderProps.USEMODEXPJOIN + "=" + 
SystemConfiguration.getProperty("pir.useModExpJoin", "false"));
+      args.add("-" + ResponderProps.NUMCOLMULTPARTITIONS + "=" + 
SystemConfiguration.getProperty("pir.numColMultPartitions", "20"));
+      args.add("-" + ResponderProps.COLMULTREDUCEBYKEY + "=" + 
SystemConfiguration.getProperty("pir.colMultReduceByKey", "false"));
+      if (inputFormat.equals(InputFormatConst.BASE_FORMAT))
+      {
+        args.add("-" + ResponderProps.INPUTDATA + "=" + 
SystemConfiguration.getProperty("pir.inputData"));
+        args.add("-" + ResponderProps.BASEQUERY + "=" + 
SystemConfiguration.getProperty("pir.baseQuery"));
+        args.add("-" + ResponderProps.BASEINPUTFORMAT + "=" + 
SystemConfiguration.getProperty("pir.baseInputFormat"));
+      }
+      else if (inputFormat.equals(InputFormatConst.ES))
+      {
+        args.add("-" + ResponderProps.ESQUERY + "=" + 
SystemConfiguration.getProperty("pir.esQuery"));
+        args.add("-" + ResponderProps.ESRESOURCE + "=" + 
SystemConfiguration.getProperty("pir.esResource"));
+      }
+
+      for (String arg : args)
+      {
+        logger.info("arg = " + arg);
+      }
+
+      // Run spark application
+      Process sLauncher = new SparkLauncher().setAppResource(SystemConfiguration.getProperty("jarFile"))
+          .setSparkHome(SystemConfiguration.getProperty("spark.home")).setMainClass("org.apache.pirk.responder.wideskies.ResponderDriver")
+          .addAppArgs(args.toArray(new String[args.size()])).setMaster("yarn-cluster").setConf(SparkLauncher.EXECUTOR_MEMORY, "2g")
+          .setConf(SparkLauncher.DRIVER_MEMORY, "2g").setConf(SparkLauncher.EXECUTOR_CORES, "1").launch();
+      sLauncher.waitFor();
+    }
+    else
+    {
+      SystemConfiguration.setProperty("data.schemas", 
Inputs.DATA_SCHEMA_FILE_HDFS);
+      SystemConfiguration.setProperty("query.schemas", 
Inputs.HDFS_QUERY_FILES);
+
+      ComputeResponseTool responseTool = new ComputeResponseTool();
+      ToolRunner.run(responseTool, new String[] {});
+    }
+    logger.info("Completed encrypted query");
+
+    // Perform decryption
+    // Reconstruct the necessary objects from the files
+    logger.info("Performing decryption; writing final results file");
+    Response response = new HadoopFileSystemStore(fs).recall(outputFile, Response.class);
+
+    // Perform decryption and output the result file
+    DecryptResponse decryptResponse = new DecryptResponse(response, querier);
+    decryptResponse.decrypt(numThreads);
+    decryptResponse.writeResultFile(fileFinalResults);
+    logger.info("Completed performing decryption and writing final results 
file");
+
+    // Read in results
+    logger.info("Reading in and checking results");
+    List<QueryResponseJSON> results = TestUtils.readResultsFile(fileFinalResults);
+
+    // Reset data and query schema properties
+    SystemConfiguration.setProperty("data.schemas", dataSchemaProp);
+    SystemConfiguration.setProperty("query.schemas", querySchemaProp);
+
+    // Clean up output dir in hdfs
+    fs.delete(new Path(outputFile), true);
+
+    return results;
+  }
+}
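
For orientation, the encrypt-respond-decrypt round trip that performQuery() drives can be reduced to the sketch below. It reuses only the Pirk calls already visible in this file; the responder step (MapReduce via ComputeResponseTool or Spark via SparkLauncher) is elided, and queryInfo, paillier, selectors, fs, outputFile, and fileFinalResults stand for the objects built above:

    // Querier side: encrypt the selectors into an encrypted query vector
    EncryptQuery encryptQuery = new EncryptQuery(queryInfo, selectors, paillier);
    encryptQuery.encrypt(numThreads);
    Query query = encryptQuery.getQuery();        // shipped to the responder
    Querier querier = encryptQuery.getQuerier();  // held privately for decryption

    // ... responder computes an encrypted Response over the target data ...

    // Querier side: recall the Response and decrypt it into cleartext results
    Response response = new HadoopFileSystemStore(fs).recall(outputFile, Response.class);
    DecryptResponse decryptResponse = new DecryptResponse(response, querier);
    decryptResponse.decrypt(numThreads);
    decryptResponse.writeResultFile(fileFinalResults);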

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/general/ISO8601DateParserTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/general/ISO8601DateParserTest.java b/src/test/java/org/apache/pirk/test/general/ISO8601DateParserTest.java
new file mode 100644
index 0000000..9821db3
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/general/ISO8601DateParserTest.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.general;
+
+import static org.junit.Assert.assertEquals;
+
+import java.text.ParseException;
+
+import org.apache.pirk.utils.ISO8601DateParser;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class to test basic functionality of ISO8601DateParser class
+ */
+public class ISO8601DateParserTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(ISO8601DateParserTest.class);
+
+  @Test
+  public void testDateParsing() throws ParseException
+  {
+    logger.info("Starting testDateParsing: ");
+
+    String date = "2016-02-20T23:29:05.000Z";
+    long longDate = Long.parseLong("1456010945000"); // date in UTC
+
+    assertEquals(longDate, ISO8601DateParser.getLongDate(date));
+    assertEquals(date, ISO8601DateParser.fromLongDate(longDate));
+
+    logger.info("Successfully completed testDateParsing");
+  }
+}
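
The round trip asserted above is consistent with a UTC-pinned SimpleDateFormat using the pattern yyyy-MM-dd'T'HH:mm:ss.SSS'Z'. A minimal sketch of that parse/format symmetry (an assumption about ISO8601DateParser's internals, not a copy of them; the epoch value matches the test's 1456010945000):

    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.TimeZone;

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
    format.setTimeZone(TimeZone.getTimeZone("UTC"));      // the test's long value is UTC-based
    long longDate = format.parse("2016-02-20T23:29:05.000Z").getTime(); // 1456010945000L
    String roundTrip = format.format(new Date(longDate)); // "2016-02-20T23:29:05.000Z"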

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/general/KeyedHashTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/general/KeyedHashTest.java b/src/test/java/org/apache/pirk/test/general/KeyedHashTest.java
new file mode 100644
index 0000000..e53cebf
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/general/KeyedHashTest.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.general;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.pirk.utils.KeyedHash;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Basic functional tests for KeyedHash
+ * 
+ */
+public class KeyedHashTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(KeyedHashTest.class);
+
+  @Test
+  public void testKeyedHash()
+  {
+    logger.info("Starting testKeyedHash: ");
+
+    int hash1 = KeyedHash.hash("someKey", 12, "someInput");
+    logger.info("hash1 = " + hash1 + " hash1 = " + Integer.toString(hash1, 2));
+
+    int hash2 = KeyedHash.hash("someKey", 32, "someInput");
+    logger.info("hash2 = " + hash2 + " hash2 = " + Integer.toString(hash2, 2));
+
+    int hash3 = KeyedHash.hash("someKey", 34, "someInput");
+    logger.info("hash3 = " + hash3 + " hash3 = " + Integer.toString(hash3, 2));
+
+    assertEquals(hash2, hash3);
+    assertEquals(hash1, hash2 & 0xFFF);
+
+    logger.info("Successfully completed testKeyedHash");
+  }
+
+  @Test
+  public void testKeyedHashWithType()
+  {
+    testKeyedHashType("MD5");
+    testKeyedHashType("SHA-1");
+    testKeyedHashType("SHA-256");
+    testKeyedHashType("FAKE-HASH-TYPE");
+  }
+
+  private void testKeyedHashType(String type)
+  {
+    logger.info("Starting testKeyedHashType with type: " + type);
+
+    int hash1 = KeyedHash.hash("someKey", 12, "someInput", type);
+    logger.info("hash1 = " + hash1 + " hash1 = " + Integer.toString(hash1, 2));
+
+    int hash2 = KeyedHash.hash("someKey", 32, "someInput", type);
+    logger.info("hash2 = " + hash2 + " hash2 = " + Integer.toString(hash2, 2));
+
+    int hash3 = KeyedHash.hash("someKey", 34, "someInput", type);
+    logger.info("hash3 = " + hash3 + " hash3 = " + Integer.toString(hash3, 2));
+
+    assertEquals(hash2, hash3);
+    assertEquals(hash1, hash2 & 0xFFF);
+
+    logger.info("Successfully completed testKeyedHashType with type: " + type);
+  }
+}
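
Both tests assert the same two properties of KeyedHash.hash: a hash requested at bitSize n equals the 32-bit hash masked to its low n bits, and a bitSize above 32 behaves as 32 (hash2 == hash3). A compact restatement of what the assertions imply (run with -ea to enable the asserts):

    int full = KeyedHash.hash("someKey", 32, "someInput");
    // A 12-bit hash is the 32-bit hash truncated by the mask (1 << 12) - 1 == 0xFFF
    assert KeyedHash.hash("someKey", 12, "someInput") == (full & 0xFFF);
    // bitSize 34 exceeds the 32 bits of an int, so it is effectively clamped to 32
    assert KeyedHash.hash("someKey", 34, "someInput") == full;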

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/general/PaillierTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/general/PaillierTest.java b/src/test/java/org/apache/pirk/test/general/PaillierTest.java
new file mode 100644
index 0000000..6aa1396
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/general/PaillierTest.java
@@ -0,0 +1,303 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.general;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
+import java.math.BigInteger;
+import java.util.Random;
+
+import org.apache.pirk.encryption.Paillier;
+import org.apache.pirk.utils.PIRException;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Basic test functionality for Paillier library
+ * 
+ */
+public class PaillierTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(PaillierTest.class);
+
+  private static BigInteger p = null; // large prime
+  private static BigInteger q = null; // large prime
+  private static BigInteger N = null; // N=pq, RSA modulus
+  private static BigInteger NSquared = null; // N^2
+  private static BigInteger lambdaN = null; // lambda(N) = lcm(p-1,q-1)
+
+  private static int bitLength = 0; // bit length of the modulus N
+  private static int certainty = 64; // probability that the generated BigInteger values are prime exceeds (1 - (1/2)^certainty)
+
+  private static BigInteger r1 = null; // random number in (Z/NZ)*
+  private static BigInteger r2 = null; // random number in (Z/NZ)*
+
+  private static BigInteger m1 = null; // message to encrypt
+  private static BigInteger m2 = null; // message to encrypt
+
+  @BeforeClass
+  public static void setup()
+  {
+    p = BigInteger.valueOf(7);
+    q = BigInteger.valueOf(17);
+    N = p.multiply(q);
+    NSquared = N.multiply(N);
+
+    lambdaN = BigInteger.valueOf(48);
+
+    r1 = BigInteger.valueOf(3);
+    r2 = BigInteger.valueOf(4);
+
+    m1 = BigInteger.valueOf(5);
+    m2 = BigInteger.valueOf(2);
+
+    bitLength = 201;// bitLength = 384;
+    certainty = 128;
+
+    logger.info("p = " + p.intValue() + " q = " + q.intValue() + " N = " + 
N.intValue() + " bitLength = " + N.bitLength() + " lambdaN = " + lambdaN + " m1 
= "
+        + m1.intValue() + " m2 = " + m2.intValue() + " r1 = " + r1.intValue() 
+ " r2 = " + r2.intValue());
+  }
+
+  @Test
+  public void testPIRExceptions()
+  {
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(2), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for p,q < 3");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(3), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for p < 3");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(3), BigInteger.valueOf(2), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for q < 3");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(7), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for p = q");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(8), BigInteger.valueOf(7), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for p not prime");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(10), 128);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for q not prime");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      int systemPrimeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 128);
+      Paillier paillier = new Paillier(3072, systemPrimeCertainty - 10);
+      assertNotNull(paillier);
+      fail("Paillier constructor did not throw PIRException for certainty less 
than system default of " + systemPrimeCertainty);
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier pailler = new Paillier(p, q, bitLength);
+      BigInteger encM1 = pailler.encrypt(N);
+      assertNotNull(encM1);
+      fail("Paillier encryption did not throw PIRException for message m = N");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier pailler = new Paillier(p, q, bitLength);
+      BigInteger encM1 = pailler.encrypt(N.add(BigInteger.TEN));
+      assertNotNull(encM1);
+      fail("Paillier encryption did not throw PIRException for message m > N");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier pailler = new Paillier(bitLength, 128, bitLength);
+      assertNotNull(pailler);
+      fail("Paillier constructor did not throw PIRException for ensureBitSet = 
bitLength");
+    } catch (PIRException ignore)
+    {}
+
+    try
+    {
+      Paillier pailler = new Paillier(bitLength, 128, bitLength + 1);
+      assertNotNull(pailler);
+      fail("Paillier constructor did not throw PIRException for ensureBitSet > 
bitLength");
+    } catch (PIRException ignore)
+    {}
+  }
+
+  @Test
+  public void testPaillierGivenAllParameters() throws Exception
+  {
+    logger.info("Starting testPaillierGivenAllParameters: ");
+
+    Paillier pailler = new Paillier(p, q, bitLength);
+
+    assertEquals(pailler.getN(), N);
+    assertEquals(pailler.getLambdaN(), lambdaN);
+
+    // Check encryption
+    BigInteger encM1 = pailler.encrypt(m1, r1);
+    BigInteger encM2 = pailler.encrypt(m2, r2);
+    logger.info("encM1 = " + encM1.intValue() + " encM2 = " + 
encM2.intValue());
+
+    assertEquals(encM1, BigInteger.valueOf(14019));
+    assertEquals(encM2, BigInteger.valueOf(8836));
+
+    // Check decryption
+    BigInteger decM1 = pailler.decrypt(encM1);
+    BigInteger decM2 = pailler.decrypt(encM2);
+    logger.info("decM1 = " + decM1.intValue() + " decM2 = " + 
decM2.intValue());
+
+    assertEquals(decM1, m1);
+    assertEquals(decM2, m2);
+
+    // Check homomorphic property: E_r1(m1)*E_r2(m2) mod N^2 = E_r1r2((m1+m2) mod N) mod N^2
+    BigInteger encM1_times_encM2 = (encM1.multiply(encM2)).mod(NSquared);
+    BigInteger encM1plusM2 = pailler.encrypt((m1.add(m2)).mod(N), r1.multiply(r2));
+    logger.info("encM1_times_encM2 = " + encM1_times_encM2.intValue() + " encM1plusM2 = " + encM1plusM2.intValue());
+
+    assertEquals(encM1_times_encM2, BigInteger.valueOf(5617));
+    assertEquals(encM1plusM2, BigInteger.valueOf(5617));
+
+    logger.info("Successfully completed testPaillierGivenAllParameters: ");
+  }
+
+  @Test
+  public void testPaillierWithKeyGeneration() throws Exception
+  {
+    logger.info("Starting testPaillierWithKeyGeneration: ");
+
+    // Test with and without gmp optimization for modPow
+    SystemConfiguration.setProperty("pallier.FIPSPrimeGenerationChecks", 
"true");
+    SystemConfiguration.setProperty("paillier.useGMPForModPow", "true");
+    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "true");
+    testPaillerWithKeyGenerationGeneral();
+
+    SystemConfiguration.setProperty("pallier.FIPSPrimeGenerationChecks", 
"false");
+
+    SystemConfiguration.setProperty("paillier.useGMPForModPow", "true");
+    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "true");
+    testPaillerWithKeyGenerationGeneral();
+
+    SystemConfiguration.setProperty("paillier.useGMPForModPow", "true");
+    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false");
+    testPaillerWithKeyGenerationGeneral();
+
+    SystemConfiguration.setProperty("paillier.useGMPForModPow", "false");
+    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false");
+    testPaillerWithKeyGenerationGeneral();
+
+    // Reset the properties
+    SystemConfiguration.initialize();
+
+    logger.info("Ending testPaillierWithKeyGeneration: ");
+  }
+
+  public void testPaillerWithKeyGenerationGeneral() throws Exception
+  {
+    // Test without requiring highest bit to be set
+    logger.info("Starting testPaillierWithKeyGenerationBitSetOption with 
ensureHighBitSet = false");
+    testPaillierWithKeyGenerationBitSetOption(-1);
+
+    // Test requiring highest bit to be set
+    logger.info("Starting testPaillierWithKeyGenerationBitSetOption with 
ensureHighBitSet = true");
+    testPaillierWithKeyGenerationBitSetOption(5);
+  }
+
+  public void testPaillierWithKeyGenerationBitSetOption(int ensureBitSet) throws Exception
+  {
+    Random r = new Random();
+    int lowBitLength = 3073; // inclusive
+    int highBitLength = 7001; // exclusive
+
+    int loopVal = 1; // int loopVal = 1000; // change this and re-test for high loop testing
+    for (int i = 0; i < loopVal; ++i)
+    {
+      logger.info("i = " + i);
+
+      basicTestPaillierWithKeyGeneration(bitLength, certainty, ensureBitSet);
+      basicTestPaillierWithKeyGeneration(3072, certainty, ensureBitSet);
+
+      // Test with random bit length between 3073 and 7000
+      int randomLargeBitLength = r.nextInt(highBitLength - lowBitLength) + lowBitLength;
+      basicTestPaillierWithKeyGeneration(randomLargeBitLength, certainty, ensureBitSet);
+    }
+  }
+
+  private void basicTestPaillierWithKeyGeneration(int bitLengthInput, int certaintyInput, int ensureBitSet) throws Exception
+  {
+    Paillier pailler = new Paillier(bitLengthInput, certaintyInput, ensureBitSet);
+    BigInteger generatedN = pailler.getN();
+    BigInteger geneartedNsquared = generatedN.multiply(generatedN);
+
+    // Check that decrypting the encryption yields the original message
+    BigInteger encM1 = pailler.encrypt(m1);
+    BigInteger encM2 = pailler.encrypt(m2);
+    logger.info("encM1 = " + encM1.intValue() + " encM2 = " + 
encM2.intValue());
+
+    BigInteger decM1 = pailler.decrypt(encM1);
+    BigInteger decM2 = pailler.decrypt(encM2);
+    logger.info("decM1 = " + decM1.intValue() + " decM2 = " + 
decM2.intValue());
+
+    assertEquals(decM1, m1);
+    assertEquals(decM2, m2);
+
+    // Check homomorphic property: E_r1(m1)*E_r2(m2) mod N^2 = E_r1r2((m1+m2) mod N) mod N^2
+    BigInteger encM1_times_encM2 = (encM1.multiply(encM2)).mod(geneartedNsquared);
+    BigInteger multDecrypt = pailler.decrypt(encM1_times_encM2);
+    BigInteger m1_plus_m2 = (m1.add(m2)).mod(N);
+
+    logger.info("encM1_times_encM2 = " + encM1_times_encM2.intValue() + " 
multDecrypt = " + multDecrypt.intValue() + " m1_plus_m2 = " + 
m1_plus_m2.intValue());
+
+    assertEquals(multDecrypt, m1_plus_m2);
+  }
+}
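
The fixed ciphertext values asserted in testPaillierGivenAllParameters (14019, 8836, 5617) are consistent with textbook Paillier using the generator g = N + 1, where E(m, r) = g^m * r^N mod N^2 and N = pq. A minimal, runnable sketch that reproduces the asserted encM1 with plain BigInteger arithmetic (an illustration of the scheme under that assumption, not a copy of Paillier.java):

    import java.math.BigInteger;

    public class PaillierSketch
    {
      public static void main(String[] args)
      {
        BigInteger p = BigInteger.valueOf(7), q = BigInteger.valueOf(17);
        BigInteger N = p.multiply(q);          // 119
        BigInteger NSquared = N.multiply(N);   // 14161
        BigInteger g = N.add(BigInteger.ONE);  // 120, the standard choice g = N + 1
        BigInteger m1 = BigInteger.valueOf(5), r1 = BigInteger.valueOf(3);
        // E(m1, r1) = g^m1 * r1^N mod N^2
        BigInteger encM1 = g.modPow(m1, NSquared).multiply(r1.modPow(N, NSquared)).mod(NSquared);
        System.out.println(encM1); // prints 14019, matching the test's expected value
      }
    }

The homomorphic check in the test follows directly from this form: multiplying two ciphertexts multiplies their g^m factors, so the messages add modulo N.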

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/general/PartitionUtilsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/general/PartitionUtilsTest.java b/src/test/java/org/apache/pirk/test/general/PartitionUtilsTest.java
new file mode 100644
index 0000000..c1022ad
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/general/PartitionUtilsTest.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.general;
+
+import org.apache.pirk.schema.data.partitioner.IPDataPartitioner;
+import org.apache.pirk.schema.data.partitioner.ISO8601DatePartitioner;
+import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner;
+import org.apache.pirk.utils.PIRException;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+/**
+ * Class to functionally test the bit conversion utils
+ */
+public class PartitionUtilsTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(PartitionUtilsTest.class);
+
+  @Test
+  public void testMask()
+  {
+    logger.info("Starting testMask: ");
+
+    assertEquals(0, PrimitiveTypePartitioner.formBitMask(0).intValue());
+
+    assertEquals(0b000000000000001, PrimitiveTypePartitioner.formBitMask(1).intValue());
+    assertEquals(0b000000000001111, PrimitiveTypePartitioner.formBitMask(4).intValue());
+    assertEquals(0b000000001111111, PrimitiveTypePartitioner.formBitMask(7).intValue());
+    assertEquals(0b111111111111111, PrimitiveTypePartitioner.formBitMask(15).intValue());
+
+    assertEquals(new BigInteger("FFFFF", 16), PrimitiveTypePartitioner.formBitMask(20));
+    assertEquals(new BigInteger("FFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(32));
+    assertEquals(new BigInteger("3FFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(42));
+    assertEquals(new BigInteger("7FFFFFFFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(63));
+
+    logger.info("Successfully completed testMask");
+  }
+
+  @Test
+  public void testPartitionBits() throws PIRException
+  {
+    logger.info("Starting testPartitionBits: ");
+
+    BigInteger value = new BigInteger("245"); // 11110101
+    BigInteger value2 = new BigInteger("983"); // 1111010111
+
+    BigInteger mask4 = PrimitiveTypePartitioner.formBitMask(4); // 1111
+    BigInteger mask8 = PrimitiveTypePartitioner.formBitMask(8); // 11111111
+
+    List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask4);
+    assertEquals(2, partitions.size());
+    assertEquals(0b1111, partitions.get(0).intValue());
+    assertEquals(0b0101, partitions.get(1).intValue());
+
+    partitions = PrimitiveTypePartitioner.partitionBits(value2, 4, mask4);
+    assertEquals(3, partitions.size());
+    assertEquals(0b1111, partitions.get(0).intValue()); 
+    assertEquals(0b0101, partitions.get(1).intValue());
+    assertEquals(0b0011, partitions.get(2).intValue());
+
+    partitions = PrimitiveTypePartitioner.partitionBits(value, 8, mask8);
+    assertEquals(1, partitions.size());
+    assertEquals(0b11110101, partitions.get(0).intValue());
+
+    try
+    {
+      partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask8);
+      fail("BitConversionUtils.partitionBits did not throw error for 
mismatched partitionSize and mask size");
+    } catch (Exception ignore)
+    {
+      // Expected.
+    }
+
+    logger.info("Successfully completed testPartitionBits");
+  }
+
+  @Test
+  public void testPartitions() throws Exception
+  {
+    logger.info("Starting testToPartitions:");
+
+    PrimitiveTypePartitioner primitivePartitioner = new PrimitiveTypePartitioner();
+    IPDataPartitioner ipPartitioner = new IPDataPartitioner();
+    ISO8601DatePartitioner datePartitioner = new ISO8601DatePartitioner();
+
+    // Test IP
+    String ipTest = "127.0.0.1";
+    List<BigInteger> partsIP = ipPartitioner.toPartitions(ipTest, PrimitiveTypePartitioner.STRING);
+    assertEquals(4, partsIP.size());
+    assertEquals(ipTest, ipPartitioner.fromPartitions(partsIP, 0, PrimitiveTypePartitioner.STRING));
+
+    // Test Date
+    String dateTest = "2016-02-20T23:29:05.000Z";
+    List<BigInteger> partsDate = datePartitioner.toPartitions(dateTest, null);
+    assertEquals(8, partsDate.size());
+    assertEquals(dateTest, datePartitioner.fromPartitions(partsDate, 0, null));
+
+    // Test byte
+    byte bTest = Byte.parseByte("10");
+    List<BigInteger> partsByte = primitivePartitioner.toPartitions(bTest, PrimitiveTypePartitioner.BYTE);
+    assertEquals(1, partsByte.size());
+    assertEquals(bTest, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE));
+
+    partsByte = primitivePartitioner.toPartitions("12", PrimitiveTypePartitioner.BYTE);
+    assertEquals(1, partsByte.size());
+    assertEquals((byte) 12, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE));
+
+    List<BigInteger> partsByteMax = primitivePartitioner.toPartitions(Byte.MAX_VALUE, PrimitiveTypePartitioner.BYTE);
+    assertEquals(1, partsByteMax.size());
+    assertEquals(Byte.MAX_VALUE, primitivePartitioner.fromPartitions(partsByteMax, 0, PrimitiveTypePartitioner.BYTE));
+
+    // Test string
+    String stringBits = SystemConfiguration.getProperty("pir.stringBits");
+    SystemConfiguration.setProperty("pir.stringBits", "64");
+    testString("testString"); // over the allowed bit size
+    testString("t"); // under the allowed bit size
+    SystemConfiguration.setProperty("pir.stringBits", stringBits);
+
+    // Test short
+    short shortTest = Short.valueOf("2456");
+    List<BigInteger> partsShort = primitivePartitioner.toPartitions(shortTest, PrimitiveTypePartitioner.SHORT);
+    assertEquals(2, partsShort.size());
+    assertEquals(shortTest, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
+
+    partsShort = primitivePartitioner.toPartitions("32767", PrimitiveTypePartitioner.SHORT);
+    assertEquals(2, partsShort.size());
+    assertEquals((short) 32767, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
+
+    partsShort = primitivePartitioner.toPartitions((short) -42, PrimitiveTypePartitioner.SHORT);
+    assertEquals(2, partsShort.size());
+    assertEquals((short) -42, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT));
+
+    List<BigInteger> partsShortMax = primitivePartitioner.toPartitions(Short.MAX_VALUE, PrimitiveTypePartitioner.SHORT);
+    assertEquals(2, partsShortMax.size());
+    assertEquals(Short.MAX_VALUE, primitivePartitioner.fromPartitions(partsShortMax, 0, PrimitiveTypePartitioner.SHORT));
+
+    // Test int
+    int intTest = Integer.parseInt("-5789");
+    List<BigInteger> partsInt = primitivePartitioner.toPartitions(intTest, PrimitiveTypePartitioner.INT);
+    assertEquals(4, partsInt.size());
+    assertEquals(intTest, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT));
+
+    partsInt = primitivePartitioner.toPartitions("2016", PrimitiveTypePartitioner.INT);
+    assertEquals(4, partsInt.size());
+    assertEquals(2016, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT));
+
+    partsInt = primitivePartitioner.toPartitions(1386681237, PrimitiveTypePartitioner.INT);
+    assertEquals(4, partsInt.size());
+    assertEquals(1386681237, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT));
+
+    List<BigInteger> partsIntMax = primitivePartitioner.toPartitions(Integer.MAX_VALUE, PrimitiveTypePartitioner.INT);
+    assertEquals(4, partsIntMax.size());
+    assertEquals(Integer.MAX_VALUE, primitivePartitioner.fromPartitions(partsIntMax, 0, PrimitiveTypePartitioner.INT));
+
+    // Test long
+    long longTest = Long.parseLong("56789");
+    List<BigInteger> partsLong = primitivePartitioner.toPartitions(longTest, PrimitiveTypePartitioner.LONG);
+    assertEquals(8, partsLong.size());
+    assertEquals(longTest, primitivePartitioner.fromPartitions(partsLong, 0, PrimitiveTypePartitioner.LONG));
+
+    List<BigInteger> partsLongMax = primitivePartitioner.toPartitions(Long.MAX_VALUE, PrimitiveTypePartitioner.LONG);
+    assertEquals(8, partsLongMax.size());
+    assertEquals(Long.MAX_VALUE, primitivePartitioner.fromPartitions(partsLongMax, 0, PrimitiveTypePartitioner.LONG));
+
+    // Test float
+    float floatTest = Float.parseFloat("567.77");
+    List<BigInteger> partsFloat = primitivePartitioner.toPartitions(floatTest, PrimitiveTypePartitioner.FLOAT);
+    assertEquals(4, partsFloat.size());
+    assertEquals(floatTest, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT));
+
+    partsFloat = primitivePartitioner.toPartitions(-99.99f, PrimitiveTypePartitioner.FLOAT);
+    assertEquals(4, partsFloat.size());
+    assertEquals(-99.99f, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT));
+
+    List<BigInteger> partsFloatMax = primitivePartitioner.toPartitions(Float.MAX_VALUE, PrimitiveTypePartitioner.FLOAT);
+    assertEquals(4, partsFloatMax.size());
+    assertEquals(Float.MAX_VALUE, primitivePartitioner.fromPartitions(partsFloatMax, 0, PrimitiveTypePartitioner.FLOAT));
+
+    // Test double
+    double doubleTest = Double.parseDouble("567.77");
+    List<BigInteger> partsDouble = primitivePartitioner.toPartitions(doubleTest, PrimitiveTypePartitioner.DOUBLE);
+    assertEquals(8, partsDouble.size());
+    assertEquals(doubleTest, primitivePartitioner.fromPartitions(partsDouble, 0, PrimitiveTypePartitioner.DOUBLE));
+
+    List<BigInteger> partsDoubleMax = primitivePartitioner.toPartitions(Double.MAX_VALUE, PrimitiveTypePartitioner.DOUBLE);
+    assertEquals(8, partsDoubleMax.size());
+    assertEquals(Double.MAX_VALUE, primitivePartitioner.fromPartitions(partsDoubleMax, 0, PrimitiveTypePartitioner.DOUBLE));
+
+    // Test char
+    char charTest = 'b';
+    List<BigInteger> partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR);
+    assertEquals(2, partsChar.size());
+    assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR));
+
+    // Ensure Endianness preserved
+    charTest = '\uFFFE';
+    partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR);
+    assertEquals(2, partsChar.size());
+    assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR));
+
+    charTest = '\uFEFF';
+    partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR);
+    assertEquals(2, partsChar.size());
+    assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR));
+
+    List<BigInteger> partsCharMax = primitivePartitioner.toPartitions(Character.MAX_VALUE, PrimitiveTypePartitioner.CHAR);
+    assertEquals(2, partsCharMax.size());
+    assertEquals(Character.MAX_VALUE, primitivePartitioner.fromPartitions(partsCharMax, 0, PrimitiveTypePartitioner.CHAR));
+
+    logger.info("Sucessfully completed testToPartitions:");
+  }
+
+  @Test
+  public void testPaddedPartitions() throws PIRException
+  {
+    PrimitiveTypePartitioner primitivePartitioner = new PrimitiveTypePartitioner();
+
+    List<String> primitiveTypes = Arrays.asList(PrimitiveTypePartitioner.BYTE, PrimitiveTypePartitioner.CHAR, PrimitiveTypePartitioner.SHORT,
+        PrimitiveTypePartitioner.INT, PrimitiveTypePartitioner.LONG, PrimitiveTypePartitioner.FLOAT, PrimitiveTypePartitioner.DOUBLE,
+        PrimitiveTypePartitioner.STRING);
+    for (String type : primitiveTypes)
+    {
+      assertEquals(primitivePartitioner.getNumPartitions(type), primitivePartitioner.getPaddedPartitions(type).size());
+    }
+  }
+
+  private void testString(String testString) throws Exception
+  {
+    PrimitiveTypePartitioner ptp = new PrimitiveTypePartitioner();
+
+    List<BigInteger> partsString = ptp.toPartitions(testString, PrimitiveTypePartitioner.STRING);
+    int numParts = Integer.parseInt(SystemConfiguration.getProperty("pir.stringBits")) / 8;
+    assertEquals(numParts, partsString.size());
+
+    logger.info("testString.getBytes().length = " + 
testString.getBytes().length);
+    int offset = numParts;
+    if (testString.getBytes().length < numParts)
+    {
+      offset = testString.getBytes().length;
+    }
+    String element = new String(testString.getBytes(), 0, offset);
+    assertEquals(element, ptp.fromPartitions(partsString, 0, PrimitiveTypePartitioner.STRING));
+  }
+}
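
The expected values in testMask and testPartitionBits pin down the partitioning semantics: formBitMask(n) is the n-bit all-ones value 2^n - 1, and partitionBits splits a value's bits most-significant chunk first, with a short final chunk right-aligned (983 = 0b1111010111 yields 1111, 0101, 0011). A sketch consistent with those expectations, offered as an illustration rather than the PrimitiveTypePartitioner source:

    static BigInteger formBitMask(int n)
    {
      // 2^n - 1: the low n bits set
      return BigInteger.ONE.shiftLeft(n).subtract(BigInteger.ONE);
    }

    static List<BigInteger> partitionBits(BigInteger value, int partitionSize)
    {
      int bitLength = value.bitLength();
      int numParts = Math.max((bitLength + partitionSize - 1) / partitionSize, 1);
      List<BigInteger> parts = new ArrayList<>();
      for (int i = 1; i <= numParts; ++i)
      {
        int shift = Math.max(bitLength - i * partitionSize, 0);
        BigInteger chunk = value.shiftRight(shift);     // take the next most-significant chunk
        value = value.subtract(chunk.shiftLeft(shift)); // strip those bits from the value
        parts.add(chunk);
      }
      return parts;
    }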

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/general/QueryParserUtilsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/general/QueryParserUtilsTest.java b/src/test/java/org/apache/pirk/test/general/QueryParserUtilsTest.java
new file mode 100644
index 0000000..b3db140
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/general/QueryParserUtilsTest.java
@@ -0,0 +1,421 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.general;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.hadoop.io.MapWritable;
+import org.apache.pirk.schema.data.DataSchema;
+import org.apache.pirk.schema.data.DataSchemaRegistry;
+import org.apache.pirk.schema.query.QuerySchemaRegistry;
+import org.apache.pirk.test.utils.Inputs;
+import org.apache.pirk.utils.QueryParserUtils;
+import org.apache.pirk.utils.StringUtils;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.json.simple.JSONObject;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class for testing the QueryParser methods
+ */
+public class QueryParserUtilsTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(QueryParserUtilsTest.class);
+
+  private static MapWritable doc = null; // MapWritable with arrays in json string representation
+  private static MapWritable docWAW = null; // MapWritable with arrays as WritableArrayWritable objects
+  private static Map<String,Object> docMap = null; // arrays as ArrayList<String>
+
+  private static DataSchema dSchema = null;
+
+  @BeforeClass
+  public static void setup() throws Exception
+  {
+    ArrayList<JSONObject> dataElementsJSON = Inputs.createJSONDataElements();
+
+    // Reset the schema properties and registries
+    DataSchemaRegistry.clearRegistry();
+    QuerySchemaRegistry.clearRegistry();
+    SystemConfiguration.setProperty("data.schemas", "none");
+    SystemConfiguration.setProperty("query.schemas", "none");
+
+    Inputs.createSchemaFiles(null, false, null);
+
+    dSchema = DataSchemaRegistry.get(Inputs.TEST_DATA_SCHEMA_NAME);
+
+    // ProcessBuilder pAdd1 = new ProcessBuilder("curl", "-XPUT", indexTypeNum1, "-d",
+    // "{\"qname\":\"a.b.c.com\",\"date\":\"2016-02-20T23:29:05.000Z\",\"qtype\":[\"1\"]"
+    // + ",\"rcode\":\"0\",\"src_ip\":\"55.55.55.55\",\"dest_ip\":\"1.2.3.6\"" + ",\"ip\":[\"10.20.30.40\",\"10.20.30.60\"]}");
+    //
+    doc = StringUtils.jsonStringToMapWritableWithArrayWritable(dataElementsJSON.get(0).toJSONString(), dSchema);
+    docWAW = StringUtils.jsonStringToMapWritableWithWritableArrayWritable(dataElementsJSON.get(0).toJSONString(), dSchema);
+    docMap = StringUtils.jsonStringToMap(dataElementsJSON.get(0).toJSONString(), dSchema);
+  }
+
+  @AfterClass
+  public static void teardown()
+  {
+    // Reset the schema properties and registries
+    DataSchemaRegistry.clearRegistry();
+    QuerySchemaRegistry.clearRegistry();
+    SystemConfiguration.setProperty("data.schemas", "none");
+    SystemConfiguration.setProperty("query.schemas", "none");
+  }
+
+  @Test
+  public void testSingleQuery()
+  {
+    String query1 = "?q=src_ip:55.55.55.55";
+    assertTrue(QueryParserUtils.checkRecord(query1, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query1, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query1, docMap, dSchema));
+
+    String query2 = "?q=qname:a.b.c.com";
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    String query3 = "?q=qname:d.b.c.com";
+    assertFalse(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+  }
+
+  @Test
+  public void testQueryFieldDoesNotExist()
+  {
+    logger.info("running testQueryFieldDoesNotExist");
+
+    // Field does not exist, this should not be found
+    String query = "?q=nonexistent-field:*check*";
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, docMap, dSchema));
+
+    // First field does not exist, but the second should be found
+    String query2 = "?q=nonexistent-field:*check*+OR+qname:*a.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    // First field does not exist, second field does, but the AND operator makes the query false
+    String query3 = "?q=nonexistent-field:*check*+AND+qname:*a.b.c.com*";
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+
+    logger.info("completed testQueryFieldDoesNotExist");
+  }
+
+  @Test
+  public void testIgnoreCase()
+  {
+    logger.info("running testIgnoreCase");
+
+    // case-sensitive match with mismatched case, should NOT be found
+    String query = "?q=qname:*A.b.c.com*";
+    assertFalse(QueryParserUtils.checkRecord(query, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, docMap, dSchema));
+
+    // case-sensitive match with matching case, should be found
+    String query2 = "?q=qname:*a.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    // the @ flag makes the match case-insensitive, so this should be found
+    String query3 = "?q=qname@:*A.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+
+    logger.info("completed testIgnoreCase");
+  }
+
+  @Test
+  public void testSingleValueRangeQuery()
+  {
+    testSingleValueRangeQueryMapWritable();
+    testSingleValueRangeQueryMap();
+    testSingleValueRangeQueryMapWritableWAW();
+  }
+
+  private void testSingleValueRangeQueryMapWritable()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[0+TO+2]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:{-1+TO+2}", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[-1+TO+0]", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:{0+TO+3}", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:[3+TO+10]", doc, dSchema));
+  }
+
+  private void testSingleValueRangeQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[0+TO+2]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:{-1+TO+2}", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[-1+TO+0]", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:{0+TO+3}", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:[3+TO+10]", docMap, dSchema));
+  }
+
+  private void testSingleValueRangeQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[0+TO+2]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:{-1+TO+2}", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[-1+TO+0]", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:{0+TO+3}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[3+TO+10]", docWAW, dSchema));
+  }
+
+  @Test
+  public void testIPRangeQuery()
+  {
+    testIPRangeQueryMapWritable();
+    testIPRangeQueryMap();
+    testIPRangeQueryMapWritableWAW();
+  }
+
+  public void testIPRangeQueryMapWritable()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=ip:[10.20.30.50+TO+10.20.30.69]", doc, dSchema));
+  }
+
+  public void testIPRangeQueryMapWritableWAW()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=ip:[10.20.30.50+TO+10.20.30.69]", docWAW, dSchema));
+  }
+
+  public void testIPRangeQueryMap()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=ip:[10.20.30.50+TO+10.20.30.69]", docMap, dSchema));
+  }
+
+  @Test
+  public void testDateRangeQuery()
+  {
+    testDateRangeQueryMapWritable();
+    testDateRangeQueryMapWritableWAW();
+    testDateRangeQueryMap();
+  }
+
+  private void testDateRangeQueryMapWritable()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", doc, dSchema));
+  }
+
+  private void testDateRangeQueryMap()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", docMap, dSchema));
+  }
+
+  private void testDateRangeQueryMapWritableWAW()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", docWAW, dSchema));
+  }
+
+  @Test
+  public void testBooleanQuery()
+  {
+    testBooleanQueryMapWritable();
+    testBooleanQueryMapWritableWAW();
+    testBooleanQueryMap();
+  }
+
+  private void testBooleanQueryMapWritable()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+qtype:2+OR+rcode:0", doc, dSchema));
+  }
+
+  private void testBooleanQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+qtype:2+OR+rcode:0", docMap, dSchema));
+  }
+
+  private void testBooleanQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]",
+        docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]",
+        docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]",
+        docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:5+OR+qtype:2+OR+rcode:0", docWAW, dSchema));
+  }
+
+  @Test
+  public void testAllQuery()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=*", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=*", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=*", docWAW, dSchema));
+  }
+
+  @Test
+  public void testWildcardQuery()
+  {
+    testWildcardQueryMapWritable();
+    testWildcardQueryMap();
+    testWildcardQueryMapWritableWAW();
+  }
+
+  private void testWildcardQueryMapWritable()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:*.com", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c*m", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b*", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:*.org", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:mrtf*", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljnik*.uk", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c?m", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.?.com", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:?.b.c.com", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:medelj?ikafera.com", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljntkafer?.com", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:?edeljnikrfera.com", doc, dSchema));
+  }
+
+  private void testWildcardQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:*.com", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c*m", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b*", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:*.org", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:mrtf*", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljnik*.uk", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c?m", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.?.com", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:?.b.c.com", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:medelj?ikafera.com", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljntkafer?.com", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:?edeljnikrfera.com", docMap, dSchema));
+  }
+
+  private void testWildcardQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:*.com", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.c.c*m", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b*", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:*.org", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:mrtf*", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:nedeljnik*.uk", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.c.c?m", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.?.com", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:?.b.c.com", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:medelj?ikafera.com", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:nedeljntkafer?.com", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:?edeljnikrfera.com", docWAW, dSchema));
+  }
+}
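
The grammar these tests exercise is a URI-encoded, Lucene-style query string: ?q=field:value terms joined with +AND+/+OR+, inclusive ranges [a+TO+b] versus exclusive ranges {a+TO+b}, * and ? wildcards, and (per the testIgnoreCase comments above) a trailing @ on the field name for case-insensitive matching. A minimal sketch of driving the parser outside JUnit follows; the class and helper names are invented, and the record and schema are assumed to be built as in setup() above.

    import org.apache.hadoop.io.MapWritable;
    import org.apache.pirk.schema.data.DataSchema;
    import org.apache.pirk.utils.QueryParserUtils;

    public class QueryGrammarSketch
    {
      // Hypothetical helper: 'record' and 'schema' come from elsewhere,
      // e.g. built the same way as doc and dSchema in the test setup()
      static boolean matches(MapWritable record, DataSchema schema)
      {
        // Inclusive range on rcode, ANDed with a case-insensitive wildcard on qname
        String query = "?q=rcode:[0+TO+2]+AND+qname@:*A.B.C.COM*";
        return QueryParserUtils.checkRecord(query, record, schema);
      }
    }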

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/test/schema/data/LoadDataSchemaTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/schema/data/LoadDataSchemaTest.java b/src/test/java/org/apache/pirk/test/schema/data/LoadDataSchemaTest.java
new file mode 100644
index 0000000..9f52af3
--- /dev/null
+++ b/src/test/java/org/apache/pirk/test/schema/data/LoadDataSchemaTest.java
@@ -0,0 +1,327 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.test.schema.data;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.apache.pirk.schema.data.DataSchema;
+import org.apache.pirk.schema.data.DataSchemaLoader;
+import org.apache.pirk.schema.data.DataSchemaRegistry;
+import org.apache.pirk.schema.data.partitioner.IPDataPartitioner;
+import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner;
+import org.apache.pirk.test.utils.TestUtils;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * Test suite for LoadDataSchema and DataSchema
+ */
+public class LoadDataSchemaTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(LoadDataSchemaTest.class);
+
+  private String dataSchemaName = "fakeDataSchema";
+
+  private String element1 = "elementName1";
+  private String element2 = "elementName2";
+  private String element3 = "elementName3";
+
+  @Test
+  public void testGeneralSchemaLoad() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas", "none");
+
+    // Write the schema file
+    try
+    {
+      createDataSchema("schemaFile");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+
+    // Check the entries
+    DataSchema dSchema = DataSchemaRegistry.get(dataSchemaName);
+
+    assertEquals(dataSchemaName, dSchema.getSchemaName());
+
+    assertEquals(3, dSchema.getElementNames().size());
+
+    // TODO: check Hadoop text names
+
+    assertEquals(PrimitiveTypePartitioner.STRING, dSchema.getElementType(element1));
+    assertEquals(PrimitiveTypePartitioner.INT, dSchema.getElementType(element2));
+    assertEquals(PrimitiveTypePartitioner.STRING, dSchema.getElementType(element3));
+
+    assertEquals(PrimitiveTypePartitioner.class.getName(), dSchema.getPartitionerTypeName(element1));
+    if (!(dSchema.getPartitionerForElement(element1) instanceof PrimitiveTypePartitioner))
+    {
+      fail("Partitioner instance for element1 must be PrimitiveTypePartitioner");
+    }
+    assertEquals(IPDataPartitioner.class.getName(), dSchema.getPartitionerTypeName(element3));
+    if (!(dSchema.getPartitionerForElement(element3) instanceof IPDataPartitioner))
+    {
+      fail("Partitioner instance for element3 must be IPDataPartitioner");
+    }
+
+    assertEquals(2, dSchema.getArrayElements().size());
+    assertTrue(dSchema.getArrayElements().contains(element2));
+    assertTrue(dSchema.getArrayElements().contains(element3));
+
+    assertEquals(1, dSchema.getNonArrayElements().size());
+    assertTrue(dSchema.getNonArrayElements().contains(element1));
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    if (!schemasProp.equals("none"))
+    {
+      DataSchemaLoader.initialize();
+    }
+  }
+
+  @Test
+  public void testIncorrectJavaType() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas");
+
+    // Write the schema file
+    try
+    {
+      createDataSchemaIncorrectJavaType("wrongJavaType");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    try
+    {
+      // Force the schema to load
+      DataSchemaLoader.initialize();
+      fail("DataSchemaLoader did not throw exception for incorrect javaType");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+  }
+
+  @Test
+  public void testUnknownPartitioner() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas");
+
+    // Write the schema file
+    try
+    {
+      createDataSchemaUnknownPartitioner("unknownPartitioner");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    try
+    {
+      // Force the schema to load
+      DataSchemaLoader.initialize();
+      fail("DataSchemaLoader did not throw exception for unknown partitioner");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+  }
+
+  // Create the file that contains an unknown partitioner
+  private void createDataSchemaUnknownPartitioner(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the element - unknown partitioner
+      TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.INT, "false", "fakePartitioner");
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+
+  // Create the test data schema file
+  private void createDataSchema(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the elements
+      // element1 -- single String
+      // TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
+      TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", null);
+
+      // element2 -- array of Integers
+      TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
+
+      // element3 -- array of IP addresses
+      TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+
+  // Create the test schema file
+  private void createDataSchemaIncorrectJavaType(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the element - unknown Java type
+      TestUtils.addElement(doc, rootElement, element1, "bogus", "false", PrimitiveTypePartitioner.class.getName());
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+}
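
These tests drive DataSchemaLoader entirely through the data.schemas property: write a schema XML file, point the property at it, and call initialize() to (re)populate the registry. A minimal sketch of that load sequence outside JUnit follows; the file path and schema name are placeholders, and the XML file is assumed to declare its name in a schemaName element, the way createDataSchema() above writes dataSchemaName.

    import org.apache.pirk.schema.data.DataSchema;
    import org.apache.pirk.schema.data.DataSchemaLoader;
    import org.apache.pirk.schema.data.DataSchemaRegistry;
    import org.apache.pirk.utils.SystemConfiguration;

    public class SchemaLoadSketch
    {
      public static void main(String[] args) throws Exception
      {
        // Point the loader at a schema XML file (placeholder path)
        SystemConfiguration.setProperty("data.schemas", "/tmp/mySchema.xml");

        // Re-read the data.schemas property and (re)populate the registry
        DataSchemaLoader.initialize();

        // Look the schema up by the schemaName declared inside the file
        DataSchema schema = DataSchemaRegistry.get("mySchema");
        System.out.println(schema.getElementNames());
      }
    }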

