http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/general/QueryParserUtilsTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/general/QueryParserUtilsTest.java b/src/test/java/org/apache/pirk/general/QueryParserUtilsTest.java
new file mode 100644
index 0000000..9ac2522
--- /dev/null
+++ b/src/test/java/org/apache/pirk/general/QueryParserUtilsTest.java
@@ -0,0 +1,421 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.general;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import org.apache.hadoop.io.MapWritable;
+import org.apache.pirk.schema.data.DataSchema;
+import org.apache.pirk.schema.data.DataSchemaRegistry;
+import org.apache.pirk.schema.query.QuerySchemaRegistry;
+import org.apache.pirk.test.utils.Inputs;
+import org.apache.pirk.utils.QueryParserUtils;
+import org.apache.pirk.utils.StringUtils;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.json.simple.JSONObject;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Class for testing the QueryParser methods
+ */
+public class QueryParserUtilsTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(QueryParserUtilsTest.class);
+
+  private static MapWritable doc = null; // MapWritable with arrays in json string representation
+  private static MapWritable docWAW = null; // MapWritable with arrays as WritableArrayWritable objects
+  private static Map<String,Object> docMap = null; // arrays as ArrayList<String>
+
+  private static DataSchema dSchema = null;
+
+  @BeforeClass
+  public static void setup() throws Exception
+  {
+    ArrayList<JSONObject> dataElementsJSON = Inputs.createJSONDataElements();
+
+    // Reset the schema properties and registries
+    DataSchemaRegistry.clearRegistry();
+    QuerySchemaRegistry.clearRegistry();
+    SystemConfiguration.setProperty("data.schemas", "none");
+    SystemConfiguration.setProperty("query.schemas", "none");
+
+    Inputs.createSchemaFiles(null, false, null);
+
+    dSchema = DataSchemaRegistry.get(Inputs.TEST_DATA_SCHEMA_NAME);
+
+    // ProcessBuilder pAdd1 = new ProcessBuilder("curl", "-XPUT", indexTypeNum1, "-d",
+    // "{\"qname\":\"a.b.c.com\",\"date\":\"2016-02-20T23:29:05.000Z\",\"qtype\":[\"1\"]"
+    // + ",\"rcode\":\"0\",\"src_ip\":\"55.55.55.55\",\"dest_ip\":\"1.2.3.6\"" + ",\"ip\":[\"10.20.30.40\",\"10.20.30.60\"]}");
+    //
+    doc = StringUtils.jsonStringToMapWritableWithArrayWritable(dataElementsJSON.get(0).toJSONString(), dSchema);
+    docWAW = StringUtils.jsonStringToMapWritableWithWritableArrayWritable(dataElementsJSON.get(0).toJSONString(), dSchema);
+    docMap = StringUtils.jsonStringToMap(dataElementsJSON.get(0).toJSONString(), dSchema);
+  }
+
+  @AfterClass
+  public static void teardown()
+  {
+    // Reset the schema properties and registries
+    DataSchemaRegistry.clearRegistry();
+    QuerySchemaRegistry.clearRegistry();
+    SystemConfiguration.setProperty("data.schemas", "none");
+    SystemConfiguration.setProperty("query.schemas", "none");
+  }
+
+  @Test
+  public void testSingleQuery()
+  {
+    String query1 = "?q=src_ip:55.55.55.55";
+    assertTrue(QueryParserUtils.checkRecord(query1, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query1, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query1, docMap, dSchema));
+
+    String query2 = "?q=qname:a.b.c.com";
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    String query3 = "?q=qname:d.b.c.com";
+    assertFalse(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+  }
+
+  @Test
+  public void testQueryFieldDoesNotExist()
+  {
+    logger.info("running testQueryFieldDoesNotExist");
+
+    // Field does not exist, this should not be found
+    String query = "?q=nonexistent-field:*check*";
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, docMap, dSchema));
+
+    // First field does not exist, but second should be found
+    String query2 = "?q=nonexistent-field:*check*+OR+qname:*a.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    // First field does not exist, second field does, but the AND operator makes the query false
+    String query3 = "?q=nonexistent-field:*check*+AND+qname:*a.b.c.com*";
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+
+    logger.info("completed testQueryFieldDoesNotExist");
+  }
+
+  @Test
+  public void testIgnoreCase()
+  {
+    logger.info("running testIgnoreCase");
+
+    // with case sensitivity, should NOT be found
+    String query = "?q=qname:*A.b.c.com*";
+    assertFalse(QueryParserUtils.checkRecord(query, doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable(query, docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecord(query, docMap, dSchema));
+
+    // with case sensitivity, should be found
+    String query2 = "?q=qname:*a.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecord(query2, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query2, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query2, docMap, dSchema));
+
+    // the @ flag makes the field match case-insensitive, thus should be found
+    String query3 = "?q=qname@:*A.b.c.com*";
+    assertTrue(QueryParserUtils.checkRecord(query3, doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable(query3, docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecord(query3, docMap, dSchema));
+
+    logger.info("completed testIgnoreCase");
+  }
+
+  @Test
+  public void testSingleValueRangeQuery()
+  {
+    testSingleValueRangeQueryMapWritable();
+    testSingleValueRangeQueryMap();
+    testSingleValueRangeQueryMapWritableWAW();
+  }
+
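+  // The range tests below exercise Lucene-style range syntax: square brackets
+  // [a+TO+b] are inclusive bounds and curly braces {a+TO+b} are exclusive bounds;
+  // the test record has rcode = 0, so {0+TO+3} must not match while [0+TO+2] does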
+  private void testSingleValueRangeQueryMapWritable()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[0+TO+2]", doc, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:{-1+TO+2}", doc, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[-1+TO+0]", doc, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:{0+TO+3}", doc, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:[3+TO+10]", doc, 
dSchema));
+  }
+
+  private void testSingleValueRangeQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[0+TO+2]", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:{-1+TO+2}", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=rcode:[-1+TO+0]", docMap, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:{0+TO+3}", docMap, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=rcode:[3+TO+10]", docMap, 
dSchema));
+  }
+
+  private void testSingleValueRangeQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[0+TO+2]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:{-1+TO+2}", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[-1+TO+0]", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:{0+TO+3}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=rcode:[3+TO+10]", docWAW, dSchema));
+  }
+
+  @Test
+  public void testIPRangeQuery()
+  {
+    testIPRangeQueryMapWritable();
+    testIPRangeQueryMap();
+    testIPRangeQueryMapWritableWAW();
+  }
+
+  private void testIPRangeQueryMapWritable()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
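+    // Note: the bounds are compared as dotted-quad IP addresses, not as plain strings;
+    // a lexicographic comparison would not place 55.55.55.55 below 173.248.255.255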
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=ip:[10.20.30.50+TO+10.20.30.69]", doc, dSchema));
+  }
+
+  private void testIPRangeQueryMapWritableWAW()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=ip:[10.20.30.50+TO+10.20.30.69]", docWAW, dSchema));
+  }
+
+  private void testIPRangeQueryMap()
+  {
+    // src_ip: 55.55.55.55
+    // ip: 10.20.30.40,10.20.30.60
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+173.248.255.255]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.0+TO+55.55.55.100]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:[55.55.55.2+TO+55.55.55.55]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.57}", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{173.248.188.0+TO+173.248.188.10}", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=src_ip:{55.55.55.2+TO+55.55.55.55}", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=ip:[10.20.30.50+TO+10.20.30.69]", docMap, dSchema));
+  }
+
+  @Test
+  public void testDateRangeQuery()
+  {
+    testDateRangeQueryMapWritable();
+    testDateRangeQueryMapWritableWAW();
+    testDateRangeQueryMap();
+  }
+
+  private void testDateRangeQueryMapWritable()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", doc, dSchema));
+  }
+
+  private void testDateRangeQueryMap()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", docMap, dSchema));
+  }
+
+  private void testDateRangeQueryMapWritableWAW()
+  {
+    // date: 2016-02-20T23:29:05.000Z
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2014-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2015-05-05T20:33:07.000Z+TO+2016-04-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2016-02-20T23:29:05.000Z+TO+2017-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-06-05T20:33:07.000Z+TO+2016-02-20T23:30:05.000Z}", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z}", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=date:{2015-06-05T20:33:07.000Z+TO+2015-07-05T20:33:07.000Z}", docWAW, dSchema));
+  }
+
+  @Test
+  public void testBooleanQuery()
+  {
+    testBooleanQueryMapWritable();
+    testBooleanQueryMapWritableWAW();
+    testBooleanQueryMap();
+  }
+
+  private void testBooleanQueryMapWritable()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", doc, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+qtype:2+OR+rcode:0", doc, dSchema));
+  }
+
+  private void testBooleanQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docMap, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qtype:5+OR+qtype:2+OR+rcode:0", docMap, dSchema));
+  }
+
+  private void testBooleanQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:5+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:0+AND+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:0+OR+date:[2013-05-05T20:33:07.000Z+TO+2014-07-05T20:33:07.000Z]", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:1+AND+rcode:1+OR+date:[2015-05-05T20:33:07.000Z+TO+2016-02-20T23:29:05.000Z]", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qtype:5+OR+qtype:2+OR+rcode:0", docWAW, dSchema));
+  }
+
+  @Test
+  public void testAllQuery()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=*", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=*", docMap, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=*", 
docWAW, dSchema));
+  }
+
+  @Test
+  public void testWildcardQuery()
+  {
+    testWildcardQueryMapWritable();
+    testWildcardQueryMap();
+    testWildcardQueryMapWritableWAW();
+  }
+
+  private void testWildcardQueryMapWritable()
+  {
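+    // '*' matches any run of characters; '?' matches exactly one character
+    // (the test record's qname is a.b.c.com)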
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:*.com", doc, dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c*m", doc, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b*", doc, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:*.org", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:mrtf*", doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljnik*.uk", doc, 
dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c?m", doc, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.?.com", doc, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:?.b.c.com", doc, 
dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:medelj?ikafera.com", 
doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljntkafer?.com", 
doc, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:?edeljnikrfera.com", 
doc, dSchema));
+  }
+
+  private void testWildcardQueryMap()
+  {
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:*.com", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c*m", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b*", docMap, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:*.org", docMap, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:mrtf*", docMap, 
dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljnik*.uk", docMap, 
dSchema));
+
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.c.c?m", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:a.b.?.com", docMap, 
dSchema));
+    assertTrue(QueryParserUtils.checkRecord("?q=qname:?.b.c.com", docMap, 
dSchema));
+
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:medelj?ikafera.com", 
docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:nedeljntkafer?.com", 
docMap, dSchema));
+    assertFalse(QueryParserUtils.checkRecord("?q=qname:?edeljnikrfera.com", 
docMap, dSchema));
+  }
+
+  private void testWildcardQueryMapWritableWAW()
+  {
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:*.com", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.c.c*m", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b*", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:*.org", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:mrtf*", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:nedeljnik*.uk", docWAW, dSchema));
+
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.c.c?m", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:a.b.?.com", docWAW, dSchema));
+    assertTrue(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:?.b.c.com", docWAW, dSchema));
+
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:medelj?ikafera.com", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:nedeljntkafer?.com", docWAW, dSchema));
+    assertFalse(QueryParserUtils.checkRecordWritableArrayWritable("?q=qname:?edeljnikrfera.com", docWAW, dSchema));
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/schema/data/LoadDataSchemaTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/schema/data/LoadDataSchemaTest.java b/src/test/java/org/apache/pirk/schema/data/LoadDataSchemaTest.java
new file mode 100644
index 0000000..3aa500b
--- /dev/null
+++ b/src/test/java/org/apache/pirk/schema/data/LoadDataSchemaTest.java
@@ -0,0 +1,324 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.schema.data;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.apache.pirk.schema.data.partitioner.IPDataPartitioner;
+import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner;
+import org.apache.pirk.test.utils.TestUtils;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * Test suite for LoadDataSchema and DataSchema
+ */
+public class LoadDataSchemaTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(LoadDataSchemaTest.class);
+
+  private String dataSchemaName = "fakeDataSchema";
+
+  private String element1 = "elementName1";
+  private String element2 = "elementName2";
+  private String element3 = "elementName3";
+
+  @Test
+  public void testGeneralSchemaLoad() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas", 
"none");
+
+    // Write the schema file
+    try
+    {
+      createDataSchema("schemaFile");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+
+    // Check the entries
+    DataSchema dSchema = DataSchemaRegistry.get(dataSchemaName);
+
+    assertEquals(dataSchemaName, dSchema.getSchemaName());
+
+    assertEquals(3, dSchema.getElementNames().size());
+
+    // TODO: check Hadoop text names
+
+    assertEquals(PrimitiveTypePartitioner.STRING, dSchema.getElementType(element1));
+    assertEquals(PrimitiveTypePartitioner.INT, dSchema.getElementType(element2));
+    assertEquals(PrimitiveTypePartitioner.STRING, dSchema.getElementType(element3));
+
+    assertEquals(PrimitiveTypePartitioner.class.getName(), dSchema.getPartitionerTypeName(element1));
+    if (!(dSchema.getPartitionerForElement(element1) instanceof PrimitiveTypePartitioner))
+    {
+      fail("Partitioner instance for element1 must be PrimitiveTypePartitioner");
+    }
+    assertEquals(IPDataPartitioner.class.getName(), dSchema.getPartitionerTypeName(element3));
+    if (!(dSchema.getPartitionerForElement(element3) instanceof IPDataPartitioner))
+    {
+      fail("Partitioner instance for element3 must be IPDataPartitioner");
+    }
+
+    assertEquals(2, dSchema.getArrayElements().size());
+    assertTrue(dSchema.getArrayElements().contains(element2));
+    assertTrue(dSchema.getArrayElements().contains(element3));
+
+    assertEquals(1, dSchema.getNonArrayElements().size());
+    assertTrue(dSchema.getNonArrayElements().contains(element1));
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    if (!schemasProp.equals("none"))
+    {
+      DataSchemaLoader.initialize();
+    }
+  }
+
+  @Test
+  public void testIncorrectJavaType() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas");
+
+    // Write the schema file
+    try
+    {
+      createDataSchemaIncorrectJavaType("wrongJavaType");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    try
+    {
+      // Force the schema to load
+      DataSchemaLoader.initialize();
+      fail("DataSchemaLoader did not throw exception for incorrect javaType");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+  }
+
+  @Test
+  public void testUnknownPartitioner() throws Exception
+  {
+    // Pull off the property and reset upon completion
+    String schemasProp = SystemConfiguration.getProperty("data.schemas");
+
+    // Write the schema file
+    try
+    {
+      createDataSchemaUnknownPartitioner("unknownPartitioner");
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+
+    try
+    {
+      // Force the schema to load
+      DataSchemaLoader.initialize();
+      fail("DataSchemaLoader did not throw exception for unknown partitioner");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original data.schemas property
+    SystemConfiguration.setProperty("data.schemas", schemasProp);
+
+    // Force the schema to load
+    DataSchemaLoader.initialize();
+  }
+
+  // Create the file that contains an unknown partitioner
+  private void createDataSchemaUnknownPartitioner(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the element - unknown partitioner
+      TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.INT, "false", "fakePartitioner");
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+
+  // Create the test data schema file
+  private void createDataSchema(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the elements
+      // element1 -- single String
+      // TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
+      TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", null);
+
+      // element2 -- array of Integers
+      TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
+
+      // element3 -- array of IP addresses
+      TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+
+  // Create the test schema file
+  private void createDataSchemaIncorrectJavaType(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the element - unknown Java type
+      TestUtils.addElement(doc, rootElement, element1, "bogus", "false", 
PrimitiveTypePartitioner.class.getName());
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java b/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
new file mode 100644
index 0000000..55cb0a9
--- /dev/null
+++ b/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
@@ -0,0 +1,368 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.schema.query;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.transform.Transformer;
+import javax.xml.transform.TransformerFactory;
+import javax.xml.transform.dom.DOMSource;
+import javax.xml.transform.stream.StreamResult;
+
+import org.apache.pirk.schema.data.DataSchemaLoader;
+import org.apache.pirk.schema.data.partitioner.IPDataPartitioner;
+import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner;
+import org.apache.pirk.schema.query.filter.StopListFilter;
+import org.apache.pirk.test.utils.Inputs;
+import org.apache.pirk.test.utils.TestUtils;
+import org.apache.pirk.utils.PIRException;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+
+/**
+ * Test suite for LoadQuerySchema and QuerySchema
+ */
+public class LoadQuerySchemaTest
+{
+  private static final Logger logger = LoggerFactory.getLogger(LoadQuerySchemaTest.class);
+
+  private String querySchemaFile = "querySchemaFile";
+  private String dataSchemaName = "fakeDataSchema";
+  private String querySchemaName = "fakeQuerySchema";
+
+  private String element1 = "elementName1";
+  private String element2 = "elementName2";
+  private String element3 = "elementName3";
+  private String element4 = "elementName4";
+
+  private List<String> queryElements = Arrays.asList(element1, element2, element3);
+  private List<String> filterElements = Collections.singletonList(element2);
+
+  @Test
+  public void testGeneralSchemaLoad() throws Exception
+  {
+    logger.info("Starting testGeneralSchemaLoad: ");
+
+    // Pull off the properties and reset upon completion
+    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", 
"none");
+    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", 
"none");
+    String stopListFileProp = 
SystemConfiguration.getProperty("pir.stopListFile");
+
+    // Create the stoplist file
+    createStopListFile();
+
+    // Create the data schema used and force it to load
+    try
+    {
+      createDataSchema("dataSchemaFile");
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    DataSchemaLoader.initialize();
+
+    // Create the query schema used and force it to load
+    try
+    {
+      TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, StopListFilter.class.getName());
+
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    QuerySchemaLoader.initialize();
+
+    // Check the entries
+    QuerySchema qSchema = QuerySchemaRegistry.get(querySchemaName);
+
+    assertEquals(querySchemaName, qSchema.getSchemaName());
+    assertEquals(dataSchemaName, qSchema.getDataSchemaName());
+    assertEquals(element4, qSchema.getSelectorName());
+
+    assertEquals(StopListFilter.class.getName(), qSchema.getFilterTypeName());
+    if (!(qSchema.getFilter() instanceof StopListFilter))
+    {
+      fail("Filter class instance must be StopListFilter");
+    }
+
+    assertEquals(3, qSchema.getElementNames().size());
+    for (String item : qSchema.getElementNames())
+    {
+      if (!(item.equals(element1) || item.equals(element2) || item.equals(element3)))
+      {
+        fail("elementNames: item = " + item + " must equal one of: " + element1 + ", " + element2 + ", or " + element3);
+      }
+    }
+    assertEquals(1, qSchema.getFilteredElementNames().size());
+    for (String item : qSchema.getFilteredElementNames())
+    {
+      if (!item.equals(element2))
+      {
+        fail("filterElementNames: item = " + item + " must equal " + element2);
+      }
+    }
+
+    // one string, array IPs, array integers
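+    // expected size: pir.stringBits for the single string element, plus 32 bits
+    // per returned array element for both the IP array and the integer array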
+    int stringSize = Integer.parseInt(SystemConfiguration.getProperty("pir.stringBits"));
+    int arrayMult = Integer.parseInt(SystemConfiguration.getProperty("pir.numReturnArrayElements"));
+    int dataElementSize = stringSize + 32 * arrayMult + 32 * arrayMult;
+    assertEquals(dataElementSize, qSchema.getDataElementSize());
+
+    // Reset original query and data schema properties
+    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
+    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
+    SystemConfiguration.setProperty("pir.stopListFile", stopListFileProp);
+
+    // Force the query and data schemas to load their original values
+    if (!dataSchemasProp.equals("none"))
+    {
+      DataSchemaLoader.initialize();
+    }
+
+    if (!querySchemasProp.equals("none"))
+    {
+      QuerySchemaLoader.initialize();
+    }
+
+    logger.info("Finished testGeneralSchemaLoad: ");
+  }
+
+  @Test
+  public void testUnknownFilterClass() throws Exception
+  {
+    // Pull off the properties and reset upon completion
+    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", 
"none");
+    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", 
"none");
+
+    // Create the data schema used and force it to load
+    try
+    {
+      createDataSchema("dataSchemaFile");
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    DataSchemaLoader.initialize();
+
+    // Create the query schema used and force it to load
+    try
+    {
+      TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements, "bogusFilterClass");
+
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    try
+    {
+      QuerySchemaLoader.initialize();
+      fail("QuerySchemaLoader did not throw exception for bogus filter class");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original query and data schema properties
+    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
+    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
+
+    // Force the query and data schemas to load their original values
+    if (!dataSchemasProp.equals("none"))
+    {
+      DataSchemaLoader.initialize();
+    }
+
+    if (!querySchemasProp.equals("none"))
+    {
+      QuerySchemaLoader.initialize();
+    }
+
+    logger.info("Finished testFunkyFilterScenarios");
+  }
+
+  @Test
+  public void testDataSchemaDoesNotExist() throws Exception
+  {
+    logger.info("Starting testDataSchemaDoesNotExist: ");
+
+    // Pull off the properties and reset upon completion
+    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", 
"none");
+
+    // Create the query schema used and force it to load
+    try
+    {
+      TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, null);
+
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    try
+    {
+      QuerySchemaLoader.initialize();
+      fail("QuerySchemaLoader did not throw exception for non-existent 
DataSchema");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original query properties and force to load
+    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
+    if (!querySchemasProp.equals("none"))
+    {
+      QuerySchemaLoader.initialize();
+    }
+
+    logger.info("Finished testDataSchemaDoesNotExist ");
+  }
+
+  @Test
+  public void testSelectorDoesNotExistInDataSchema() throws Exception
+  {
+    logger.info("Starting testSelectorDoesNotExistInDataSchema: ");
+
+    // Pull off the properties and reset upon completion
+    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", 
"none");
+    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", 
"none");
+
+    // Create the data schema used and force it to load
+    try
+    {
+      createDataSchema("dataSchemaFile");
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    DataSchemaLoader.initialize();
+
+    // Create the query schema used and force it to load
+    try
+    {
+      TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements, StopListFilter.class.getName());
+
+    } catch (IOException e)
+    {
+      e.printStackTrace();
+      fail(e.toString());
+    }
+    try
+    {
+      QuerySchemaLoader.initialize();
+      fail("QuerySchemaLoader did not throw exception for non-existent 
selectorName");
+    } catch (Exception ignore)
+    {}
+
+    // Reset original query and data schema properties
+    SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
+    SystemConfiguration.setProperty("query.schemas", querySchemasProp);
+
+    // Force the query and data schemas to load their original values
+    if (!dataSchemasProp.equals("none"))
+    {
+      DataSchemaLoader.initialize();
+    }
+
+    if (!querySchemasProp.equals("none"))
+    {
+      QuerySchemaLoader.initialize();
+    }
+
+    logger.info("Finished testSelectorDoesNotExistInDataSchema ");
+  }
+
+  // Create the stoplist file and alter the properties accordingly
+  private void createStopListFile() throws IOException, PIRException
+  {
+    SystemConfiguration.setProperty("pir.stopListFile", "testStopListFile");
+    String newSLFile = Inputs.createPIRStopList(null, false);
+    SystemConfiguration.setProperty("pir.stopListFile", newSLFile);
+  }
+
+  // Create the test data schema file
+  private void createDataSchema(String schemaFile) throws IOException
+  {
+    // Create a temporary file for the test schema, set in the properties
+    File file = File.createTempFile(schemaFile, ".xml");
+    file.deleteOnExit();
+    logger.info("file = " + file.toString());
+    SystemConfiguration.setProperty("data.schemas", file.toString());
+
+    // Write to the file
+    try
+    {
+      DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+      DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+      Document doc = dBuilder.newDocument();
+
+      // root element
+      Element rootElement = doc.createElement("schema");
+      doc.appendChild(rootElement);
+
+      // Add the schemaName
+      Element schemaNameElement = doc.createElement("schemaName");
+      schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+      rootElement.appendChild(schemaNameElement);
+
+      // Add the elements
+      // element1 -- single String
+      TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
+
+      // element2 -- array of Integers
+      TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
+
+      // element3 -- array of IP addresses
+      TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
+
+      // element4 -- single byte type
+      TestUtils.addElement(doc, rootElement, element4, PrimitiveTypePartitioner.BYTE, "false", PrimitiveTypePartitioner.class.getName());
+
+      // Write to an XML file
+      TransformerFactory transformerFactory = TransformerFactory.newInstance();
+      Transformer transformer = transformerFactory.newTransformer();
+      DOMSource source = new DOMSource(doc);
+      StreamResult result = new StreamResult(file);
+      transformer.transform(source, result);
+
+      // Output for testing
+      StreamResult consoleResult = new StreamResult(System.out);
+      transformer.transform(source, consoleResult);
+      System.out.println();
+
+    } catch (Exception e)
+    {
+      e.printStackTrace();
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/serialization/SerializationTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/serialization/SerializationTest.java b/src/test/java/org/apache/pirk/serialization/SerializationTest.java
new file mode 100644
index 0000000..8689d43
--- /dev/null
+++ b/src/test/java/org/apache/pirk/serialization/SerializationTest.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.serialization;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.Serializable;
+import java.util.Objects;
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+public class SerializationTest
+{
+
+  @Rule
+  public TemporaryFolder folder = new TemporaryFolder();
+
+  private static JsonSerializer jsonSerializer;
+  private static JavaSerializer javaSerializer;
+
+  @BeforeClass
+  public static void setUp() throws Exception
+  {
+    jsonSerializer = new JsonSerializer();
+    javaSerializer = new JavaSerializer();
+  }
+
+  @Test
+  public void testJsonSerDe() throws Exception
+  {
+    File tempFile = folder.newFile("test-json-serialize");
+    FileOutputStream fos = new FileOutputStream(tempFile);
+    DummyRecord dummyRecord = new DummyRecord();
+
+    jsonSerializer.write(fos, dummyRecord);
+
+    FileInputStream fis = new FileInputStream(tempFile);
+    Object deserializedDummyObject = jsonSerializer.read(fis, DummyRecord.class);
+    Assert.assertEquals(dummyRecord, deserializedDummyObject);
+  }
+
+  @Test
+  public void testJavaSerDe() throws Exception
+  {
+    File tempFile = folder.newFile("test-java-serialize");
+    FileOutputStream fos = new FileOutputStream(tempFile);
+    DummyRecord dummyRecord = new DummyRecord();
+
+    javaSerializer.write(fos, dummyRecord);
+
+    FileInputStream fis = new FileInputStream(tempFile);
+    Object deserializedDummyObject = javaSerializer.read(fis, DummyRecord.class);
+    Assert.assertEquals(dummyRecord, deserializedDummyObject);
+  }
+
+  private static class DummyRecord implements Serializable, Storable
+  {
+    private int id;
+    private String message;
+    private long seed = 100L;
+
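+    // The fixed seed makes id, and hence message, deterministic, so independently
+    // constructed DummyRecord instances compare equal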
+    DummyRecord()
+    {
+      this.id = (new Random(seed)).nextInt(5);
+      this.message = "The next message id is " + id;
+    }
+
+    public int getId()
+    {
+      return id;
+    }
+
+    public void setId(int id)
+    {
+      this.id = id;
+    }
+
+    public String getMessage()
+    {
+      return message;
+    }
+
+    public void setMessage(String message)
+    {
+      this.message = message;
+    }
+
+    @Override
+    public String toString()
+    {
+      return "DummyRecord{" + "id=" + id + ", message='" + message + '\'' + 
'}';
+    }
+
+    @Override
+    public boolean equals(Object o)
+    {
+      if (this == o)
+        return true;
+      if (o == null || getClass() != o.getClass())
+        return false;
+      DummyRecord that = (DummyRecord) o;
+      return id == that.id && Objects.equals(message, that.message);
+    }
+
+    @Override
+    public int hashCode()
+    {
+      return Objects.hash(id, message);
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/test/benchmark/BenchmarkDriver.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/benchmark/BenchmarkDriver.java b/src/test/java/org/apache/pirk/test/benchmark/BenchmarkDriver.java
deleted file mode 100644
index e71c487..0000000
--- a/src/test/java/org/apache/pirk/test/benchmark/BenchmarkDriver.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.pirk.test.benchmark;
-
-import java.io.IOException;
-
-import org.openjdk.jmh.Main;
-import org.openjdk.jmh.runner.RunnerException;
-
-/**
- * Driver for JMH benchmarking
- */
-public class BenchmarkDriver
-{
-  public static void main(String[] args) throws RunnerException, IOException
-  {
-    Main.main(args);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/test/benchmark/PaillierBenchmark.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/benchmark/PaillierBenchmark.java b/src/test/java/org/apache/pirk/test/benchmark/PaillierBenchmark.java
deleted file mode 100644
index 15533b5..0000000
--- a/src/test/java/org/apache/pirk/test/benchmark/PaillierBenchmark.java
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.pirk.test.benchmark;
-
-import java.math.BigInteger;
-
-import org.apache.pirk.encryption.ModPowAbstraction;
-import org.apache.pirk.encryption.Paillier;
-import org.apache.pirk.utils.PIRException;
-import org.apache.pirk.utils.SystemConfiguration;
-import org.openjdk.jmh.annotations.Benchmark;
-import org.openjdk.jmh.annotations.BenchmarkMode;
-import org.openjdk.jmh.annotations.Mode;
-import org.openjdk.jmh.annotations.Scope;
-import org.openjdk.jmh.annotations.Setup;
-import org.openjdk.jmh.annotations.State;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A JMH benchmark to evaluate Paillier performance both with and without using com.square.jnagmp.gmp to accelerate modPow
- * <p>
- * Guides to using JMH can be found at: http://tutorials.jenkov.com/java-performance/jmh.html and http://nitschinger.at/Using-JMH-for-Java-Microbenchmarking/
- */
-
-public class PaillierBenchmark
-{
-  private static final int MODULUS_SIZE = 3074;
-  private static final Logger logger = LoggerFactory.getLogger(PaillierBenchmark.class);
-
-  @State(Scope.Benchmark)
-  public static class PaillierBenchmarkState
-  {
-    BigInteger r1 = null; // random number in (Z/NZ)*
-    BigInteger m1 = null; // message to encrypt
-
-    Paillier pallier = null;
-
-    /**
-     * This sets up the state for the two separate benchmarks
-     */
-    @Setup(org.openjdk.jmh.annotations.Level.Trial)
-    public void setUp()
-    {
-      int systemPrimeCertainty = SystemConfiguration.getIntProperty("pir.primeCertainty", 100);
-      try
-      {
-        pallier = new Paillier(MODULUS_SIZE, systemPrimeCertainty);
-
-      } catch (PIRException e)
-      {
-        System.out.printf("Couldn't build pallier object!%n");
-      }
-
-      r1 = BigInteger.valueOf(3);
-      m1 = BigInteger.valueOf(5);
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.Throughput)
-  public void testWithGMP(PaillierBenchmarkState allState)
-  {
-    SystemConfiguration.setProperty("paillier.useGMPForModPow", "true");
-    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false");
-    ModPowAbstraction.reloadConfiguration();
-
-    try
-    {
-      allState.pallier.encrypt(allState.m1, allState.r1);
-    } catch (PIRException e)
-    {
-      logger.info("Exception in testWithGMP!\n");
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.Throughput)
-  public void testWithGMPConstantTime(PaillierBenchmarkState allState)
-  {
-    SystemConfiguration.setProperty("paillier.useGMPForModPow", "true");
-    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "true");
-    ModPowAbstraction.reloadConfiguration();
-
-    try
-    {
-      allState.pallier.encrypt(allState.m1, allState.r1);
-    } catch (PIRException e)
-    {
-      logger.info("Exception in testWithGMPConstantTime!\n");
-    }
-  }
-
-  @Benchmark
-  @BenchmarkMode(Mode.Throughput)
-  public void testWithoutGMP(PaillierBenchmarkState allState)
-  {
-    SystemConfiguration.setProperty("paillier.useGMPForModPow", "false");
-    ModPowAbstraction.reloadConfiguration();
-
-    try
-    {
-      allState.pallier.encrypt(allState.m1, allState.r1);
-    } catch (PIRException e)
-    {
-      logger.info("Exception in testWithoutGMP!\n");
-    }
-  }
-}
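
One caveat worth noting about the benchmark above: each @Benchmark method sets the paillier.* properties and calls ModPowAbstraction.reloadConfiguration() inside the measured call, so that reconfiguration cost is folded into every throughput sample. A hedged sketch of a more conventional JMH shape, assuming the same Pirk properties, moves the mode switch into per-trial setup via @Param; the class and field names here are hypothetical.

    package org.apache.pirk.test.benchmark;

    import org.apache.pirk.encryption.ModPowAbstraction;
    import org.apache.pirk.utils.SystemConfiguration;
    import org.openjdk.jmh.annotations.Level;
    import org.openjdk.jmh.annotations.Param;
    import org.openjdk.jmh.annotations.Scope;
    import org.openjdk.jmh.annotations.Setup;
    import org.openjdk.jmh.annotations.State;

    // Hypothetical state class: JMH runs one full trial per @Param value,
    // so the properties are reloaded once per trial, not per invocation.
    @State(Scope.Benchmark)
    public class ModPowModeState
    {
      @Param({"gmp", "gmp-constant-time", "java"})
      public String modPowMode;

      @Setup(Level.Trial)
      public void configureModPow()
      {
        SystemConfiguration.setProperty("paillier.useGMPForModPow", String.valueOf(!"java".equals(modPowMode)));
        SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", String.valueOf("gmp-constant-time".equals(modPowMode)));
        ModPowAbstraction.reloadConfiguration();
      }
    }

A single benchmark method taking ModPowModeState and calling encrypt() would then replace the three near-identical methods above, with the measured loop containing only the encryption itself.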

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/test/distributed/DistributedTestCLI.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/distributed/DistributedTestCLI.java b/src/test/java/org/apache/pirk/test/distributed/DistributedTestCLI.java
deleted file mode 100644
index 1535e1f..0000000
--- a/src/test/java/org/apache/pirk/test/distributed/DistributedTestCLI.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.pirk.test.distributed;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A container for Apache's Command Line Interface that contains custom functionality for the MapReduce functional tests.
- */
-public class DistributedTestCLI
-{
-  private static final Logger logger = LoggerFactory.getLogger(DistributedTestCLI.class);
-
-  private CommandLine commandLine = null;
-  private Options cliOptions = null;
-
-  /**
-   * Create and parse allowable options
-   * 
-   * @param args
-   *          - arguments fed into the main method
-   */
-  public DistributedTestCLI(String[] args)
-  {
-    // create the command line options
-    cliOptions = createOptions();
-
-    try
-    {
-      // parse the command line options
-      CommandLineParser parser = new GnuParser();
-      commandLine = parser.parse(cliOptions, args, true);
-
-      // if help option is selected, just print help text and exit
-      if (hasOption("h"))
-      {
-        printHelp();
-        System.exit(1);
-      }
-
-      // The full path of the jar file must be set
-      if (!hasOption("j"))
-      {
-        logger.info("The full path of the jar file must be set with -j");
-        System.exit(1);
-      }
-    } catch (Exception e)
-    {
-      e.printStackTrace();
-      System.exit(1);
-    }
-  }
-
-  /**
-   * Determine if an option was provided by the user via the CLI
-   * 
-   * @param option
-   *          - the option of interest
-   * @return true if option was provided, false otherwise
-   */
-  public boolean hasOption(String option)
-  {
-    return commandLine.hasOption(option);
-  }
-
-  /**
-   * Obtain the argument of the option provided by the user via the CLI
-   * 
-   * @param option
-   *          - the option of interest
-   * @return value of the argument of the option
-   */
-  public String getOptionValue(String option)
-  {
-    return commandLine.getOptionValue(option);
-  }
-
-  /**
-   * Determine if the argument was provided, which determines if a test should or should not be run
-   * 
-   * @param allowed
-   *          - argument string you are looking for
-   * @return true if argument was provided via the CLI, false otherwise
-   */
-  public boolean run(String allowed)
-  {
-    return run(allowed, "t");
-  }
-
-  /**
-   * Determine if the argument was provided for the selected option, which determines if a test should or should not be run
-   * 
-   * @param allowed
-   *          - argument string you are looking for
-   * @param option
-   *          - the option of interest
-   * @return true if argument was provided via the CLI, false otherwise
-   */
-  public boolean run(String allowed, String option)
-  {
-    if (!hasOption(option))
-    {
-      return true;
-    }
-
-    String selection = getOptionValue(option);
-    String[] selectionList = selection.split(",");
-
-    for (String selectionItem : selectionList)
-    {
-      if (selectionItem.equals(allowed))
-      {
-        return true;
-      }
-    }
-
-    return false;
-  }
-
-  /**
-   * Create the options available for the DistributedTestDriver
-   * 
-   * @return Apache's CLI Options object
-   */
-  private Options createOptions()
-  {
-    Options options = new Options();
-
-    // help
-    Option optionHelp = new Option("h", "help", false, "Print out the help documentation for this command line execution");
-    optionHelp.setRequired(false);
-    options.addOption(optionHelp);
-
-    // jar file
-    Option optionJar = new Option("j", "jar", true, "required -- Fully qualified jar file");
-    optionJar.setRequired(false);
-    options.addOption(optionJar);
-
-    // test selection
-    String tests = "testNum = 1: Wideskies Tests\n";
-    tests += "Subtests:\n";
-    tests += "E - Elasticsearch MapReduce\n";
-    tests += "J - JSON/HDFS MapReduce\n";
-    tests += "ES - Elasticsearch Spark \n";
-    tests += "JS - JSON/HDFS Spark \n";
-
-    Option optionTestSelection = new Option("t", "tests", true, "optional -- Select which tests to execute: \n" + tests);
-    optionTestSelection.setRequired(false);
-    optionTestSelection.setArgName("<testNum>:<subtestDesignator>");
-    optionTestSelection.setType(String.class);
-    options.addOption(optionTestSelection);
-
-    return options;
-  }
-
-  /**
-   * Prints out the help message
-   */
-  private void printHelp()
-  {
-    HelpFormatter formatter = new HelpFormatter();
-    formatter.setWidth(140);
-    formatter.printHelp("DistributedTestDriver", cliOptions);
-  }
-}
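
For reference, the options defined above imply an invocation along these lines, where -j is mandatory and -t takes a comma-separated list of <testNum>:<subtestDesignator> selectors (omitting -t runs every test); the jar name and paths are placeholders, not taken from the removed code.

    java -cp pirk-with-dependencies.jar \
        org.apache.pirk.test.distributed.DistributedTestDriver \
        -j /full/path/to/pirk-with-dependencies.jar \
        -t 1:J,1:ES

With that -t value, run("1:J") and run("1:ES") return true while run("1:E") and run("1:JS") do not, so only the JSON/HDFS MapReduce and Elasticsearch Spark suites execute.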

http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/9244df72/src/test/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/pirk/test/distributed/DistributedTestDriver.java b/src/test/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
deleted file mode 100755
index ee37e63..0000000
--- a/src/test/java/org/apache/pirk/test/distributed/DistributedTestDriver.java
+++ /dev/null
@@ -1,149 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.pirk.test.distributed;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.pirk.schema.data.DataSchemaLoader;
-import org.apache.pirk.schema.query.QuerySchemaLoader;
-import org.apache.pirk.schema.query.filter.StopListFilter;
-import org.apache.pirk.test.distributed.testsuite.DistTestSuite;
-import org.apache.pirk.test.utils.Inputs;
-import org.apache.pirk.utils.SystemConfiguration;
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Driver class to run the suite of functional tests for MR and Spark PIR
- *
- */
-public class DistributedTestDriver
-{
-  private static final Logger logger = LoggerFactory.getLogger(DistributedTestDriver.class);
-
-  // Input
-  public static final String JSON_PIR_INPUT_FILE_PROPERTY = "test.pir.inputJSONFile";
-  public static final String ES_PIR_INPUT_INDEX_PROPERTY = "test.pir.es.index";
-  public static final String PIR_QUERY_INPUT_DIR = "test.pir.queryInputDir";
-  public static final String PIR_STOPLIST_FILE = "test.pir.stopListFile";
-  public static final String ES_PIR_INPUT_RESOURCE_PROPERTY = "test.pir.es.resource";
-
-  // Elastic Search
-  public static final String ES_INPUT_NODES_PROPERTY = "es.nodes";
-  public static final String ES_INPUT_PORT_PROPERTY = "es.port";
-  public static final String ES_INPUT_INDEX_PROPERTY = "test.es.index";
-  public static final String ES_INPUT_TYPE_PROPERTY = "test.es.type";
-  public static final String ES_INPUT_RESOURCE_PROPERTY = "test.es.resource";
-
-  // Output
-  public static final String OUTPUT_DIRECTORY_PROPERTY = "test.outputHDFSFile";
-
-  public static void main(String[] args) throws Exception
-  {
-    // create a cli object to handle all program inputs
-    DistributedTestCLI cli = new DistributedTestCLI(args);
-
-    logger.info("DistributedTest Suite Beginning");
-    FileSystem fs = FileSystem.get(new Configuration());
-
-    String jarFile = cli.getOptionValue("j");
-    logger.info("jarFile = " + jarFile);
-    SystemConfiguration.setProperty("jarFile", jarFile);
-
-    List<JSONObject> dataElements = initialize(fs);
-
-    // Pull off the properties and reset upon completion
-    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
-    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
-    String stopListFileProp = SystemConfiguration.getProperty("pir.stopListFile");
-
-    test(fs, cli, dataElements);
-
-    cleanup(fs, dataSchemasProp, querySchemasProp, stopListFileProp);
-    logger.info("Distributed Test Suite Complete");
-  }
-
-  /**
-   * Create all inputs
-   */
-  public static List<JSONObject> initialize(FileSystem fs) throws Exception
-  {
-    List<JSONObject> dataElements = Inputs.createPIRJSONInput(fs);
-
-    String localStopListFile = Inputs.createPIRStopList(fs, true);
-    SystemConfiguration.setProperty("pir.stopListFile", localStopListFile);
-
-    Inputs.createSchemaFiles(fs, true, StopListFilter.class.getName());
-
-    return dataElements;
-  }
-
-  /**
-   * Execute Tests
-   */
-  public static void test(FileSystem fs, DistributedTestCLI cli, List<JSONObject> pirDataElements) throws Exception
-  {
-    if (cli.run("1:J"))
-    {
-      DistTestSuite.testJSONInputMR(fs, pirDataElements);
-    }
-    if (cli.run("1:E") || cli.run("1:ES"))
-    {
-      Inputs.createPIRESInput();
-      if (cli.run("1:E"))
-      {
-        DistTestSuite.testESInputMR(fs, pirDataElements);
-      }
-      if (cli.run("1:ES"))
-      {
-        DistTestSuite.testESInputSpark(fs, pirDataElements);
-      }
-    }
-    if (cli.run("1:JS"))
-    {
-      DistTestSuite.testJSONInputSpark(fs, pirDataElements);
-    }
-  }
-
-  /**
-   * Delete all necessary inputs, clean up
-   */
-  public static void cleanup(FileSystem fs, String dataSchemasProp, String querySchemasProp, String stopListProp) throws Exception
-  {
-    Inputs.deleteESInput();
-    fs.close();
-
-    SystemConfiguration.setProperty("pir.stopListFile", stopListProp);
-
-    // Force the query and data schemas to load their original values
-    if (!dataSchemasProp.equals("none"))
-    {
-      DataSchemaLoader.initialize();
-    }
-
-    if (!querySchemasProp.equals("none"))
-    {
-      QuerySchemaLoader.initialize();
-    }
-  }
-}
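
One observation on the control flow above: main() saves the schema and stop-list properties before calling test(fs, cli, dataElements), but cleanup() is only reached if every test completes, so an exception leaves the properties unrestored. A minimal sketch of the same save/run/restore sequence, hardened with a finally block (same calls as in the removed main(), reordered only to add the guard):

    String dataSchemasProp = SystemConfiguration.getProperty("data.schemas", "none");
    String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
    String stopListFileProp = SystemConfiguration.getProperty("pir.stopListFile");
    try
    {
      test(fs, cli, dataElements);
    } finally
    {
      // Restore the saved properties even when a distributed test throws
      cleanup(fs, dataSchemasProp, querySchemasProp, stopListFileProp);
    }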
