http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/main/java/org/apache/pirk/test/utils/Inputs.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/pirk/test/utils/Inputs.java b/src/main/java/org/apache/pirk/test/utils/Inputs.java deleted file mode 100644 index 10c1386..0000000 --- a/src/main/java/org/apache/pirk/test/utils/Inputs.java +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.pirk.test.utils; - -import java.io.File; -import java.io.IOException; -import java.io.OutputStreamWriter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; - -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.pirk.schema.data.DataSchemaLoader; -import org.apache.pirk.schema.data.partitioner.IPDataPartitioner; -import org.apache.pirk.schema.data.partitioner.ISO8601DatePartitioner; -import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner; -import org.apache.pirk.schema.query.QuerySchemaLoader; -import org.apache.pirk.test.distributed.DistributedTestDriver; -import org.apache.pirk.utils.HDFS; -import org.apache.pirk.utils.PIRException; -import org.apache.pirk.utils.SystemConfiguration; -import org.json.simple.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.Element; - -/** - * Input files for distributed testing - * - */ -public class Inputs -{ - private static final Logger logger = LoggerFactory.getLogger(Inputs.class); - - // Test data schema fields - public static final String DATE = "date"; - public static final String QNAME = "qname"; - public static final String SRCIP = "src_ip"; - public static final String DSTIP = "dest_ip"; - public static final String QTYPE = "qtype"; - public static final String RCODE = "rcode"; - public static final String IPS = "ip"; - - // Test query types - public static final String DNS_HOSTNAME_QUERY = "dns-hostname-query"; // Query for the watched hostnames occurred; ; watched value type -- hostname - public static final String DNS_IP_QUERY = "dns-ip-query"; // The watched IP address(es) were detected in the response to a query; watched value type -- IP in - // IPS field (resolution IP) - public static final String DNS_NXDOMAIN_QUERY = "dns-nxdomain-query"; // Query for nxdomain responses that were made for watched qnames - public 
static final String DNS_SRCIP_QUERY = "dns-srcip-query"; // Query for responses from watched srcIPs - public static final String DNS_SRCIP_QUERY_NO_FILTER = "dns-srcip-query-no-filter"; // Query for responses from watched srcIPs, no data filter used - - // Test query type files - localfs - public static final String DNS_HOSTNAME_QUERY_FILE = DNS_HOSTNAME_QUERY + "_file"; - public static final String DNS_IP_QUERY_FILE = DNS_IP_QUERY + "_file"; - public static final String DNS_NXDOMAIN_QUERY_FILE = DNS_NXDOMAIN_QUERY + "_file"; - public static final String DNS_SRCIP_QUERY_FILE = DNS_SRCIP_QUERY + "_file"; - public static final String DNS_SRCIP_QUERY_NO_FILTER_FILE = DNS_SRCIP_QUERY_NO_FILTER + "_file"; - - // Test query files hdfs - public static final String DNS_HOSTNAME_QUERY_FILE_HDFS = "/tmp/" + DNS_HOSTNAME_QUERY + "_file"; - public static final String DNS_IP_QUERY_FILE_HDFS = "/tmp/" + DNS_IP_QUERY + "_file"; - public static final String DNS_NXDOMAIN_QUERY_FILE_HDFS = "/tmp/" + DNS_NXDOMAIN_QUERY + "_file"; - public static final String DNS_SRCIP_QUERY_FILE_HDFS = "/tmp/" + DNS_SRCIP_QUERY + "_file"; - public static final String DNS_SRCIP_QUERY_NO_FILTER_FILE_HDFS = "/tmp/" + DNS_SRCIP_QUERY_NO_FILTER + "_file"; - - // Combined query file strings -- used to set properties - public static final String LOCALFS_QUERY_FILES = DNS_HOSTNAME_QUERY_FILE + "," + DNS_IP_QUERY_FILE + "," + DNS_NXDOMAIN_QUERY_FILE + "," - + DNS_SRCIP_QUERY_FILE + "," + DNS_SRCIP_QUERY_NO_FILTER_FILE; - - public static final String HDFS_QUERY_FILES = DNS_HOSTNAME_QUERY_FILE_HDFS + "," + DNS_IP_QUERY_FILE_HDFS + "," + DNS_NXDOMAIN_QUERY_FILE_HDFS + "," - + DNS_SRCIP_QUERY_FILE_HDFS + "," + DNS_SRCIP_QUERY_NO_FILTER_FILE_HDFS; - - // Test data schema files -- localFS and hdfs - public static final String TEST_DATA_SCHEMA_NAME = "testDataSchema"; - public static final String DATA_SCHEMA_FILE_LOCALFS = "testDataSchemaFile"; - public static final String DATA_SCHEMA_FILE_HDFS = "/tmp/testDataSchemaFile.xml"; - - /** - * Delete the ElasticSearch indices that was used for functional testing - */ - public static void deleteESInput() - { - String esPIRIndex = SystemConfiguration.getProperty(DistributedTestDriver.ES_INPUT_NODES_PROPERTY) + ":" - + SystemConfiguration.getProperty(DistributedTestDriver.ES_INPUT_PORT_PROPERTY) + "/" - + SystemConfiguration.getProperty(DistributedTestDriver.ES_PIR_INPUT_INDEX_PROPERTY); - logger.info("ES input being deleted at " + esPIRIndex); - - ProcessBuilder pDeletePIR = new ProcessBuilder("curl", "-XDELETE", esPIRIndex); - try - { - TestUtils.executeCommand(pDeletePIR); - logger.info("ES input deleted!"); - } catch (IOException e) - { - e.printStackTrace(); - } - } - - /** - * Creates PIR JSON input - */ - @SuppressWarnings("unchecked") - public static ArrayList<JSONObject> createJSONDataElements() - { - ArrayList<JSONObject> dataElementsJSON = new ArrayList<>(); - - JSONObject jsonObj1 = new JSONObject(); - jsonObj1.put(DATE, "2016-02-20T23:29:05.000Z"); - jsonObj1.put(QNAME, "a.b.c.com"); // hits on domain selector - jsonObj1.put(SRCIP, "55.55.55.55"); // hits on IP selector - jsonObj1.put(DSTIP, "1.2.3.6"); - jsonObj1.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj1.put(RCODE, 0); - jsonObj1.put(IPS, new ArrayList<>(Arrays.asList("10.20.30.40", "10.20.30.60"))); - - dataElementsJSON.add(jsonObj1); - - JSONObject jsonObj2 = new JSONObject(); - jsonObj2.put(DATE, "2016-02-20T23:29:06.000Z"); - jsonObj2.put(QNAME, "d.e.com"); - jsonObj2.put(SRCIP, 
"127.128.129.130"); - jsonObj2.put(DSTIP, "1.2.3.4"); - jsonObj2.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj2.put(RCODE, 0); - jsonObj2.put(IPS, new ArrayList<>(Collections.singletonList("5.6.7.8"))); - - dataElementsJSON.add(jsonObj2); - - JSONObject jsonObj3 = new JSONObject(); - jsonObj3.put(DATE, "2016-02-20T23:29:07.000Z"); - jsonObj3.put(QNAME, "d.e.com"); - jsonObj3.put(SRCIP, "131.132.133.134"); - jsonObj3.put(DSTIP, "9.10.11.12"); - jsonObj3.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj3.put(RCODE, 0); - jsonObj3.put(IPS, new ArrayList<>(Collections.singletonList("13.14.15.16"))); - - dataElementsJSON.add(jsonObj3); - - JSONObject jsonObj4 = new JSONObject(); - jsonObj4.put(DATE, "2016-02-20T23:29:08.000Z"); - jsonObj4.put(QNAME, "d.e.com"); - jsonObj4.put(SRCIP, "135.136.137.138"); - jsonObj4.put(DSTIP, "17.18.19.20"); - jsonObj4.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj4.put(RCODE, 3); - jsonObj4.put(IPS, new ArrayList<>(Collections.singletonList("21.22.23.24"))); - - dataElementsJSON.add(jsonObj4); - - JSONObject jsonObj5 = new JSONObject(); - jsonObj5.put(DATE, "2016-02-20T23:29:09.000Z"); - jsonObj5.put(QNAME, "d.e.com"); - jsonObj5.put(SRCIP, "139.140.141.142"); - jsonObj5.put(DSTIP, "25.26.27.28"); - jsonObj5.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj5.put(RCODE, 0); - jsonObj5.put(IPS, new ArrayList<>(Collections.singletonList("5.6.7.8"))); - - dataElementsJSON.add(jsonObj5); - - JSONObject jsonObj6 = new JSONObject(); - jsonObj6.put(DATE, "2016-02-20T23:29:10.000Z"); - jsonObj6.put(QNAME, "d.e.com"); - jsonObj6.put(SRCIP, "143.144.145.146"); - jsonObj6.put(DSTIP, "33.34.35.36"); - jsonObj6.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj6.put(RCODE, 0); - jsonObj6.put(IPS, new ArrayList<>(Collections.singletonList("5.6.7.8"))); - - dataElementsJSON.add(jsonObj6); - - JSONObject jsonObj7 = new JSONObject(); - jsonObj7.put(DATE, "2016-02-20T23:29:11.000Z"); - jsonObj7.put(QNAME, "something.else"); - jsonObj7.put(SRCIP, "1.1.1.1"); - jsonObj7.put(DSTIP, "2.2.2.2"); - jsonObj7.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj7.put(RCODE, 0); - jsonObj7.put(IPS, new ArrayList<>(Collections.singletonList("3.3.3.3"))); - - dataElementsJSON.add(jsonObj7); - - // This should never be returned - doesn't hit on any domain selectors - // resolution ip on stoplist - JSONObject jsonObj8 = new JSONObject(); - jsonObj8.put(DATE, "2016-02-20T23:29:12.000Z"); - jsonObj8.put(QNAME, "something.else2"); - jsonObj8.put(SRCIP, "5.6.7.8"); - jsonObj8.put(DSTIP, "2.2.2.22"); - jsonObj8.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj8.put(RCODE, 0); - jsonObj8.put(IPS, new ArrayList<>(Collections.singletonList("3.3.3.132"))); - - dataElementsJSON.add(jsonObj8); - - // This should never be returned in distributed case -- domain and resolution ip on stoplist - JSONObject jsonObj9 = new JSONObject(); - jsonObj9.put(DATE, "2016-02-20T23:29:13.000Z"); - jsonObj9.put(QNAME, "something.else.on.stoplist"); - jsonObj9.put(SRCIP, "55.55.55.55"); - jsonObj9.put(DSTIP, "2.2.2.232"); - jsonObj9.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj9.put(RCODE, 0); - jsonObj9.put(IPS, new ArrayList<>(Collections.singletonList("3.3.3.132"))); - - dataElementsJSON.add(jsonObj9); - - return dataElementsJSON; - } - - /** - * Creates an ArrayList of JSONObjects with RCODE value of 
3 - */ - @SuppressWarnings("unchecked") - public static ArrayList<JSONObject> getRcode3JSONDataElements() - { - ArrayList<JSONObject> dataElementsJSON = new ArrayList<>(); - - JSONObject jsonObj4 = new JSONObject(); - jsonObj4.put(DATE, "2016-02-20T23:29:08.000Z"); - jsonObj4.put(QNAME, "d.e.com"); - jsonObj4.put(SRCIP, "135.136.137.138"); - jsonObj4.put(DSTIP, "17.18.19.20"); - jsonObj4.put(QTYPE, new ArrayList<>(Collections.singletonList((short) 1))); - jsonObj4.put(RCODE, 3); - jsonObj4.put(IPS, new ArrayList<>(Collections.singletonList("21.22.23.24"))); - - dataElementsJSON.add(jsonObj4); - - return dataElementsJSON; - } - - /** - * Creates PIR JSON input and writes to hdfs - */ - public static List<JSONObject> createPIRJSONInput(FileSystem fs) - { - String inputJSONFile = SystemConfiguration.getProperty(DistributedTestDriver.JSON_PIR_INPUT_FILE_PROPERTY); - logger.info("PIR JSON input being created at " + inputJSONFile); - - List<JSONObject> dataElementsJSON = createJSONDataElements(); - - HDFS.writeFile(dataElementsJSON, fs, inputJSONFile, true); - logger.info("PIR JSON input successfully created!"); - - return dataElementsJSON; - } - - /** - * Creates PIR Elasticsearch input - */ - public static void createPIRESInput() - { - String esTestIndex = SystemConfiguration.getProperty(DistributedTestDriver.ES_INPUT_NODES_PROPERTY) + ":" - + SystemConfiguration.getProperty(DistributedTestDriver.ES_INPUT_PORT_PROPERTY) + "/" - + SystemConfiguration.getProperty(DistributedTestDriver.ES_PIR_INPUT_INDEX_PROPERTY); - String esType = SystemConfiguration.getProperty(DistributedTestDriver.ES_INPUT_TYPE_PROPERTY); - logger.info("ES input being created at " + esTestIndex + " with type " + esType); - - // Create ES Index - logger.info("Creating new testindex:"); - ProcessBuilder pCreate = new ProcessBuilder("curl", "-XPUT", esTestIndex); - try - { - TestUtils.executeCommand(pCreate); - } catch (IOException e) - { - e.printStackTrace(); - } - - // Add elements - logger.info(" \n \n Adding elements to testindex:"); - - String indexTypeNum1 = esTestIndex + "/" + esType + "/1"; - logger.info("indexTypeNum1 = " + indexTypeNum1); - ProcessBuilder pAdd1 = new ProcessBuilder("curl", "-XPUT", indexTypeNum1, "-d", - "{\"qname\":\"a.b.c.com\",\"date\":\"2016-02-20T23:29:05.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"55.55.55.55\",\"dest_ip\":\"1.2.3.6\"" + ",\"ip\":[\"10.20.30.40\",\"10.20.30.60\"]}"); - - String indexTypeNum2 = esTestIndex + "/" + esType + "/2"; - logger.info("indexTypeNum2 = " + indexTypeNum2); - ProcessBuilder pAdd2 = new ProcessBuilder("curl", "-XPUT", indexTypeNum2, "-d", - "{\"qname\":\"d.e.com\",\"date\":\"2016-02-20T23:29:06.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"127.128.129.130\",\"dest_ip\":\"1.2.3.4\"" + ",\"ip\":[\"5.6.7.8\"]}"); - - String indexTypeNum3 = esTestIndex + "/" + esType + "/3"; - logger.info("indexTypeNum3 = " + indexTypeNum3); - ProcessBuilder pAdd3 = new ProcessBuilder("curl", "-XPUT", indexTypeNum3, "-d", - "{\"qname\":\"d.e.com\",\"date\":\"2016-02-20T23:29:07.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"131.132.133.134\",\"dest_ip\":\"9.10.11.12\"" + ",\"ip\":[\"13.14.15.16\"]}"); - - String indexTypeNum4 = esTestIndex + "/" + esType + "/4"; - logger.info("indexTypeNum4 = " + indexTypeNum4); - ProcessBuilder pAdd4 = new ProcessBuilder("curl", "-XPUT", indexTypeNum4, "-d", - "{\"qname\":\"d.e.com\",\"date\":\"2016-02-20T23:29:08.000Z\",\"qtype\":[\"1\"]" - + 
",\"rcode\":\"3\",\"src_ip\":\"135.136.137.138\",\"dest_ip\":\"17.18.19.20\"" + ",\"ip\":[\"21.22.23.24\"]}"); - - String indexTypeNum5 = esTestIndex + "/" + esType + "/5"; - logger.info("indexTypeNum5 = " + indexTypeNum5); - ProcessBuilder pAdd5 = new ProcessBuilder("curl", "-XPUT", indexTypeNum5, "-d", - "{\"qname\":\"d.e.com\",\"date\":\"2016-02-20T23:29:09.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"139.140.141.142\",\"dest_ip\":\"25.26.27.28\"" + ",\"ip\":[\"5.6.7.8\"]}"); - - String indexTypeNum6 = esTestIndex + "/" + esType + "/6"; - logger.info("indexTypeNum6 = " + indexTypeNum6); - ProcessBuilder pAdd6 = new ProcessBuilder("curl", "-XPUT", indexTypeNum6, "-d", - "{\"qname\":\"d.e.com\",\"date\":\"2016-02-20T23:29:10.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"143.144.145.146\",\"dest_ip\":\"33.34.35.36\"" + ",\"ip\":[\"5.6.7.8\"]}"); - - String indexTypeNum7 = esTestIndex + "/" + esType + "/7"; - logger.info("indexTypeNum7 = " + indexTypeNum7); - ProcessBuilder pAdd7 = new ProcessBuilder("curl", "-XPUT", indexTypeNum7, "-d", - "{\"qname\":\"something.else\",\"date\":\"2016-02-20T23:29:11.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"1.1.1.1\",\"dest_ip\":\"2.2.2.2\"" + ",\"ip\":[\"3.3.3.3\"]}"); - - // Never should be returned - doesn't hit on any selectors - String indexTypeNum8 = esTestIndex + "/" + esType + "/8"; - logger.info("indexTypeNum8 = " + indexTypeNum8); - ProcessBuilder pAdd8 = new ProcessBuilder("curl", "-XPUT", indexTypeNum8, "-d", - "{\"qname\":\"something.else2\",\"date\":\"2016-02-20T23:29:12.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"1.1.1.12\",\"dest_ip\":\"2.2.2.22\"" + ",\"ip\":[\"3.3.3.32\"]}"); - - // This should never be returned -- domain on stoplist - String indexTypeNum9 = esTestIndex + "/" + esType + "/9"; - logger.info("indexTypeNum9 = " + indexTypeNum9); - ProcessBuilder pAdd9 = new ProcessBuilder("curl", "-XPUT", indexTypeNum9, "-d", - "{\"qname\":\"something.else.on.stoplist\",\"date\":\"2016-02-20T23:29:13.000Z\",\"qtype\":[\"1\"]" - + ",\"rcode\":\"0\",\"src_ip\":\"55.55.55.55\",\"dest_ip\":\"2.2.2.232\"" + ",\"ip\":[\"3.3.3.132\"]}"); - - try - { - TestUtils.executeCommand(pAdd1); - TestUtils.executeCommand(pAdd2); - TestUtils.executeCommand(pAdd3); - TestUtils.executeCommand(pAdd4); - TestUtils.executeCommand(pAdd5); - TestUtils.executeCommand(pAdd6); - TestUtils.executeCommand(pAdd7); - TestUtils.executeCommand(pAdd8); - TestUtils.executeCommand(pAdd9); - } catch (IOException e) - { - e.printStackTrace(); - } - - // Retrieve and print all of the elements - for (int i = 1; i < 7; ++i) - { - logger.info("Retrieving element number = " + i + " from " + esTestIndex); - String elementGet = esTestIndex + "/" + esType + "/" + i; - logger.info("elementGet = " + elementGet); - ProcessBuilder pGet = new ProcessBuilder("curl", "-XGET", elementGet); - try - { - TestUtils.executeCommand(pGet); - } catch (IOException e) - { - e.printStackTrace(); - } - } - } - - /** - * Creates PIR stoplist file - */ - public static String createPIRStopList(FileSystem fs, boolean hdfs) throws IOException, PIRException - { - logger.info("PIR stopList file being created"); - - List<String> elements = Arrays.asList("something.else.on.stoplist", "3.3.3.132"); - - if (hdfs) - { - String pirStopListFile = SystemConfiguration.getProperty(DistributedTestDriver.PIR_STOPLIST_FILE); - if (pirStopListFile == null) - { - throw new PIRException("HDFS stop list file configuration name is required."); - } - 
HDFS.writeFile(elements, fs, pirStopListFile, true); - logger.info("pirStopListFile file successfully created on hdfs!"); - } - - String prefix = SystemConfiguration.getProperty("pir.stopListFile"); - if (prefix == null) - { - throw new PIRException("Local stop list file configuration name is required."); - } - return TestUtils.writeToTmpFile(elements, prefix, null); - } - - /** - * Create and load the data and query schema files used for testing - */ - public static void createSchemaFiles(String filter) throws Exception - { - createSchemaFiles(null, false, filter); - } - - /** - * Create and load the data and query schema files used for testing - * <p> - * Writes both local and hdfs schema files if hdfs=true -- only updates the corresponding properties for the local files - */ - public static void createSchemaFiles(FileSystem fs, boolean hdfs, String filter) throws Exception - { - // Create and load the data schema - if (!hdfs) - { - createDataSchema(false); - } - else - { - createDataSchema(fs, true); - } - DataSchemaLoader.initialize(); - - // Create and load the query schemas - // DNS_HOSTNAME_QUERY - List<String> dnsHostnameQueryElements = Arrays.asList(DATE, SRCIP, DSTIP, QTYPE, RCODE, IPS); - List<String> dnsHostnameQueryFilterElements = Collections.singletonList(QNAME); - - TestUtils.createQuerySchema(DNS_HOSTNAME_QUERY_FILE, DNS_HOSTNAME_QUERY, TEST_DATA_SCHEMA_NAME, QNAME, dnsHostnameQueryElements, - dnsHostnameQueryFilterElements, filter); - if (hdfs) - { - TestUtils.createQuerySchema(DNS_HOSTNAME_QUERY_FILE_HDFS, DNS_HOSTNAME_QUERY, TEST_DATA_SCHEMA_NAME, QNAME, dnsHostnameQueryElements, - dnsHostnameQueryFilterElements, filter, false, fs, hdfs); - } - - // DNS_IP_QUERY - List<String> dnsIPQueryElements = Arrays.asList(SRCIP, DSTIP, IPS); - List<String> dnsIPQueryFilterElements = Collections.singletonList(QNAME); - - TestUtils.createQuerySchema(DNS_IP_QUERY_FILE, DNS_IP_QUERY, TEST_DATA_SCHEMA_NAME, IPS, dnsIPQueryElements, dnsIPQueryFilterElements, filter); - if (hdfs) - { - TestUtils.createQuerySchema(DNS_IP_QUERY_FILE_HDFS, DNS_IP_QUERY, TEST_DATA_SCHEMA_NAME, IPS, dnsIPQueryElements, dnsIPQueryFilterElements, filter, - false, fs, hdfs); - } - - // DNS_NXDOMAIN_QUERY - List<String> dnsNXQueryElements = Arrays.asList(QNAME, SRCIP, DSTIP); - List<String> dnsNXQueryFilterElements = Collections.singletonList(QNAME); - - TestUtils - .createQuerySchema(DNS_NXDOMAIN_QUERY_FILE, DNS_NXDOMAIN_QUERY, TEST_DATA_SCHEMA_NAME, QNAME, dnsNXQueryElements, dnsNXQueryFilterElements, filter); - if (hdfs) - { - TestUtils.createQuerySchema(DNS_NXDOMAIN_QUERY_FILE_HDFS, DNS_NXDOMAIN_QUERY, TEST_DATA_SCHEMA_NAME, QNAME, dnsNXQueryElements, dnsNXQueryFilterElements, - filter, false, fs, hdfs); - } - - // DNS_SRCIP_QUERY - List<String> dnsSrcIPQueryElements = Arrays.asList(QNAME, DSTIP, IPS); - List<String> dnsSrcIPQueryFilterElements = Arrays.asList(SRCIP, IPS); - - TestUtils - .createQuerySchema(DNS_SRCIP_QUERY_FILE, DNS_SRCIP_QUERY, TEST_DATA_SCHEMA_NAME, SRCIP, dnsSrcIPQueryElements, dnsSrcIPQueryFilterElements, filter); - if (hdfs) - { - TestUtils.createQuerySchema(DNS_SRCIP_QUERY_FILE_HDFS, DNS_SRCIP_QUERY, TEST_DATA_SCHEMA_NAME, SRCIP, dnsSrcIPQueryElements, dnsSrcIPQueryFilterElements, - filter, false, fs, hdfs); - } - - // DNS_SRCIP_QUERY_NO_FILTER - List<String> dnsSrcIPQueryNoFilterElements = Arrays.asList(QNAME, DSTIP, IPS); - TestUtils.createQuerySchema(DNS_SRCIP_QUERY_NO_FILTER_FILE, DNS_SRCIP_QUERY_NO_FILTER, TEST_DATA_SCHEMA_NAME, SRCIP, dnsSrcIPQueryNoFilterElements, null, - null); 
- if (hdfs) - { - TestUtils.createQuerySchema(DNS_SRCIP_QUERY_NO_FILTER_FILE_HDFS, DNS_SRCIP_QUERY_NO_FILTER, TEST_DATA_SCHEMA_NAME, SRCIP, dnsSrcIPQueryNoFilterElements, - null, null, false, fs, hdfs); - } - - QuerySchemaLoader.initialize(); - } - - /** - * Create the test data schema file - */ - private static void createDataSchema(boolean hdfs) throws IOException - { - createDataSchema(null, hdfs); - } - - /** - * Create the test data schema file - */ - private static void createDataSchema(FileSystem fs, boolean hdfs) throws IOException - { - // Create a temporary file for the test schema, set in the properties - File file = File.createTempFile(DATA_SCHEMA_FILE_LOCALFS, ".xml"); - file.deleteOnExit(); - logger.info("file = " + file.toString()); - SystemConfiguration.setProperty("data.schemas", file.toString()); - - // If we are performing distributed testing, write both the local and hdfs files - OutputStreamWriter osw = null; - if (hdfs) - { - Path filePath = new Path(DATA_SCHEMA_FILE_HDFS); - fs.deleteOnExit(filePath); - osw = new OutputStreamWriter(fs.create(filePath, true)); - - logger.info("hdfs: filePath = " + filePath.toString()); - } - - // Write to the file - try - { - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc = dBuilder.newDocument(); - - // root element - Element rootElement = doc.createElement("schema"); - doc.appendChild(rootElement); - - // Add the schemaName - Element schemaNameElement = doc.createElement("schemaName"); - schemaNameElement.appendChild(doc.createTextNode(TEST_DATA_SCHEMA_NAME)); - rootElement.appendChild(schemaNameElement); - - String primitiveTypePartitionerName = PrimitiveTypePartitioner.class.getName(); - String ipPartitionerName = IPDataPartitioner.class.getName(); - String datePartitioner = ISO8601DatePartitioner.class.getName(); - - // date - TestUtils.addElement(doc, rootElement, DATE, PrimitiveTypePartitioner.STRING, "false", datePartitioner); - - // qname - TestUtils.addElement(doc, rootElement, QNAME, PrimitiveTypePartitioner.STRING, "false", primitiveTypePartitionerName); - - // src_ip - TestUtils.addElement(doc, rootElement, SRCIP, PrimitiveTypePartitioner.STRING, "false", ipPartitionerName); - - // dest_ip - TestUtils.addElement(doc, rootElement, DSTIP, PrimitiveTypePartitioner.STRING, "false", ipPartitionerName); - - // qtype - TestUtils.addElement(doc, rootElement, QTYPE, PrimitiveTypePartitioner.SHORT, "true", primitiveTypePartitionerName); - - // rcode - TestUtils.addElement(doc, rootElement, RCODE, PrimitiveTypePartitioner.INT, "false", primitiveTypePartitionerName); - - // ip - TestUtils.addElement(doc, rootElement, IPS, PrimitiveTypePartitioner.STRING, "true", ipPartitionerName); - - // Write to a xml file - both localFS and hdfs - TransformerFactory transformerFactory = TransformerFactory.newInstance(); - Transformer transformer = transformerFactory.newTransformer(); - DOMSource source = new DOMSource(doc); - - // LocalFS - StreamResult resultLocalFS = new StreamResult(file); - transformer.transform(source, resultLocalFS); - - if (hdfs) - { - StreamResult resultHDFS = new StreamResult(osw); - transformer.transform(source, resultHDFS); - } - - // Output for testing - StreamResult consoleResult = new StreamResult(System.out); - transformer.transform(source, consoleResult); - System.out.println(); - - if (osw != null) - { - osw.close(); - } - - } catch (Exception e) - { - e.printStackTrace(); - } - } -}
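[For orientation, a minimal standalone sketch of how the deleted utilities above and below fit together, inferred only from the signatures visible in this diff: schemas are registered via Inputs.createSchemaFiles, test records come from Inputs.createJSONDataElements, and the end-to-end encrypt/respond/decrypt flow is driven through StandaloneQuery.performStandaloneQuery. The class name StandaloneQuerySketch, the null filter argument, the selector values, and the single-thread setting are illustrative assumptions, not part of this commit.]

import java.util.Arrays;
import java.util.List;

import org.apache.pirk.schema.response.QueryResponseJSON;
import org.apache.pirk.test.utils.Inputs;
import org.apache.pirk.test.utils.StandaloneQuery;
import org.json.simple.JSONObject;

public class StandaloneQuerySketch
{
  public static void main(String[] args) throws Exception
  {
    // Create and register the local test data and query schemas; null here means no data filter is configured (assumption).
    Inputs.createSchemaFiles(null);

    // Build the in-memory JSON test records defined in Inputs.createJSONDataElements().
    List<JSONObject> dataElements = Inputs.createJSONDataElements();

    // Query for two of the qname values that appear in the test data (illustrative choice).
    List<String> selectors = Arrays.asList("a.b.c.com", "something.else");

    // Encrypt the selectors, run the standalone responder over the records, and decrypt the results with one thread.
    List<QueryResponseJSON> results = StandaloneQuery.performStandaloneQuery(dataElements, Inputs.DNS_HOSTNAME_QUERY, selectors, 1, false);

    for (QueryResponseJSON result : results)
    {
      System.out.println(result.toString());
    }
  }
}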
http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java b/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java deleted file mode 100644 index 1c26bdd..0000000 --- a/src/main/java/org/apache/pirk/test/utils/StandaloneQuery.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.pirk.test.utils; - -import org.apache.pirk.encryption.Paillier; -import org.apache.pirk.querier.wideskies.Querier; -import org.apache.pirk.querier.wideskies.QuerierConst; -import org.apache.pirk.querier.wideskies.decrypt.DecryptResponse; -import org.apache.pirk.querier.wideskies.encrypt.EncryptQuery; -import org.apache.pirk.query.wideskies.Query; -import org.apache.pirk.query.wideskies.QueryInfo; -import org.apache.pirk.query.wideskies.QueryUtils; -import org.apache.pirk.responder.wideskies.standalone.Responder; -import org.apache.pirk.response.wideskies.Response; -import org.apache.pirk.schema.query.QuerySchema; -import org.apache.pirk.schema.query.QuerySchemaRegistry; -import org.apache.pirk.schema.response.QueryResponseJSON; -import org.apache.pirk.serialization.LocalFileSystemStore; -import org.apache.pirk.utils.PIRException; -import org.apache.pirk.utils.SystemConfiguration; -import org.json.simple.JSONObject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.HashMap; -import java.util.List; - -import static org.junit.Assert.fail; - -public class StandaloneQuery -{ - private static final Logger logger = LoggerFactory.getLogger(StandaloneQuery.class); - - static String queryFileDomain = "qfDomain"; - static String queryFileIP = "qfIP"; - - String testDataSchemaName = "testDataSchema"; - String testQuerySchemaName = "testQuerySchema"; - - // Base method to perform the query - public static List<QueryResponseJSON> performStandaloneQuery(List<JSONObject> dataElements, String queryType, List<String> selectors, - int numThreads, boolean testFalsePositive) throws IOException, InterruptedException, PIRException - { - logger.info("Performing watchlisting: "); - - QuerySchema qSchema = QuerySchemaRegistry.get(queryType); - - // Create the necessary files - LocalFileSystemStore storage = new LocalFileSystemStore(); - String querySideOuputFilePrefix = "querySideOut"; - File fileQuerier = File.createTempFile(querySideOuputFilePrefix + "-" + QuerierConst.QUERIER_FILETAG, ".txt"); - File fileQuery = File.createTempFile(querySideOuputFilePrefix + "-" + QuerierConst.QUERY_FILETAG, ".txt"); - String responseFile = 
"encryptedResponse"; - File fileResponse = File.createTempFile(responseFile, ".txt"); - String finalResultsFile = "finalResultFile"; - File fileFinalResults = File.createTempFile(finalResultsFile, ".txt"); - - logger.info("fileQuerier = " + fileQuerier.getAbsolutePath() + " fileQuery = " + fileQuery.getAbsolutePath() + " responseFile = " - + fileResponse.getAbsolutePath() + " fileFinalResults = " + fileFinalResults.getAbsolutePath()); - - boolean embedSelector = SystemConfiguration.getBooleanProperty("pirTest.embedSelector", false); - boolean useExpLookupTable = SystemConfiguration.getBooleanProperty("pirTest.useExpLookupTable", false); - boolean useHDFSExpLookupTable = SystemConfiguration.getBooleanProperty("pirTest.useHDFSExpLookupTable", false); - - // Set the necessary objects - QueryInfo queryInfo = new QueryInfo(BaseTests.queryIdentifier, selectors.size(), BaseTests.hashBitSize, BaseTests.hashKey, BaseTests.dataPartitionBitSize, - queryType, useExpLookupTable, embedSelector, useHDFSExpLookupTable); - - if (SystemConfiguration.getBooleanProperty("pir.embedQuerySchema", false)) - { - queryInfo.addQuerySchema(qSchema); - } - - Paillier paillier = new Paillier(BaseTests.paillierBitSize, BaseTests.certainty); - - // Perform the encryption - logger.info("Performing encryption of the selectors - forming encrypted query vectors:"); - EncryptQuery encryptQuery = new EncryptQuery(queryInfo, selectors, paillier); - encryptQuery.encrypt(numThreads); - logger.info("Completed encryption of the selectors - completed formation of the encrypted query vectors:"); - - // Dork with the embedSelectorMap to generate a false positive for the last valid selector in selectors - if (testFalsePositive) - { - Querier querier = encryptQuery.getQuerier(); - HashMap<Integer,String> embedSelectorMap = querier.getEmbedSelectorMap(); - logger.info("embedSelectorMap((embedSelectorMap.size()-2)) = " + embedSelectorMap.get((embedSelectorMap.size() - 2)) + " selector = " - + selectors.get((embedSelectorMap.size() - 2))); - embedSelectorMap.put((embedSelectorMap.size() - 2), "fakeEmbeddedSelector"); - } - - // Write necessary output files - storage.store(fileQuerier, encryptQuery.getQuerier()); - storage.store(fileQuery, encryptQuery.getQuery()); - - // Perform the PIR query and build the response elements - logger.info("Performing the PIR Query and constructing the response elements:"); - Query query = storage.recall(fileQuery, Query.class); - Responder pirResponder = new Responder(query); - logger.info("Query and Responder elements constructed"); - for (JSONObject jsonData : dataElements) - { - String selector = QueryUtils.getSelectorByQueryTypeJSON(qSchema, jsonData); - logger.info("selector = " + selector + " numDataElements = " + jsonData.size()); - try - { - pirResponder.addDataElement(selector, jsonData); - } catch (Exception e) - { - fail(e.toString()); - } - } - logger.info("Completed the PIR Query and construction of the response elements:"); - - // Set the response object, extract, write to file - logger.info("Forming response from response elements; writing to a file"); - pirResponder.setResponseElements(); - Response responseOut = pirResponder.getResponse(); - storage.store(fileResponse, responseOut); - logger.info("Completed forming response from response elements and writing to a file"); - - // Perform decryption - // Reconstruct the necessary objects from the files - logger.info("Performing decryption; writing final results file"); - Response responseIn = storage.recall(fileResponse, Response.class); - 
Querier querier = storage.recall(fileQuerier, Querier.class); - - // Perform decryption and output the result file - DecryptResponse decryptResponse = new DecryptResponse(responseIn, querier); - decryptResponse.decrypt(numThreads); - decryptResponse.writeResultFile(fileFinalResults); - logger.info("Completed performing decryption and writing final results file"); - - // Read in results - logger.info("Reading in and checking results"); - List<QueryResponseJSON> results = TestUtils.readResultsFile(fileFinalResults); - - // Clean up - fileQuerier.delete(); - fileQuery.delete(); - fileResponse.delete(); - fileFinalResults.delete(); - - return results; - } -} http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/main/java/org/apache/pirk/test/utils/TestUtils.java ---------------------------------------------------------------------- diff --git a/src/main/java/org/apache/pirk/test/utils/TestUtils.java b/src/main/java/org/apache/pirk/test/utils/TestUtils.java deleted file mode 100644 index 1ea01fb..0000000 --- a/src/main/java/org/apache/pirk/test/utils/TestUtils.java +++ /dev/null @@ -1,312 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.pirk.test.utils; - -import java.io.BufferedReader; -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileReader; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStreamReader; -import java.io.OutputStreamWriter; -import java.util.ArrayList; -import java.util.List; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.transform.Transformer; -import javax.xml.transform.TransformerFactory; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; - -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.pirk.schema.response.QueryResponseJSON; -import org.apache.pirk.utils.SystemConfiguration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.Element; - -/** - * Class to hold testing utilities - * - */ -public class TestUtils -{ - private static final Logger logger = LoggerFactory.getLogger(TestUtils.class); - - /** - * Method to delete an ES index - */ - public static void deleteESTestIndex(String index) - { - logger.info("Deleting index:"); - ProcessBuilder pDelete = new ProcessBuilder("curl", "-XDELETE", index); - try - { - executeCommand(pDelete); - } catch (IOException e) - { - e.printStackTrace(); - } - } - - /** - * Method to execute process - */ - public static void executeCommand(ProcessBuilder p) throws IOException - { - Process proc = p.start(); - - try (BufferedReader stdInput = new BufferedReader(new InputStreamReader(proc.getInputStream())); - BufferedReader stdError = new BufferedReader(new InputStreamReader(proc.getErrorStream()))) - { - // Read the output from the command - logger.info("Standard output of the command:\n"); - String s; - while ((s = stdInput.readLine()) != null) - { - logger.info(s); - } - - // Read any errors from the attempted command - logger.info("Standard error of the command (if any):\n"); - while ((s = stdError.readLine()) != null) - { - logger.info(s); - } - } - } - - /** - * Helper method to add elements to the test data schema - */ - public static void addElement(Document doc, Element rootElement, String elementName, String typeIn, String isArrayIn, String partitionerIn) - { - Element element = doc.createElement("element"); - rootElement.appendChild(element); - - Element name = doc.createElement("name"); - name.appendChild(doc.createTextNode(elementName)); - element.appendChild(name); - - Element type = doc.createElement("type"); - type.appendChild(doc.createTextNode(typeIn)); - element.appendChild(type); - - if (isArrayIn.equals("true")) - { - element.appendChild(doc.createElement("isArray")); - } - - if (partitionerIn != null) - { - Element partitioner = doc.createElement("partitioner"); - partitioner.appendChild(doc.createTextNode(partitionerIn)); - element.appendChild(partitioner); - } - } - - /** - * Creates the test query schema file - */ - public static void createQuerySchema(String schemaFile, String querySchemaName, String dataSchemaNameInput, String selectorNameInput, - List<String> elementNames, List<String> filterNames, String filter) throws IOException - { - createQuerySchema(schemaFile, querySchemaName, dataSchemaNameInput, selectorNameInput, elementNames, filterNames, filter, true, null, false); - } - - /** - * Creates the test query schema file - */ - public static void createQuerySchema(String schemaFile, String querySchemaName, String dataSchemaNameInput, String 
selectorNameInput, - List<String> elementNames, List<String> filterNames, String filter, boolean append, FileSystem fs, boolean hdfs) throws IOException - { - logger.info("createQuerySchema: querySchemaName = " + querySchemaName); - - // Create a temporary file for the test schema, set in the properties - String fileName; - File file = null; - OutputStreamWriter osw = null; - if (hdfs) - { - Path filePath = new Path(schemaFile); - fs.deleteOnExit(filePath); - fileName = filePath.toString(); - - osw = new OutputStreamWriter(fs.create(filePath, true)); - - logger.info("hdfs: filePath = " + fileName); - } - else - { - file = File.createTempFile(schemaFile, ".xml"); - file.deleteOnExit(); - fileName = file.toString(); - logger.info("localFS: file = " + file.toString()); - } - - if (append) - { - String currentSchemas = SystemConfiguration.getProperty("query.schemas", ""); - if (currentSchemas.equals("") || currentSchemas.equals("none")) - { - SystemConfiguration.setProperty("query.schemas", fileName); - } - else - { - SystemConfiguration.setProperty("query.schemas", SystemConfiguration.getProperty("query.schemas", "") + "," + fileName); - } - } - logger.info("query.schemas = " + SystemConfiguration.getProperty("query.schemas")); - - // Write to the file - try - { - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - Document doc = dBuilder.newDocument(); - - // root element - Element rootElement = doc.createElement("schema"); - doc.appendChild(rootElement); - - // Add the schemaName - Element schemaNameElement = doc.createElement("schemaName"); - schemaNameElement.appendChild(doc.createTextNode(querySchemaName)); - rootElement.appendChild(schemaNameElement); - - // Add the dataSchemaName - Element dataSchemaNameElement = doc.createElement("dataSchemaName"); - dataSchemaNameElement.appendChild(doc.createTextNode(dataSchemaNameInput)); - rootElement.appendChild(dataSchemaNameElement); - - // Add the selectorName - Element selectorNameElement = doc.createElement("selectorName"); - selectorNameElement.appendChild(doc.createTextNode(selectorNameInput)); - rootElement.appendChild(selectorNameElement); - - // Add the elementNames - Element elements = doc.createElement("elements"); - rootElement.appendChild(elements); - for (String elementName : elementNames) - { - logger.info("elementName = " + elementName); - Element name = doc.createElement("name"); - name.appendChild(doc.createTextNode(elementName)); - elements.appendChild(name); - } - - // Add the filter - if (filter != null) - { - Element filterElement = doc.createElement("filter"); - filterElement.appendChild(doc.createTextNode(filter)); - rootElement.appendChild(filterElement); - - // Add the filterNames - Element filterNamesElement = doc.createElement("filterNames"); - rootElement.appendChild(filterNamesElement); - for (String filterName : filterNames) - { - logger.info("filterName = " + filterName); - Element name = doc.createElement("name"); - name.appendChild(doc.createTextNode(filterName)); - filterNamesElement.appendChild(name); - } - } - - // Write to a xml file - TransformerFactory transformerFactory = TransformerFactory.newInstance(); - Transformer transformer = transformerFactory.newTransformer(); - DOMSource source = new DOMSource(doc); - StreamResult result; - if (hdfs) - { - result = new StreamResult(osw); - } - else - { - result = new StreamResult(file); - } - transformer.transform(source, result); - - // Output for testing - StreamResult 
consoleResult = new StreamResult(System.out); - transformer.transform(source, consoleResult); - System.out.println(); - - if (osw != null) - { - osw.close(); - } - - } catch (Exception e) - { - e.printStackTrace(); - } - } - - /** - * Converts the result file into an ArrayList of QueryResponseJSON objects - */ - public static List<QueryResponseJSON> readResultsFile(File file) - { - List<QueryResponseJSON> results = new ArrayList<>(); - try (BufferedReader br = new BufferedReader(new FileReader(file))) - { - String line; - while ((line = br.readLine()) != null) - { - QueryResponseJSON jsonResult = new QueryResponseJSON(line); - results.add(jsonResult); - } - } catch (Exception e) - { - logger.error(e.toString()); - } - - return results; - } - - /** - * Write the ArrayList<String to a tmp file in the local filesystem with the given fileName - * - */ - public static String writeToTmpFile(List<String> list, String fileName, String suffix) throws IOException - { - File file = File.createTempFile(fileName, suffix); - file.deleteOnExit(); - logger.info("localFS: file = " + file); - - FileWriter fw = new FileWriter(file); - try (BufferedWriter bw = new BufferedWriter(fw)) - { - for (String s : list) - { - bw.write(s); - bw.newLine(); - } - } - - return file.getPath(); - } -} http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/general/ISO8601DateParserTest.java ---------------------------------------------------------------------- diff --git a/src/test/java/org/apache/pirk/general/ISO8601DateParserTest.java b/src/test/java/org/apache/pirk/general/ISO8601DateParserTest.java deleted file mode 100644 index 786f289..0000000 --- a/src/test/java/org/apache/pirk/general/ISO8601DateParserTest.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.pirk.general; - -import static org.junit.Assert.assertEquals; - -import java.text.ParseException; - -import org.apache.pirk.utils.ISO8601DateParser; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Class to test basic functionality of ISO8601DateParser class - */ -public class ISO8601DateParserTest -{ - private static final Logger logger = LoggerFactory.getLogger(ISO8601DateParserTest.class); - - @Test - public void testDateParsing() throws ParseException - { - logger.info("Starting testDateParsing: "); - - String date = "2016-02-20T23:29:05.000Z"; - long longDate = Long.parseLong("1456010945000"); // date in UTC - - assertEquals(longDate, ISO8601DateParser.getLongDate(date)); - assertEquals(date, ISO8601DateParser.fromLongDate(longDate)); - - logger.info("Successfully completed testDateParsing"); - } -} http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/general/KeyedHashTest.java ---------------------------------------------------------------------- diff --git a/src/test/java/org/apache/pirk/general/KeyedHashTest.java b/src/test/java/org/apache/pirk/general/KeyedHashTest.java deleted file mode 100644 index 676609f..0000000 --- a/src/test/java/org/apache/pirk/general/KeyedHashTest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.pirk.general; - -import static org.junit.Assert.assertEquals; - -import org.apache.pirk.utils.KeyedHash; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Basic functional tests for KeyedHash - * - */ -public class KeyedHashTest -{ - private static final Logger logger = LoggerFactory.getLogger(KeyedHashTest.class); - - @Test - public void testKeyedHash() - { - logger.info("Starting testKeyedHash: "); - - int hash1 = KeyedHash.hash("someKey", 12, "someInput"); - logger.info("hash1 = " + hash1 + " hash1 = " + Integer.toString(hash1, 2)); - - int hash2 = KeyedHash.hash("someKey", 32, "someInput"); - logger.info("hash2 = " + hash2 + " hash2 = " + Integer.toString(hash2, 2)); - - int hash3 = KeyedHash.hash("someKey", 34, "someInput"); - logger.info("hash3 = " + hash3 + " hash3 = " + Integer.toString(hash3, 2)); - - assertEquals(hash2, hash3); - assertEquals(hash1, hash2 & 0xFFF); - - logger.info("Successfully completed testKeyedHash"); - } - - @Test - public void testKeyedHashWithType() - { - testKeyedHashType("MD5"); - testKeyedHashType("SHA-1"); - testKeyedHashType("SHA-256"); - testKeyedHashType("FAKE-HASH-TYPE"); - } - - private void testKeyedHashType(String type) - { - logger.info("Starting testKeyedHashType with type: " + type); - - int hash1 = KeyedHash.hash("someKey", 12, "someInput", type); - logger.info("hash1 = " + hash1 + " hash1 = " + Integer.toString(hash1, 2)); - - int hash2 = KeyedHash.hash("someKey", 32, "someInput", type); - logger.info("hash2 = " + hash2 + " hash2 = " + Integer.toString(hash2, 2)); - - int hash3 = KeyedHash.hash("someKey", 34, "someInput", type); - logger.info("hash3 = " + hash3 + " hash3 = " + Integer.toString(hash3, 2)); - - assertEquals(hash2, hash3); - assertEquals(hash1, hash2 & 0xFFF); - - logger.info("Successfully completed testKeyedHashType with type: " + type); - } -} http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/general/PaillierTest.java ---------------------------------------------------------------------- diff --git a/src/test/java/org/apache/pirk/general/PaillierTest.java b/src/test/java/org/apache/pirk/general/PaillierTest.java deleted file mode 100644 index 14347fa..0000000 --- a/src/test/java/org/apache/pirk/general/PaillierTest.java +++ /dev/null @@ -1,303 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.apache.pirk.general; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; - -import java.math.BigInteger; -import java.util.Random; - -import org.apache.pirk.encryption.Paillier; -import org.apache.pirk.utils.PIRException; -import org.apache.pirk.utils.SystemConfiguration; -import org.junit.BeforeClass; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Basic test functionality for Paillier library - * - */ -public class PaillierTest -{ - private static final Logger logger = LoggerFactory.getLogger(PaillierTest.class); - - private static BigInteger p = null; // large prime - private static BigInteger q = null; // large prime - private static BigInteger N = null; // N=pq, RSA modulus - private static BigInteger NSquared = null; // N^2 - private static BigInteger lambdaN = null; // lambda(N) = lcm(p-1,q-1) - - private static int bitLength = 0; // bit length of the modulus N - private static int certainty = 64; // prob that new BigInteger values represents primes will exceed (1 - (1/2)^certainty) - - private static BigInteger r1 = null; // random number in (Z/NZ)* - private static BigInteger r2 = null; // random number in (Z/NZ)* - - private static BigInteger m1 = null; // message to encrypt - private static BigInteger m2 = null; // message to encrypt - - @BeforeClass - public static void setup() - { - p = BigInteger.valueOf(7); - q = BigInteger.valueOf(17); - N = p.multiply(q); - NSquared = N.multiply(N); - - lambdaN = BigInteger.valueOf(48); - - r1 = BigInteger.valueOf(3); - r2 = BigInteger.valueOf(4); - - m1 = BigInteger.valueOf(5); - m2 = BigInteger.valueOf(2); - - bitLength = 201;// bitLength = 384; - certainty = 128; - - logger.info("p = " + p.intValue() + " q = " + q.intValue() + " N = " + N.intValue() + " bitLength = " + N.bitLength() + " lambdaN = " + lambdaN + " m1 = " - + m1.intValue() + " m2 = " + m2.intValue() + " r1 = " + r1.intValue() + " r2 = " + r2.intValue()); - } - - @Test - public void testPIRExceptions() - { - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(2), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for p,q < 3"); - } catch (PIRException ignore) - {} - - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(2), BigInteger.valueOf(3), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for p < 3"); - } catch (PIRException ignore) - {} - - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(3), BigInteger.valueOf(2), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for q < 3"); - } catch (PIRException ignore) - {} - - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(7), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for p = q"); - } catch (PIRException ignore) - {} - - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(8), BigInteger.valueOf(7), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for p not prime"); - } catch (PIRException ignore) - {} - - try - { - Paillier paillier = new Paillier(BigInteger.valueOf(7), BigInteger.valueOf(10), 128); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for q not prime"); - } catch (PIRException ignore) - {} - - try - { - int systemPrimeCertainty = 
SystemConfiguration.getIntProperty("pir.primeCertainty", 128); - Paillier paillier = new Paillier(3072, systemPrimeCertainty - 10); - assertNotNull(paillier); - fail("Paillier constructor did not throw PIRException for certainty less than system default of " + systemPrimeCertainty); - } catch (PIRException ignore) - {} - - try - { - Paillier pailler = new Paillier(p, q, bitLength); - BigInteger encM1 = pailler.encrypt(N); - assertNotNull(encM1); - fail("Paillier encryption did not throw PIRException for message m = N"); - } catch (PIRException ignore) - {} - - try - { - Paillier pailler = new Paillier(p, q, bitLength); - BigInteger encM1 = pailler.encrypt(N.add(BigInteger.TEN)); - assertNotNull(encM1); - fail("Paillier encryption did not throw PIRException for message m > N"); - } catch (PIRException ignore) - {} - - try - { - Paillier pailler = new Paillier(bitLength, 128, bitLength); - assertNotNull(pailler); - fail("Paillier constructor did not throw PIRException for ensureBitSet = bitLength"); - } catch (PIRException ignore) - {} - - try - { - Paillier pailler = new Paillier(bitLength, 128, bitLength + 1); - assertNotNull(pailler); - fail("Paillier constructor did not throw PIRException for ensureBitSet > bitLength"); - } catch (PIRException ignore) - {} - } - - @Test - public void testPaillierGivenAllParameters() throws Exception - { - logger.info("Starting testPaillierGivenAllParameters: "); - - Paillier pailler = new Paillier(p, q, bitLength); - - assertEquals(pailler.getN(), N); - assertEquals(pailler.getLambdaN(), lambdaN); - - // Check encryption - BigInteger encM1 = pailler.encrypt(m1, r1); - BigInteger encM2 = pailler.encrypt(m2, r2); - logger.info("encM1 = " + encM1.intValue() + " encM2 = " + encM2.intValue()); - - assertEquals(encM1, BigInteger.valueOf(14019)); - assertEquals(encM2, BigInteger.valueOf(8836)); - - // Check decryption - BigInteger decM1 = pailler.decrypt(encM1); - BigInteger decM2 = pailler.decrypt(encM2); - logger.info("decM1 = " + decM1.intValue() + " decM2 = " + decM2.intValue()); - - assertEquals(decM1, m1); - assertEquals(decM2, m2); - - // Check homomorphic property: E_r1(m1)*E_r2(m2) mod N^2 = E_r1r2((m1+m2) mod N) mod N^2 - BigInteger encM1_times_encM2 = (encM1.multiply(encM2)).mod(NSquared); - BigInteger encM1plusM2 = pailler.encrypt((m1.add(m2)).mod(N), r1.multiply(r2)); - logger.info("encM1_times_encM2 = " + encM1_times_encM2.intValue() + " encM1plusM2 = " + encM1plusM2.intValue()); - - assertEquals(encM1_times_encM2, BigInteger.valueOf(5617)); - assertEquals(encM1plusM2, BigInteger.valueOf(5617)); - - logger.info("Successfully completed testPaillierGivenAllParameters: "); - } - - @Test - public void testPaillierWithKeyGeneration() throws Exception - { - logger.info("Starting testPaillierWithKeyGeneration: "); - - // Test with and without gmp optimization for modPow - SystemConfiguration.setProperty("pallier.FIPSPrimeGenerationChecks", "true"); - SystemConfiguration.setProperty("paillier.useGMPForModPow", "true"); - SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "true"); - testPaillerWithKeyGenerationGeneral(); - - SystemConfiguration.setProperty("pallier.FIPSPrimeGenerationChecks", "false"); - - SystemConfiguration.setProperty("paillier.useGMPForModPow", "true"); - SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "true"); - testPaillerWithKeyGenerationGeneral(); - - SystemConfiguration.setProperty("paillier.useGMPForModPow", "true"); - SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false"); - 
testPaillerWithKeyGenerationGeneral(); - - SystemConfiguration.setProperty("paillier.useGMPForModPow", "false"); - SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false"); - testPaillerWithKeyGenerationGeneral(); - - // Reset the properties - SystemConfiguration.initialize(); - - logger.info("Ending testPaillierWithKeyGeneration: "); - } - - public void testPaillerWithKeyGenerationGeneral() throws Exception - { - // Test without requiring highest bit to be set - logger.info("Starting testPaillierWithKeyGenerationBitSetOption with ensureHighBitSet = false"); - testPaillierWithKeyGenerationBitSetOption(-1); - - // Test requiring highest bit to be set - logger.info("Starting testPaillierWithKeyGenerationBitSetOption with ensureHighBitSet = true"); - testPaillierWithKeyGenerationBitSetOption(5); - } - - public void testPaillierWithKeyGenerationBitSetOption(int ensureBitSet) throws Exception - { - Random r = new Random(); - int lowBitLength = 3073; // inclusive - int highBitLength = 7001; // exclusive - - int loopVal = 1; // int loopVal = 1000; //change this and re-test for high loop testing - for (int i = 0; i < loopVal; ++i) - { - logger.info("i = " + i); - - basicTestPaillierWithKeyGeneration(bitLength, certainty, ensureBitSet); - basicTestPaillierWithKeyGeneration(3072, certainty, ensureBitSet); - - // Test with random bit length between 3073 and 7000 - int randomLargeBitLength = r.nextInt(highBitLength - lowBitLength) + lowBitLength; - basicTestPaillierWithKeyGeneration(randomLargeBitLength, certainty, ensureBitSet); - } - } - - private void basicTestPaillierWithKeyGeneration(int bitLengthInput, int certaintyInput, int ensureBitSet) throws Exception - { - Paillier pailler = new Paillier(bitLengthInput, certaintyInput, ensureBitSet); - BigInteger generatedN = pailler.getN(); - BigInteger geneartedNsquared = generatedN.multiply(generatedN); - - // Check the decrypting the encryption yields the message - BigInteger encM1 = pailler.encrypt(m1); - BigInteger encM2 = pailler.encrypt(m2); - logger.info("encM1 = " + encM1.intValue() + " encM2 = " + encM2.intValue()); - - BigInteger decM1 = pailler.decrypt(encM1); - BigInteger decM2 = pailler.decrypt(encM2); - logger.info("decM1 = " + decM1.intValue() + " decM2 = " + decM2.intValue()); - - assertEquals(decM1, m1); - assertEquals(decM2, m2); - - // Check homomorphic property: E_r1(m1)*E_r2(m2) mod N^2 = E_r1r2((m1+m2) mod N) mod N^2 - BigInteger encM1_times_encM2 = (encM1.multiply(encM2)).mod(geneartedNsquared); - BigInteger multDecrypt = pailler.decrypt(encM1_times_encM2); - BigInteger m1_plus_m2 = (m1.add(m2)).mod(N); - - logger.info("encM1_times_encM2 = " + encM1_times_encM2.intValue() + " multDecrypt = " + multDecrypt.intValue() + " m1_plus_m2 = " + m1_plus_m2.intValue()); - - assertEquals(multDecrypt, m1_plus_m2); - } -} http://git-wip-us.apache.org/repos/asf/incubator-pirk/blob/a643ae68/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java ---------------------------------------------------------------------- diff --git a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java b/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java deleted file mode 100644 index 6f779cd..0000000 --- a/src/test/java/org/apache/pirk/general/PartitionUtilsTest.java +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. 
The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.apache.pirk.general; - -import org.apache.pirk.schema.data.partitioner.IPDataPartitioner; -import org.apache.pirk.schema.data.partitioner.ISO8601DatePartitioner; -import org.apache.pirk.schema.data.partitioner.PrimitiveTypePartitioner; -import org.apache.pirk.utils.PIRException; -import org.apache.pirk.utils.SystemConfiguration; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.math.BigInteger; -import java.util.Arrays; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; - -/** - * Class to functionally test the bit conversion utils - */ -public class PartitionUtilsTest -{ - private static final Logger logger = LoggerFactory.getLogger(PartitionUtilsTest.class); - - @Test - public void testMask() - { - logger.info("Starting testMask: "); - - assertEquals(0, PrimitiveTypePartitioner.formBitMask(0).intValue()); - - assertEquals(0b000000000000001, PrimitiveTypePartitioner.formBitMask(1).intValue()); - assertEquals(0b000000000001111, PrimitiveTypePartitioner.formBitMask(4).intValue()); - assertEquals(0b000000001111111, PrimitiveTypePartitioner.formBitMask(7).intValue()); - assertEquals(0b111111111111111, PrimitiveTypePartitioner.formBitMask(15).intValue()); - - assertEquals(new BigInteger("FFFFF", 16), PrimitiveTypePartitioner.formBitMask(20)); - assertEquals(new BigInteger("FFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(32)); - assertEquals(new BigInteger("3FFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(42)); - assertEquals(new BigInteger("7FFFFFFFFFFFFFFF", 16), PrimitiveTypePartitioner.formBitMask(63)); - - logger.info("Successfully completed testMask"); - } - - @Test - public void testPartitionBits() throws PIRException - { - logger.info("Starting testPartitionBits: "); - - BigInteger value = new BigInteger("245"); // 11110101 - BigInteger value2 = new BigInteger("983"); // 1111010111 - - BigInteger mask4 = PrimitiveTypePartitioner.formBitMask(4); // 1111 - BigInteger mask8 = PrimitiveTypePartitioner.formBitMask(8); // 11111111 - - List<BigInteger> partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask4); - assertEquals(2, partitions.size()); - assertEquals(0b1111, partitions.get(0).intValue()); - assertEquals(0b0101, partitions.get(1).intValue()); - - partitions = PrimitiveTypePartitioner.partitionBits(value2, 4, mask4); - assertEquals(3, partitions.size()); - assertEquals(0b1111, partitions.get(0).intValue()); - assertEquals(0b0101, partitions.get(1).intValue()); - assertEquals(0b0011, partitions.get(2).intValue()); - - partitions = PrimitiveTypePartitioner.partitionBits(value, 8, mask8); - assertEquals(1, partitions.size()); - assertEquals(0b11110101, partitions.get(0).intValue()); - - try - { - partitions = PrimitiveTypePartitioner.partitionBits(value, 4, mask8); - fail("BitConversionUtils.partitionBits did not throw error for mismatched partitionSize and mask 
size"); - } catch (Exception ignore) - { - // Expected. - } - - logger.info("Successfully completed testPartitionBits"); - } - - @Test - public void testPartitions() throws Exception - { - logger.info("Starting testToPartitions:"); - - PrimitiveTypePartitioner primitivePartitioner = new PrimitiveTypePartitioner(); - IPDataPartitioner ipPartitioner = new IPDataPartitioner(); - ISO8601DatePartitioner datePartitioner = new ISO8601DatePartitioner(); - - // Test IP - String ipTest = "127.0.0.1"; - List<BigInteger> partsIP = ipPartitioner.toPartitions(ipTest, PrimitiveTypePartitioner.STRING); - assertEquals(4, partsIP.size()); - assertEquals(ipTest, ipPartitioner.fromPartitions(partsIP, 0, PrimitiveTypePartitioner.STRING)); - - // Test Date - String dateTest = "2016-02-20T23:29:05.000Z"; - List<BigInteger> partsDate = datePartitioner.toPartitions(dateTest, null); - assertEquals(8, partsDate.size()); - assertEquals(dateTest, datePartitioner.fromPartitions(partsDate, 0, null)); - - // Test byte - byte bTest = Byte.parseByte("10"); - List<BigInteger> partsByte = primitivePartitioner.toPartitions(bTest, PrimitiveTypePartitioner.BYTE); - assertEquals(1, partsByte.size()); - assertEquals(bTest, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE)); - - partsByte = primitivePartitioner.toPartitions("12", PrimitiveTypePartitioner.BYTE); - assertEquals(1, partsByte.size()); - assertEquals((byte) 12, primitivePartitioner.fromPartitions(partsByte, 0, PrimitiveTypePartitioner.BYTE)); - - List<BigInteger> partsByteMax = primitivePartitioner.toPartitions(Byte.MAX_VALUE, PrimitiveTypePartitioner.BYTE); - assertEquals(1, partsByteMax.size()); - assertEquals(Byte.MAX_VALUE, primitivePartitioner.fromPartitions(partsByteMax, 0, PrimitiveTypePartitioner.BYTE)); - - // Test string - String stringBits = SystemConfiguration.getProperty("pir.stringBits"); - SystemConfiguration.setProperty("pir.stringBits", "64"); - testString("testString"); // over the allowed bit size - testString("t"); // under the allowed bit size - SystemConfiguration.setProperty("pir.stringBits", stringBits); - - // Test short - short shortTest = Short.valueOf("2456"); - List<BigInteger> partsShort = primitivePartitioner.toPartitions(shortTest, PrimitiveTypePartitioner.SHORT); - assertEquals(2, partsShort.size()); - assertEquals(shortTest, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT)); - - partsShort = primitivePartitioner.toPartitions("32767", PrimitiveTypePartitioner.SHORT); - assertEquals(2, partsShort.size()); - assertEquals((short) 32767, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT)); - - partsShort = primitivePartitioner.toPartitions((short) -42, PrimitiveTypePartitioner.SHORT); - assertEquals(2, partsShort.size()); - assertEquals((short) -42, primitivePartitioner.fromPartitions(partsShort, 0, PrimitiveTypePartitioner.SHORT)); - - List<BigInteger> partsShortMax = primitivePartitioner.toPartitions(Short.MAX_VALUE, PrimitiveTypePartitioner.SHORT); - assertEquals(2, partsShortMax.size()); - assertEquals(Short.MAX_VALUE, primitivePartitioner.fromPartitions(partsShortMax, 0, PrimitiveTypePartitioner.SHORT)); - - // Test int - int intTest = Integer.parseInt("-5789"); - List<BigInteger> partsInt = primitivePartitioner.toPartitions(intTest, PrimitiveTypePartitioner.INT); - assertEquals(4, partsInt.size()); - assertEquals(intTest, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT)); - - partsInt = 
primitivePartitioner.toPartitions("2016", PrimitiveTypePartitioner.INT); - assertEquals(4, partsInt.size()); - assertEquals(2016, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT)); - - partsInt = primitivePartitioner.toPartitions(1386681237, PrimitiveTypePartitioner.INT); - assertEquals(4, partsInt.size()); - assertEquals(1386681237, primitivePartitioner.fromPartitions(partsInt, 0, PrimitiveTypePartitioner.INT)); - - List<BigInteger> partsIntMax = primitivePartitioner.toPartitions(Integer.MAX_VALUE, PrimitiveTypePartitioner.INT); - assertEquals(4, partsIntMax.size()); - assertEquals(Integer.MAX_VALUE, primitivePartitioner.fromPartitions(partsIntMax, 0, PrimitiveTypePartitioner.INT)); - - // Test long - long longTest = Long.parseLong("56789"); - List<BigInteger> partsLong = primitivePartitioner.toPartitions(longTest, PrimitiveTypePartitioner.LONG); - assertEquals(8, partsLong.size()); - assertEquals(longTest, primitivePartitioner.fromPartitions(partsLong, 0, PrimitiveTypePartitioner.LONG)); - - List<BigInteger> partsLongMax = primitivePartitioner.toPartitions(Long.MAX_VALUE, PrimitiveTypePartitioner.LONG); - assertEquals(8, partsLongMax.size()); - assertEquals(Long.MAX_VALUE, primitivePartitioner.fromPartitions(partsLongMax, 0, PrimitiveTypePartitioner.LONG)); - - // Test float - float floatTest = Float.parseFloat("567.77"); - List<BigInteger> partsFloat = primitivePartitioner.toPartitions(floatTest, PrimitiveTypePartitioner.FLOAT); - assertEquals(4, partsFloat.size()); - assertEquals(floatTest, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT)); - - partsFloat = primitivePartitioner.toPartitions(-99.99f, PrimitiveTypePartitioner.FLOAT); - assertEquals(4, partsFloat.size()); - assertEquals(-99.99f, primitivePartitioner.fromPartitions(partsFloat, 0, PrimitiveTypePartitioner.FLOAT)); - - List<BigInteger> partsFloatMax = primitivePartitioner.toPartitions(Float.MAX_VALUE, PrimitiveTypePartitioner.FLOAT); - assertEquals(4, partsFloatMax.size()); - assertEquals(Float.MAX_VALUE, primitivePartitioner.fromPartitions(partsFloatMax, 0, PrimitiveTypePartitioner.FLOAT)); - - // Test double - double doubleTest = Double.parseDouble("567.77"); - List<BigInteger> partsDouble = primitivePartitioner.toPartitions(doubleTest, PrimitiveTypePartitioner.DOUBLE); - assertEquals(8, partsDouble.size()); - assertEquals(doubleTest, primitivePartitioner.fromPartitions(partsDouble, 0, PrimitiveTypePartitioner.DOUBLE)); - - List<BigInteger> partsDoubleMax = primitivePartitioner.toPartitions(Double.MAX_VALUE, PrimitiveTypePartitioner.DOUBLE); - assertEquals(8, partsDoubleMax.size()); - assertEquals(Double.MAX_VALUE, primitivePartitioner.fromPartitions(partsDoubleMax, 0, PrimitiveTypePartitioner.DOUBLE)); - - // Test char - char charTest = 'b'; - List<BigInteger> partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR); - assertEquals(2, partsChar.size()); - assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR)); - - // Ensure Endianness preserved - charTest = '\uFFFE'; - partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR); - assertEquals(2, partsChar.size()); - assertEquals(charTest, primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR)); - - charTest = '\uFEFF'; - partsChar = primitivePartitioner.toPartitions(charTest, PrimitiveTypePartitioner.CHAR); - assertEquals(2, partsChar.size()); - assertEquals(charTest, 
primitivePartitioner.fromPartitions(partsChar, 0, PrimitiveTypePartitioner.CHAR)); - - List<BigInteger> partsCharMax = primitivePartitioner.toPartitions(Character.MAX_VALUE, PrimitiveTypePartitioner.CHAR); - assertEquals(2, partsCharMax.size()); - assertEquals(Character.MAX_VALUE, primitivePartitioner.fromPartitions(partsCharMax, 0, PrimitiveTypePartitioner.CHAR)); - - logger.info("Sucessfully completed testToPartitions:"); - } - - @Test - public void testPaddedPartitions() throws PIRException - { - PrimitiveTypePartitioner primitivePartitioner = new PrimitiveTypePartitioner(); - - List<String> primitiveTypes = Arrays.asList(PrimitiveTypePartitioner.BYTE, PrimitiveTypePartitioner.CHAR, PrimitiveTypePartitioner.SHORT, - PrimitiveTypePartitioner.INT, PrimitiveTypePartitioner.LONG, PrimitiveTypePartitioner.FLOAT, PrimitiveTypePartitioner.DOUBLE, - PrimitiveTypePartitioner.STRING); - for (String type : primitiveTypes) - { - assertEquals(primitivePartitioner.getNumPartitions(type), primitivePartitioner.getPaddedPartitions(type).size()); - } - } - - private void testString(String testString) throws Exception - { - PrimitiveTypePartitioner ptp = new PrimitiveTypePartitioner(); - - List<BigInteger> partsString = ptp.toPartitions(testString, PrimitiveTypePartitioner.STRING); - int numParts = Integer.parseInt(SystemConfiguration.getProperty("pir.stringBits")) / 8; - assertEquals(numParts, partsString.size()); - - logger.info("testString.getBytes().length = " + testString.getBytes().length); - int offset = numParts; - if (testString.getBytes().length < numParts) - { - offset = testString.getBytes().length; - } - String element = new String(testString.getBytes(), 0, offset); - assertEquals(element, ptp.fromPartitions(partsString, 0, PrimitiveTypePartitioner.STRING)); - } -}
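For readers following the deleted PaillierTest above: the core identity it exercises, E_r1(m1) * E_r2(m2) mod N^2 decrypting to (m1 + m2) mod N, can be reproduced with a minimal standalone sketch using only java.math.BigInteger. The toy primes, messages, and random values below are illustrative choices, not the fixtures from the deleted test; the test itself checks the same identity against hard-coded ciphertexts (14019, 8836, and the product 5617) for its fixed p, q, r1, and r2.

import java.math.BigInteger;

public class PaillierHomomorphismSketch
{
  // L(u) = (u - 1) / N, the helper used in standard Paillier decryption
  private static BigInteger L(BigInteger u, BigInteger n)
  {
    return u.subtract(BigInteger.ONE).divide(n);
  }

  private static BigInteger lcm(BigInteger a, BigInteger b)
  {
    return a.divide(a.gcd(b)).multiply(b);
  }

  public static void main(String[] args)
  {
    // Toy parameters -- far too small for real use, chosen only so the arithmetic is easy to follow
    BigInteger p = BigInteger.valueOf(7);
    BigInteger q = BigInteger.valueOf(17);
    BigInteger n = p.multiply(q);                              // N = p * q
    BigInteger nSquared = n.multiply(n);                       // N^2
    BigInteger g = n.add(BigInteger.ONE);                      // g = N + 1
    BigInteger lambda = lcm(p.subtract(BigInteger.ONE), q.subtract(BigInteger.ONE));
    BigInteger mu = L(g.modPow(lambda, nSquared), n).modInverse(n);

    BigInteger m1 = BigInteger.valueOf(5);
    BigInteger m2 = BigInteger.valueOf(7);
    BigInteger r1 = BigInteger.valueOf(3);                     // stand-ins for random values coprime to N
    BigInteger r2 = BigInteger.valueOf(5);

    // E_r(m) = g^m * r^N mod N^2
    BigInteger c1 = g.modPow(m1, nSquared).multiply(r1.modPow(n, nSquared)).mod(nSquared);
    BigInteger c2 = g.modPow(m2, nSquared).multiply(r2.modPow(n, nSquared)).mod(nSquared);

    // Multiplying ciphertexts adds plaintexts: D(c1 * c2 mod N^2) = (m1 + m2) mod N
    BigInteger product = c1.multiply(c2).mod(nSquared);
    BigInteger decrypted = L(product.modPow(lambda, nSquared), n).multiply(mu).mod(n);
    System.out.println(decrypted.equals(m1.add(m2).mod(n)));   // prints true
  }
}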
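testPaillierWithKeyGeneration repeats the same encrypt/decrypt round trip while toggling the GMP-related system properties. Outside the test harness, that round trip looks roughly like the sketch below. It assumes Pirk's Paillier class lives at org.apache.pirk.encryption.Paillier (the import is not visible in this diff) and reuses only the property names and constructor arguments that appear in the deleted test; key generation at 3072 bits can take a little while.

import java.math.BigInteger;

import org.apache.pirk.encryption.Paillier;
import org.apache.pirk.utils.SystemConfiguration;

public class PaillierKeyGenSketch
{
  public static void main(String[] args) throws Exception
  {
    // The same switches the test flips: plain BigInteger.modPow, no constant-time GMP mode
    SystemConfiguration.setProperty("paillier.useGMPForModPow", "false");
    SystemConfiguration.setProperty("paillier.GMPConstantTimeMode", "false");

    // 3072-bit modulus, prime certainty 128, -1 = no forced high bit (the test's ensureBitSet = -1 case)
    Paillier paillier = new Paillier(3072, 128, -1);

    BigInteger m = BigInteger.valueOf(42);
    BigInteger c = paillier.encrypt(m);
    System.out.println(paillier.decrypt(c).equals(m));          // prints true

    SystemConfiguration.initialize();                           // reset the properties, as the test does
  }
}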
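The PartitionUtilsTest assertions above pin down two behaviors: formBitMask(n) is the n-bit mask 2^n - 1, and partitionBits splits a value into fixed-width chunks starting from the most significant bits, so 245 = 0b11110101 with 4-bit chunks becomes [0b1111, 0b0101]. The standalone re-implementation below reproduces those expected values; it is an illustration of the contract the assertions describe, not the Pirk source.

import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;

public class PartitionBitsSketch
{
  // 2^bits - 1, the mask shape that testMask expects from formBitMask
  static BigInteger formBitMask(int bits)
  {
    return BigInteger.ONE.shiftLeft(bits).subtract(BigInteger.ONE);
  }

  // Split value into size-bit chunks from the most significant bits down;
  // the final chunk simply holds whatever low-order bits remain.
  static List<BigInteger> partitionBits(BigInteger value, int size)
  {
    List<BigInteger> parts = new ArrayList<>();
    int remaining = value.bitLength();
    while (remaining > 0)
    {
      int chunk = Math.min(size, remaining);
      parts.add(value.shiftRight(remaining - chunk).and(formBitMask(chunk)));
      remaining -= chunk;
    }
    return parts;
  }

  public static void main(String[] args)
  {
    System.out.println(partitionBits(new BigInteger("245"), 4));   // [15, 5]    i.e. 0b1111, 0b0101
    System.out.println(partitionBits(new BigInteger("983"), 4));   // [15, 5, 3] i.e. 0b1111, 0b0101, 0b0011
    System.out.println(partitionBits(new BigInteger("245"), 8));   // [245]
  }
}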
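Finally, testPartitions relies on each primitive type being split into 8-bit partitions (1 for byte, 2 for short and char, 4 for int and float, 8 for long and double) that round-trip back to the original value, with byte order preserved (the '\uFFFE' and '\uFEFF' checks). The sketch below mirrors that round trip for an int using a big-endian byte split; the big-endian choice is an assumption consistent with the round-trip checks, and the code illustrates the contract rather than Pirk's partitioner. Strings are the one variable-width case: testString shows they occupy pir.stringBits / 8 partitions, with longer inputs truncated to that many bytes on the way back out.

import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

public class PrimitivePartitionSketch
{
  // One 8-bit partition per byte, most significant byte first
  static List<BigInteger> toPartitions(int value)
  {
    byte[] bytes = ByteBuffer.allocate(Integer.BYTES).putInt(value).array();   // big-endian by default
    List<BigInteger> parts = new ArrayList<>();
    for (byte b : bytes)
    {
      parts.add(BigInteger.valueOf(b & 0xFF));
    }
    return parts;
  }

  // Reassemble the partitions in the same byte order
  static int fromPartitions(List<BigInteger> parts)
  {
    int value = 0;
    for (BigInteger part : parts)
    {
      value = (value << 8) | part.intValue();
    }
    return value;
  }

  public static void main(String[] args)
  {
    List<BigInteger> parts = toPartitions(-5789);            // the int value used in the test
    System.out.println(parts.size());                        // 4
    System.out.println(fromPartitions(parts) == -5789);      // true
  }
}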